Implement future alternate processing
This commit is contained in:
parent
f3ae31f1c2
commit
cfda485b34
12 changed files with 1168 additions and 38 deletions
193
test/test_alt.py
Normal file
193
test/test_alt.py
Normal file
|
@ -0,0 +1,193 @@
|
|||
"""Test alt"""
|
||||
import os
|
||||
import string
|
||||
import py
|
||||
import pytest
|
||||
import utils
|
||||
|
||||
TEST_PATHS = [utils.ALT_FILE1, utils.ALT_FILE2, utils.ALT_DIR]
|
||||
|
||||
|
||||
@pytest.mark.usefixtures('ds1_copy')
@pytest.mark.parametrize(
    'tracked,encrypt,exclude', [
        (False, False, False),
        (True, False, False),
        (False, True, False),
        (False, True, True),
    ], ids=['untracked', 'tracked', 'encrypted', 'excluded'])
def test_alt_source(
        runner, yadm_y, paths,
        tracked, encrypt, exclude):
    """Test yadm alt operates on all expected sources of alternates"""

    utils.create_alt_files(
        paths, '##default', tracked=tracked, encrypt=encrypt, exclude=exclude)
    run = runner(yadm_y('alt'))
    assert run.success
    assert run.err == ''
    linked = utils.parse_alt_output(run.out)

    # alternates are processed when tracked, or encrypted but not excluded
    expect_link = tracked or (encrypt and not exclude)
    for link_path in TEST_PATHS:
        source_file = link_path + '##default'
        dest = paths.work.join(link_path)
        if not expect_link:
            assert not dest.exists()
            assert str(paths.work.join(source_file)) not in linked
            continue
        assert dest.islink()
        target = py.path.local(dest.readlink())
        if target.isfile():
            assert dest.read() == source_file
        else:
            assert dest.join(utils.CONTAINED).read() == source_file
        assert str(paths.work.join(source_file)) in linked
|
||||
|
||||
|
||||
@pytest.mark.usefixtures('ds1_copy')
@pytest.mark.parametrize('suffix', [
    '##default',
    '##o.$tst_sys', '##os.$tst_sys',
    '##c.$tst_class', '##class.$tst_class',
    '##h.$tst_host', '##hostname.$tst_host',
    '##u.$tst_user', '##user.$tst_user',
])
def test_alt_conditions(
        runner, yadm_y, paths,
        tst_sys, tst_host, tst_user, suffix):
    """Test conditions supported by yadm alt"""

    # the class must be configured locally before alternates are created
    tst_class = 'testclass'
    utils.set_local(paths, 'class', tst_class)

    suffix = string.Template(suffix).substitute(
        tst_sys=tst_sys,
        tst_class=tst_class,
        tst_host=tst_host,
        tst_user=tst_user,
    )

    utils.create_alt_files(paths, suffix)
    run = runner(yadm_y('alt'))
    assert run.success
    assert run.err == ''
    linked = utils.parse_alt_output(run.out)

    # every test path should be linked for a matching condition
    for link_path in TEST_PATHS:
        source_file = link_path + suffix
        dest = paths.work.join(link_path)
        assert dest.islink()
        target = py.path.local(dest.readlink())
        if target.isfile():
            assert dest.read() == source_file
        else:
            assert dest.join(utils.CONTAINED).read() == source_file
        assert str(paths.work.join(source_file)) in linked
|
||||
|
||||
|
||||
@pytest.mark.usefixtures('ds1_copy')
@pytest.mark.parametrize('kind', ['builtin', '', 'envtpl', 'j2cli', 'j2'])
@pytest.mark.parametrize('label', ['t', 'template', 'yadm', ])
def test_alt_templates(
        runner, yadm_y, paths, kind, label):
    """Test templates supported by yadm alt"""

    suffix = f'##{label}.{kind}'
    utils.create_alt_files(paths, suffix)
    run = runner(yadm_y('alt'))
    assert run.success
    assert run.err == ''
    created = utils.parse_alt_output(run.out, linked=False)

    # directories are never template-processed; only files are created
    for created_path in TEST_PATHS:
        if created_path == utils.ALT_DIR:
            continue
        source_file = created_path + suffix
        dest = paths.work.join(created_path)
        assert dest.isfile()
        assert dest.read().strip() == source_file
        assert str(paths.work.join(source_file)) in created
|
||||
|
||||
|
||||
@pytest.mark.usefixtures('ds1_copy')
@pytest.mark.parametrize('autoalt', [None, 'true', 'false'])
def test_auto_alt(runner, yadm_y, paths, autoalt):
    """Test auto alt"""

    # configure yadm.auto-alt when a value is provided
    if autoalt:
        os.system(' '.join(yadm_y('config', 'yadm.auto-alt', autoalt)))

    utils.create_alt_files(paths, '##default')
    run = runner(yadm_y('status'))
    assert run.success
    assert run.err == ''
    linked = utils.parse_alt_output(run.out)

    for link_path in TEST_PATHS:
        source_file = link_path + '##default'
        dest = paths.work.join(link_path)
        if autoalt == 'false':
            assert not dest.exists()
            continue
        assert dest.islink()
        target = py.path.local(dest.readlink())
        if target.isfile():
            assert dest.read() == source_file
        else:
            assert dest.join(utils.CONTAINED).read() == source_file
        # no linking output when run via auto-alt
        assert str(paths.work.join(source_file)) not in linked
|
||||
|
||||
|
||||
@pytest.mark.usefixtures('ds1_copy')
def test_stale_link_removal(runner, yadm_y, paths):
    """Stale links to alternative files are removed

    This test ensures that when an already linked alternative becomes invalid
    due to a change in class, the alternate link is removed.
    """

    # set the class
    tst_class = 'testclass'
    utils.set_local(paths, 'class', tst_class)

    # create files which match the test class
    utils.create_alt_files(paths, f'##class.{tst_class}')

    # run alt to trigger linking
    run = runner(yadm_y('alt'))
    assert run.success
    assert run.err == ''
    linked = utils.parse_alt_output(run.out)

    # assert the proper linking has occurred
    for stale_path in TEST_PATHS:
        source_file = stale_path + '##class.' + tst_class
        dest = paths.work.join(stale_path)
        assert dest.islink()
        target = py.path.local(dest.readlink())
        if target.isfile():
            assert dest.read() == source_file
        else:
            assert dest.join(utils.CONTAINED).read() == source_file
        assert str(paths.work.join(source_file)) in linked

    # change the class so there are no valid alternates
    utils.set_local(paths, 'class', 'changedclass')

    # run alt again; previously created links must now be invalid
    run = runner(yadm_y('alt'))
    assert run.success
    assert run.err == ''
    linked = utils.parse_alt_output(run.out)

    # assert the linking is removed
    for stale_path in TEST_PATHS:
        source_file = stale_path + '##class.' + tst_class
        assert not paths.work.join(stale_path).exists()
        assert str(paths.work.join(source_file)) not in linked
|
|
@ -1,7 +1,6 @@
|
|||
"""Test alt"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import string
|
||||
import py
|
||||
import pytest
|
||||
|
@ -92,7 +91,7 @@ def test_alt(runner, yadm_y, paths,
|
|||
run = runner(yadm_y('alt'), env=env)
|
||||
assert run.success
|
||||
assert run.err == ''
|
||||
linked = linked_list(run.out)
|
||||
linked = utils.parse_alt_output(run.out)
|
||||
|
||||
# assert the proper linking has occurred
|
||||
for file_path in TEST_PATHS:
|
||||
|
@ -195,7 +194,7 @@ def test_wild(request, runner, yadm_y, paths,
|
|||
run = runner(yadm_y('alt'), env=env)
|
||||
assert run.success
|
||||
assert run.err == ''
|
||||
linked = linked_list(run.out)
|
||||
linked = utils.parse_alt_output(run.out)
|
||||
|
||||
# assert the proper linking has occurred
|
||||
for file_path in TEST_PATHS:
|
||||
|
@ -220,7 +219,7 @@ def test_wild(request, runner, yadm_y, paths,
|
|||
run = runner(yadm_y('alt'), env=env)
|
||||
assert run.success
|
||||
assert run.err == ''
|
||||
linked = linked_list(run.out)
|
||||
linked = utils.parse_alt_output(run.out)
|
||||
|
||||
# assert the proper linking has occurred
|
||||
for file_path in TEST_PATHS:
|
||||
|
@ -260,7 +259,7 @@ def test_local_override(runner, yadm_y, paths,
|
|||
run = runner(yadm_y('alt'), env=env)
|
||||
assert run.success
|
||||
assert run.err == ''
|
||||
linked = linked_list(run.out)
|
||||
linked = utils.parse_alt_output(run.out)
|
||||
|
||||
# assert the proper linking has occurred
|
||||
for file_path in TEST_PATHS:
|
||||
|
@ -300,7 +299,7 @@ def test_class_case(runner, yadm_y, paths, tst_sys, suffix):
|
|||
run = runner(yadm_y('alt'), env=env)
|
||||
assert run.success
|
||||
assert run.err == ''
|
||||
linked = linked_list(run.out)
|
||||
linked = utils.parse_alt_output(run.out)
|
||||
|
||||
# assert the proper linking has occurred
|
||||
for file_path in TEST_PATHS:
|
||||
|
@ -335,7 +334,7 @@ def test_auto_alt(runner, yadm_y, paths, autoalt):
|
|||
run = runner(yadm_y('status'), env=env)
|
||||
assert run.success
|
||||
assert run.err == ''
|
||||
linked = linked_list(run.out)
|
||||
linked = utils.parse_alt_output(run.out)
|
||||
|
||||
# assert the proper linking has occurred
|
||||
for file_path in TEST_PATHS:
|
||||
|
@ -373,7 +372,7 @@ def test_delimiter(runner, yadm_y, paths,
|
|||
run = runner(yadm_y('alt'), env=env)
|
||||
assert run.success
|
||||
assert run.err == ''
|
||||
linked = linked_list(run.out)
|
||||
linked = utils.parse_alt_output(run.out)
|
||||
|
||||
# assert the proper linking has occurred
|
||||
# only a delimiter of '.' is valid
|
||||
|
@ -415,7 +414,7 @@ def test_invalid_links_removed(runner, yadm_y, paths):
|
|||
run = runner(yadm_y('alt'), env=env)
|
||||
assert run.success
|
||||
assert run.err == ''
|
||||
linked = linked_list(run.out)
|
||||
linked = utils.parse_alt_output(run.out)
|
||||
|
||||
# assert the proper linking has occurred
|
||||
for file_path in TEST_PATHS:
|
||||
|
@ -439,20 +438,10 @@ def test_invalid_links_removed(runner, yadm_y, paths):
|
|||
run = runner(yadm_y('alt'), env=env)
|
||||
assert run.success
|
||||
assert run.err == ''
|
||||
linked = linked_list(run.out)
|
||||
linked = utils.parse_alt_output(run.out)
|
||||
|
||||
# assert the linking is removed
|
||||
for file_path in TEST_PATHS:
|
||||
source_file = file_path + '##' + tst_class
|
||||
assert not paths.work.join(file_path).exists()
|
||||
assert str(paths.work.join(source_file)) not in linked
|
||||
|
||||
|
||||
def linked_list(output):
    """Parse output, and return list of linked files"""
    matches = (re.match('Linking (.+) to (.+)$', line)
               for line in output.splitlines())
    # keyed by link target so later links to the same target win
    links = {found.group(2): found.group(1) for found in matches if found}
    return links.values()
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
"""Test jinja"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import pytest
|
||||
import utils
|
||||
|
||||
|
@ -55,7 +54,7 @@ def test_local_override(runner, yadm_y, paths,
|
|||
run = runner(yadm_y('alt'), env=env)
|
||||
assert run.success
|
||||
assert run.err == ''
|
||||
created = created_list(run.out)
|
||||
created = utils.parse_alt_output(run.out, linked=False)
|
||||
|
||||
# assert the proper creation has occurred
|
||||
for file_path in (utils.ALT_FILE1, utils.ALT_FILE2):
|
||||
|
@ -90,7 +89,7 @@ def test_auto_alt(runner, yadm_y, paths, autoalt, tst_sys,
|
|||
run = runner(yadm_y('status'), env=env)
|
||||
assert run.success
|
||||
assert run.err == ''
|
||||
created = created_list(run.out)
|
||||
created = utils.parse_alt_output(run.out, linked=False)
|
||||
|
||||
# assert the proper creation has occurred
|
||||
for file_path in (utils.ALT_FILE1, utils.ALT_FILE2):
|
||||
|
@ -180,7 +179,7 @@ def test_jinja(runner, yadm_y, paths,
|
|||
run = runner(yadm_y('alt'), env=env)
|
||||
assert run.success
|
||||
assert run.err == ''
|
||||
created = created_list(run.out)
|
||||
created = utils.parse_alt_output(run.out, linked=False)
|
||||
|
||||
# assert the proper creation has occurred
|
||||
for file_path in (utils.ALT_FILE1, utils.ALT_FILE2):
|
||||
|
@ -194,14 +193,3 @@ def test_jinja(runner, yadm_y, paths,
|
|||
else:
|
||||
assert not paths.work.join(file_path).exists()
|
||||
assert str(paths.work.join(source_file)) not in created
|
||||
|
||||
|
||||
def created_list(output):
    """Parse output, and return list of created files"""
    matches = (re.match('Creating (.+) from template (.+)$', line)
               for line in output.splitlines())
    # keyed by created file so later creations of the same file win
    created = {found.group(1): found.group(2) for found in matches if found}
    return created.values()
|
||||
|
|
61
test/test_unit_choose_template_cmd.py
Normal file
61
test/test_unit_choose_template_cmd.py
Normal file
|
@ -0,0 +1,61 @@
|
|||
"""Unit tests: choose_template_cmd"""
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.mark.parametrize('label', ['', 'builtin', 'other'])
@pytest.mark.parametrize('awk', [True, False], ids=['awk', 'no-awk'])
def test_kind_builtin(runner, yadm, awk, label):
    """Test kind: builtin"""

    awk_avail = 'true' if awk else 'false'
    # builtin processing requires awk, and only the '' / 'builtin' labels
    if awk and label != 'other':
        expected = 'template_builtin'
    else:
        expected = ''

    script = f"""
        YADM_TEST=1 source {yadm}
        function awk_available {{ {awk_avail}; }}
        template="$(choose_template_cmd "{label}")"
        echo "TEMPLATE:$template"
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    assert f'TEMPLATE:{expected}\n' in run.out
|
||||
|
||||
|
||||
@pytest.mark.parametrize('label', ['envtpl', 'j2cli', 'j2', 'other'])
@pytest.mark.parametrize('envtpl', [True, False], ids=['envtpl', 'no-envtpl'])
@pytest.mark.parametrize('j2cli', [True, False], ids=['j2cli', 'no-j2cli'])
def test_kind_j2cli_envtpl(runner, yadm, envtpl, j2cli, label):
    """Test kind: j2 (both j2cli & envtpl)

    j2cli is preferred over envtpl if available.
    """

    envtpl_avail = 'true' if envtpl else 'false'
    j2cli_avail = 'true' if j2cli else 'false'

    expected = ''
    if j2cli and label in ('j2cli', 'j2'):
        expected = 'template_j2cli'
    elif envtpl and label in ('envtpl', 'j2'):
        expected = 'template_envtpl'

    script = f"""
        YADM_TEST=1 source {yadm}
        function envtpl_available {{ {envtpl_avail}; }}
        function j2cli_available {{ {j2cli_avail}; }}
        template="$(choose_template_cmd "{label}")"
        echo "TEMPLATE:$template"
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    assert f'TEMPLATE:{expected}\n' in run.out
|
114
test/test_unit_record_score.py
Normal file
114
test/test_unit_record_score.py
Normal file
|
@ -0,0 +1,114 @@
|
|||
"""Unit tests: record_score"""
|
||||
import pytest
|
||||
|
||||
INIT_VARS = """
|
||||
score=0
|
||||
local_class=testclass
|
||||
local_system=testsystem
|
||||
local_host=testhost
|
||||
local_user=testuser
|
||||
alt_scores=()
|
||||
alt_filenames=()
|
||||
alt_targets=()
|
||||
alt_template_cmds=()
|
||||
"""
|
||||
|
||||
REPORT_RESULTS = """
|
||||
echo "SIZE:${#alt_scores[@]}"
|
||||
echo "SCORES:${alt_scores[@]}"
|
||||
echo "FILENAMES:${alt_filenames[@]}"
|
||||
echo "TARGETS:${alt_targets[@]}"
|
||||
"""
|
||||
|
||||
|
||||
def test_dont_record_zeros(runner, yadm):
    """Record nothing if the score is zero"""

    script = f"""
        YADM_TEST=1 source {yadm}
        {INIT_VARS}
        record_score "0" "testfile" "testtarget"
        {REPORT_RESULTS}
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    # all tracking arrays must remain empty
    for marker in ('SIZE:0\n', 'SCORES:\n', 'FILENAMES:\n', 'TARGETS:\n'):
        assert marker in run.out
|
||||
|
||||
|
||||
def test_new_scores(runner, yadm):
    """Test new scores"""

    script = f"""
        YADM_TEST=1 source {yadm}
        {INIT_VARS}
        record_score "1" "file_one" "targ_one"
        record_score "2" "file_two" "targ_two"
        record_score "4" "file_three" "targ_three"
        {REPORT_RESULTS}
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    # all three records are kept, in order
    for marker in (
            'SIZE:3\n',
            'SCORES:1 2 4\n',
            'FILENAMES:file_one file_two file_three\n',
            'TARGETS:targ_one targ_two targ_three\n',
    ):
        assert marker in run.out
|
||||
|
||||
|
||||
@pytest.mark.parametrize('difference', ['lower', 'equal', 'higher'])
def test_existing_scores(runner, yadm, difference):
    """Test existing scores"""

    score = {'lower': '1', 'equal': '2', 'higher': '4'}[difference]
    # only a strictly higher score replaces an existing record
    if difference == 'higher':
        expected_score, expected_target = '4', 'new_target'
    else:
        expected_score, expected_target = '2', 'existing_target'

    script = f"""
        YADM_TEST=1 source {yadm}
        {INIT_VARS}
        alt_scores=(2)
        alt_filenames=("testfile")
        alt_targets=("existing_target")
        record_score "{score}" "testfile" "new_target"
        {REPORT_RESULTS}
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    assert 'SIZE:1\n' in run.out
    assert f'SCORES:{expected_score}\n' in run.out
    assert 'FILENAMES:testfile\n' in run.out
    assert f'TARGETS:{expected_target}\n' in run.out
|
||||
|
||||
|
||||
def test_existing_template(runner, yadm):
    """Record nothing if a template command is registered for this file"""

    script = f"""
        YADM_TEST=1 source {yadm}
        {INIT_VARS}
        alt_scores=(1)
        alt_filenames=("testfile")
        alt_targets=()
        alt_template_cmds=("existing_template")
        record_score "2" "testfile" "new_target"
        {REPORT_RESULTS}
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    # the template registration wins; score/target remain untouched
    for marker in (
            'SIZE:1\n', 'SCORES:1\n', 'FILENAMES:testfile\n', 'TARGETS:\n'):
        assert marker in run.out
|
55
test/test_unit_record_template.py
Normal file
55
test/test_unit_record_template.py
Normal file
|
@ -0,0 +1,55 @@
|
|||
"""Unit tests: record_template"""
|
||||
|
||||
INIT_VARS = """
|
||||
alt_filenames=()
|
||||
alt_template_cmds=()
|
||||
alt_targets=()
|
||||
"""
|
||||
|
||||
REPORT_RESULTS = """
|
||||
echo "SIZE:${#alt_filenames[@]}"
|
||||
echo "FILENAMES:${alt_filenames[@]}"
|
||||
echo "CMDS:${alt_template_cmds[@]}"
|
||||
echo "TARGS:${alt_targets[@]}"
|
||||
"""
|
||||
|
||||
|
||||
def test_new_template(runner, yadm):
    """Test new template"""

    script = f"""
        YADM_TEST=1 source {yadm}
        {INIT_VARS}
        record_template "file_one" "cmd_one" "targ_one"
        record_template "file_two" "cmd_two" "targ_two"
        record_template "file_three" "cmd_three" "targ_three"
        {REPORT_RESULTS}
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    # all three templates are recorded, in order
    for marker in (
            'SIZE:3\n',
            'FILENAMES:file_one file_two file_three\n',
            'CMDS:cmd_one cmd_two cmd_three\n',
            'TARGS:targ_one targ_two targ_three\n',
    ):
        assert marker in run.out
|
||||
|
||||
|
||||
def test_existing_template(runner, yadm):
    """Overwrite existing templates"""

    script = f"""
        YADM_TEST=1 source {yadm}
        {INIT_VARS}
        alt_filenames=("testfile")
        alt_template_cmds=("existing_cmd")
        alt_targets=("existing_targ")
        record_template "testfile" "new_cmd" "new_targ"
        {REPORT_RESULTS}
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    # the new registration replaces the existing one in place
    for marker in (
            'SIZE:1\n',
            'FILENAMES:testfile\n',
            'CMDS:new_cmd\n',
            'TARGS:new_targ\n',
    ):
        assert marker in run.out
|
229
test/test_unit_score_file.py
Normal file
229
test/test_unit_score_file.py
Normal file
|
@ -0,0 +1,229 @@
|
|||
"""Unit tests: score_file"""
|
||||
import pytest
|
||||
|
||||
# Supported alternate conditions: the label aliases yadm accepts and the
# score modifier each contributes (mirrors score_file in yadm itself).
CONDITION = {
    'default': {
        'labels': ['default'],
        'modifier': 0,
    },
    'system': {
        'labels': ['o', 'os'],
        'modifier': 1,
    },
    'class': {
        'labels': ['c', 'class'],
        'modifier': 2,
    },
    'hostname': {
        'labels': ['h', 'hostname'],
        'modifier': 4,
    },
    'user': {
        'labels': ['u', 'user'],
        'modifier': 8,
    },
}
TEMPLATE_LABELS = ['t', 'template', 'yadm']

# The local_* values used throughout these tests; a condition only scores
# when its value matches the corresponding entry here.
_LOCAL_VALUES = {
    'system': 'testsystem',
    'class': 'testclass',
    'hostname': 'testhost',
    'user': 'testuser',
}


def calculate_score(filename):
    """Calculate the expected score for filename.

    The portion after '##' is a comma-separated list of conditions.  Each
    matching condition adds 1000 plus its modifier.  Any non-matching
    condition value, or any template label, makes the whole file score 0.
    """
    score = 0

    _, conditions = filename.split('##', 1)

    for condition in conditions.split(','):
        label = condition
        value = None
        if '.' in condition:
            label, value = condition.split('.', 1)
        if label in CONDITION['default']['labels']:
            score += 1000
            continue
        if label in TEMPLATE_LABELS:
            # templates are never scored by this path
            return 0
        for name, local_value in _LOCAL_VALUES.items():
            if label in CONDITION[name]['labels']:
                if value != local_value:
                    return 0
                score += 1000 + CONDITION[name]['modifier']
                break
        # unrecognized labels contribute nothing
    return score
|
||||
|
||||
|
||||
def _extend_filenames(filenames, condition, good_value, bad_value):
    """Extend filenames in place with matching and non-matching versions of
    condition (one per label alias), recording each expected score."""
    if not condition:
        return
    for filename in list(filenames):
        for match in [True, False]:
            for label in CONDITION[condition]['labels']:
                newfile = filename
                if not newfile.endswith('##'):
                    newfile += ','
                newfile += '.'.join(
                    [label, good_value if match else bad_value])
                filenames[newfile] = calculate_score(newfile)


@pytest.mark.parametrize(
    'default', ['default', None], ids=['default', 'no-default'])
@pytest.mark.parametrize(
    'system', ['system', None], ids=['system', 'no-system'])
@pytest.mark.parametrize(
    'cla', ['class', None], ids=['class', 'no-class'])
@pytest.mark.parametrize(
    'host', ['hostname', None], ids=['hostname', 'no-host'])
@pytest.mark.parametrize(
    'user', ['user', None], ids=['user', 'no-user'])
def test_score_values(
        runner, yadm, default, system, cla, host, user):
    """Test score results"""

    local_class = 'testclass'
    local_system = 'testsystem'
    local_host = 'testhost'
    local_user = 'testuser'
    filenames = {'filename##': 0}

    # 'default' carries no value, so it is expanded separately
    if default:
        for filename in list(filenames):
            for label in CONDITION[default]['labels']:
                newfile = filename
                if not newfile.endswith('##'):
                    newfile += ','
                newfile += label
                filenames[newfile] = calculate_score(newfile)
    _extend_filenames(filenames, system, local_system, 'badsys')
    _extend_filenames(filenames, cla, local_class, 'badclass')
    _extend_filenames(filenames, host, local_host, 'badhost')
    _extend_filenames(filenames, user, local_user, 'baduser')

    script = f"""
        YADM_TEST=1 source {yadm}
        score=0
        local_class={local_class}
        local_system={local_system}
        local_host={local_host}
        local_user={local_user}
    """
    expected = ''
    for filename in filenames:
        # BUGFIX: score each generated filename (the script previously used a
        # literal placeholder, so run.out could never equal expected)
        script += f"""
        score_file "{filename}"
        echo "{filename}"
        echo "$score"
        """
        expected += filename + '\n'
        expected += str(filenames[filename]) + '\n'
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    assert run.out == expected
|
||||
|
||||
|
||||
def test_score_values_templates(runner, yadm):
    """Test score results for template labels (all score zero)"""

    local_class = 'testclass'
    local_system = 'testsystem'
    local_host = 'testhost'
    local_user = 'testuser'
    filenames = {'filename##': 0}

    for filename in list(filenames):
        for label in TEMPLATE_LABELS:
            newfile = filename
            if not newfile.endswith('##'):
                newfile += ','
            newfile += '.'.join([label, 'testtemplate'])
            filenames[newfile] = calculate_score(newfile)

    script = f"""
        YADM_TEST=1 source {yadm}
        score=0
        local_class={local_class}
        local_system={local_system}
        local_host={local_host}
        local_user={local_user}
    """
    expected = ''
    for filename in filenames:
        # BUGFIX: score each generated filename (the script previously used a
        # literal placeholder, so run.out could never equal expected)
        script += f"""
        score_file "{filename}"
        echo "{filename}"
        echo "$score"
        """
        expected += filename + '\n'
        expected += str(filenames[filename]) + '\n'
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    assert run.out == expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    'cmd_generated',
    [True, False],
    ids=['supported-template', 'unsupported-template'])
def test_template_recording(runner, yadm, cmd_generated):
    """Template should be recorded if choose_template_cmd outputs a command"""

    if cmd_generated:
        mock = 'function choose_template_cmd() { echo "test_cmd"; }'
        expected = 'template recorded'
    else:
        mock = 'function choose_template_cmd() { return; }'
        expected = ''

    script = f"""
        YADM_TEST=1 source {yadm}
        function record_template() {{ echo "template recorded"; }}
        {mock}
        score_file "testfile##template.kind"
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    assert run.out.rstrip() == expected
|
|
@ -60,3 +60,18 @@ def test_set_local_alt_values(
|
|||
assert f"user='override'" in run.out
|
||||
else:
|
||||
assert f"user='{tst_user}'" in run.out
|
||||
|
||||
|
||||
def test_distro(runner, yadm):
    """Assert that local_distro is set"""

    # query_distro is mocked so no real lsb_release/etc. is needed
    script = f"""
        YADM_TEST=1 source {yadm}
        function query_distro() {{ echo "testdistro"; }}
        set_local_alt_values
        echo "distro='$local_distro'"
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    reported = run.out.strip()
    assert reported == "distro='testdistro'"
|
||||
|
|
98
test/test_unit_template_builtin.py
Normal file
98
test/test_unit_template_builtin.py
Normal file
|
@ -0,0 +1,98 @@
|
|||
"""Unit tests: template_builtin"""
|
||||
|
||||
# these values are also testing the handling of bizarre characters
|
||||
LOCAL_CLASS = "builtin_Test+@-!^Class"
|
||||
LOCAL_SYSTEM = "builtin_Test+@-!^System"
|
||||
LOCAL_HOST = "builtin_Test+@-!^Host"
|
||||
LOCAL_USER = "builtin_Test+@-!^User"
|
||||
LOCAL_DISTRO = "builtin_Test+@-!^Distro"
|
||||
TEMPLATE = f'''
|
||||
start of template
|
||||
builtin class = >YADM_CLASS<
|
||||
builtin os = >YADM_OS<
|
||||
builtin host = >YADM_HOSTNAME<
|
||||
builtin user = >YADM_USER<
|
||||
builtin distro = >YADM_DISTRO<
|
||||
YADM_IF CLASS="wrongclass1"
|
||||
wrong class 1
|
||||
YADM_END
|
||||
YADM_IF CLASS="{LOCAL_CLASS}"
|
||||
Included section for class = YADM_CLASS (YADM_CLASS repeated)
|
||||
YADM_END
|
||||
YADM_IF CLASS="wrongclass2"
|
||||
wrong class 2
|
||||
YADM_END
|
||||
YADM_IF OS="wrongos1"
|
||||
wrong os 1
|
||||
YADM_END
|
||||
YADM_IF OS="{LOCAL_SYSTEM}"
|
||||
Included section for os = YADM_OS (YADM_OS repeated)
|
||||
YADM_END
|
||||
YADM_IF OS="wrongos2"
|
||||
wrong os 2
|
||||
YADM_END
|
||||
YADM_IF HOSTNAME="wronghost1"
|
||||
wrong host 1
|
||||
YADM_END
|
||||
YADM_IF HOSTNAME="{LOCAL_HOST}"
|
||||
Included section for host = YADM_HOSTNAME (YADM_HOSTNAME repeated)
|
||||
YADM_END
|
||||
YADM_IF HOSTNAME="wronghost2"
|
||||
wrong host 2
|
||||
YADM_END
|
||||
YADM_IF USER="wronguser1"
|
||||
wrong user 1
|
||||
YADM_END
|
||||
YADM_IF USER="{LOCAL_USER}"
|
||||
Included section for user = YADM_USER (YADM_USER repeated)
|
||||
YADM_END
|
||||
YADM_IF USER="wronguser2"
|
||||
wrong user 2
|
||||
YADM_END
|
||||
YADM_IF DISTRO="wrongdistro1"
|
||||
wrong distro 1
|
||||
YADM_END
|
||||
YADM_IF DISTRO="{LOCAL_DISTRO}"
|
||||
Included section for distro = YADM_DISTRO (YADM_DISTRO repeated)
|
||||
YADM_END
|
||||
YADM_IF DISTRO="wrongdistro2"
|
||||
wrong distro 2
|
||||
YADM_END
|
||||
end of template
|
||||
'''
|
||||
EXPECTED = f'''
|
||||
start of template
|
||||
builtin class = >{LOCAL_CLASS}<
|
||||
builtin os = >{LOCAL_SYSTEM}<
|
||||
builtin host = >{LOCAL_HOST}<
|
||||
builtin user = >{LOCAL_USER}<
|
||||
builtin distro = >{LOCAL_DISTRO}<
|
||||
Included section for class = {LOCAL_CLASS} ({LOCAL_CLASS} repeated)
|
||||
Included section for os = {LOCAL_SYSTEM} ({LOCAL_SYSTEM} repeated)
|
||||
Included section for host = {LOCAL_HOST} ({LOCAL_HOST} repeated)
|
||||
Included section for user = {LOCAL_USER} ({LOCAL_USER} repeated)
|
||||
Included section for distro = {LOCAL_DISTRO} ({LOCAL_DISTRO} repeated)
|
||||
end of template
|
||||
'''
|
||||
|
||||
|
||||
def test_template_builtin(runner, yadm, tmpdir):
    """Test template_builtin"""

    # write the template input and reserve an output path
    input_file = tmpdir.join('input')
    output_file = tmpdir.join('output')
    input_file.write(TEMPLATE, ensure=True)

    script = f"""
        YADM_TEST=1 source {yadm}
        local_class="{LOCAL_CLASS}"
        local_system="{LOCAL_SYSTEM}"
        local_host="{LOCAL_HOST}"
        local_user="{LOCAL_USER}"
        local_distro="{LOCAL_DISTRO}"
        template_builtin "{input_file}" "{output_file}"
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    assert output_file.read() == EXPECTED
|
99
test/test_unit_template_j2.py
Normal file
99
test/test_unit_template_j2.py
Normal file
|
@ -0,0 +1,99 @@
|
|||
"""Unit tests: template_j2cli & template_envtpl"""
|
||||
import pytest
|
||||
|
||||
LOCAL_CLASS = "j2_Test+@-!^Class"
|
||||
LOCAL_SYSTEM = "j2_Test+@-!^System"
|
||||
LOCAL_HOST = "j2_Test+@-!^Host"
|
||||
LOCAL_USER = "j2_Test+@-!^User"
|
||||
LOCAL_DISTRO = "j2_Test+@-!^Distro"
|
||||
TEMPLATE = f'''
|
||||
start of template
|
||||
j2 class = >{{{{YADM_CLASS}}}}<
|
||||
j2 os = >{{{{YADM_OS}}}}<
|
||||
j2 host = >{{{{YADM_HOSTNAME}}}}<
|
||||
j2 user = >{{{{YADM_USER}}}}<
|
||||
j2 distro = >{{{{YADM_DISTRO}}}}<
|
||||
{{%- if YADM_CLASS == "wrongclass1" %}}
|
||||
wrong class 1
|
||||
{{%- endif %}}
|
||||
{{%- if YADM_CLASS == "{LOCAL_CLASS}" %}}
|
||||
Included section for class = {{{{YADM_CLASS}}}} ({{{{YADM_CLASS}}}} repeated)
|
||||
{{%- endif %}}
|
||||
{{%- if YADM_CLASS == "wrongclass2" %}}
|
||||
wrong class 2
|
||||
{{%- endif %}}
|
||||
{{%- if YADM_OS == "wrongos1" %}}
|
||||
wrong os 1
|
||||
{{%- endif %}}
|
||||
{{%- if YADM_OS == "{LOCAL_SYSTEM}" %}}
|
||||
Included section for os = {{{{YADM_OS}}}} ({{{{YADM_OS}}}} repeated)
|
||||
{{%- endif %}}
|
||||
{{%- if YADM_OS == "wrongos2" %}}
|
||||
wrong os 2
|
||||
{{%- endif %}}
|
||||
{{%- if YADM_HOSTNAME == "wronghost1" %}}
|
||||
wrong host 1
|
||||
{{%- endif %}}
|
||||
{{%- if YADM_HOSTNAME == "{LOCAL_HOST}" %}}
|
||||
Included section for host = {{{{YADM_HOSTNAME}}}} ({{{{YADM_HOSTNAME}}}} again)
|
||||
{{%- endif %}}
|
||||
{{%- if YADM_HOSTNAME == "wronghost2" %}}
|
||||
wrong host 2
|
||||
{{%- endif %}}
|
||||
{{%- if YADM_USER == "wronguser1" %}}
|
||||
wrong user 1
|
||||
{{%- endif %}}
|
||||
{{%- if YADM_USER == "{LOCAL_USER}" %}}
|
||||
Included section for user = {{{{YADM_USER}}}} ({{{{YADM_USER}}}} repeated)
|
||||
{{%- endif %}}
|
||||
{{%- if YADM_USER == "wronguser2" %}}
|
||||
wrong user 2
|
||||
{{%- endif %}}
|
||||
{{%- if YADM_DISTRO == "wrongdistro1" %}}
|
||||
wrong distro 1
|
||||
{{%- endif %}}
|
||||
{{%- if YADM_DISTRO == "{LOCAL_DISTRO}" %}}
|
||||
Included section for distro = {{{{YADM_DISTRO}}}} ({{{{YADM_DISTRO}}}} again)
|
||||
{{%- endif %}}
|
||||
{{%- if YADM_DISTRO == "wrongdistro2" %}}
|
||||
wrong distro 2
|
||||
{{%- endif %}}
|
||||
end of template
|
||||
'''
|
||||
EXPECTED = f'''
|
||||
start of template
|
||||
j2 class = >{LOCAL_CLASS}<
|
||||
j2 os = >{LOCAL_SYSTEM}<
|
||||
j2 host = >{LOCAL_HOST}<
|
||||
j2 user = >{LOCAL_USER}<
|
||||
j2 distro = >{LOCAL_DISTRO}<
|
||||
Included section for class = {LOCAL_CLASS} ({LOCAL_CLASS} repeated)
|
||||
Included section for os = {LOCAL_SYSTEM} ({LOCAL_SYSTEM} repeated)
|
||||
Included section for host = {LOCAL_HOST} ({LOCAL_HOST} again)
|
||||
Included section for user = {LOCAL_USER} ({LOCAL_USER} repeated)
|
||||
Included section for distro = {LOCAL_DISTRO} ({LOCAL_DISTRO} again)
|
||||
end of template
|
||||
'''
|
||||
|
||||
|
||||
@pytest.mark.parametrize('processor', ('j2cli', 'envtpl'))
def test_template_j2(runner, yadm, tmpdir, processor):
    """Render TEMPLATE via j2cli / envtpl and compare to EXPECTED"""

    # write the jinja template where the sourced yadm function can read it
    template_file = tmpdir.join('input')
    template_file.write(TEMPLATE, ensure=True)
    rendered_file = tmpdir.join('output')

    # source yadm in test mode, seed the local_* values it normally
    # detects, then invoke the processor-specific template function
    script = f"""
        YADM_TEST=1 source {yadm}
        local_class="{LOCAL_CLASS}"
        local_system="{LOCAL_SYSTEM}"
        local_host="{LOCAL_HOST}"
        local_user="{LOCAL_USER}"
        local_distro="{LOCAL_DISTRO}"
        template_{processor} "{template_file}" "{rendered_file}"
    """
    run = runner(command=['bash'], inp=script)

    assert run.success
    assert run.err == ''
    # rendered output must match the expected text exactly
    assert rendered_file.read() == EXPECTED
|
|
@ -3,6 +3,7 @@
|
|||
This module holds values/functions common to multiple tests.
|
||||
"""
|
||||
|
||||
import re
|
||||
import os
|
||||
|
||||
ALT_FILE1 = 'test_alt'
|
||||
|
@ -55,7 +56,7 @@ def create_alt_files(paths, suffix,
|
|||
# Do not test directory support for jinja alternates
|
||||
test_paths = [new_file1, new_file2]
|
||||
test_names = [ALT_FILE1, ALT_FILE2]
|
||||
if suffix != '##yadm.j2':
|
||||
if not re.match(r'##(t|template|yadm)\.', suffix):
|
||||
test_paths += [new_dir]
|
||||
test_names += [ALT_DIR]
|
||||
|
||||
|
@ -69,6 +70,22 @@ def create_alt_files(paths, suffix,
|
|||
_create_encrypt(encrypt, test_names, suffix, paths, exclude)
|
||||
|
||||
|
||||
def parse_alt_output(output, linked=True):
    """Parse output of 'alt', and return the reported source paths.

    With linked=True, collect the sources of "Linking SRC to DST" lines;
    otherwise collect the templates of "Creating DST from template TPL"
    lines. Only the last entry per destination is kept.
    """
    if linked:
        pattern = r'Linking (.+) to (.+)$'
    else:
        pattern = r'Creating (.+) from template (.+)$'

    # keyed by destination so repeated reports of a file collapse to one
    parsed = dict()
    for line in output.splitlines():
        found = re.match(pattern, line)
        if not found:
            continue
        if linked:
            parsed[found.group(2)] = found.group(1)
        else:
            parsed[found.group(1)] = found.group(2)
    return parsed.values()
|
||||
|
||||
|
||||
def _create_includefiles(includefile, paths, test_paths):
|
||||
if includefile:
|
||||
for dpath in INCLUDE_DIRS:
|
||||
|
|
276
yadm
276
yadm
|
@ -39,6 +39,8 @@ FULL_COMMAND=""
|
|||
|
||||
GPG_PROGRAM="gpg"
|
||||
GIT_PROGRAM="git"
|
||||
AWK_PROGRAM="awk"
|
||||
J2CLI_PROGRAM="j2"
|
||||
ENVTPL_PROGRAM="envtpl"
|
||||
LSB_RELEASE_PROGRAM="lsb_release"
|
||||
|
||||
|
@ -128,6 +130,225 @@ function main() {
|
|||
|
||||
}
|
||||
|
||||
|
||||
# ****** Alternate Processing ******
|
||||
|
||||
function score_file() {
  # Score an alternate candidate against the local attributes.
  # "$1" looks like path/base##cond.val[,cond.val...]; each condition adds
  # 1000 plus a per-type bonus so more conditions always outrank fewer,
  # and a failed condition disqualifies the candidate entirely.
  target="$1"
  filename="${target%%##*}"
  conditions="${target#*##}"

  score=0
  IFS=',' read -ra fields <<< "$conditions"
  for field in "${fields[@]}"; do
    label=${field%%.*}
    value=${field#*.}
    score=$((score + 1000))
    case "$label" in
      default)
        # default always matches, contributes no bonus
        ;;
      o|os)
        if [ "$value" != "$local_system" ]; then
          score=0
          return
        fi
        score=$((score + 1))
        ;;
      c|class)
        if [ "$value" != "$local_class" ]; then
          score=0
          return
        fi
        score=$((score + 2))
        ;;
      h|hostname)
        if [ "$value" != "$local_host" ]; then
          score=0
          return
        fi
        score=$((score + 4))
        ;;
      u|user)
        if [ "$value" != "$local_user" ]; then
          score=0
          return
        fi
        score=$((score + 8))
        ;;
      t|template|yadm)
        # templates bypass scoring; register the processor instead
        score=0
        cmd=$(choose_template_cmd "$value")
        if [ -n "$cmd" ]; then
          record_template "$filename" "$cmd" "$target"
        else
          debug "No supported template processor for template $target"
          [ -n "$loud" ] && echo "No supported template processor for template $target"
        fi
        return 0
        ;;
      *)
        # unsupported condition label disqualifies the candidate
        score=0
        return
        ;;
    esac
  done

  record_score "$score" "$filename" "$target"
}
|
||||
|
||||
function record_score() {
  # Track the best-scoring alternate target per filename in the parallel
  # arrays alt_filenames / alt_scores / alt_targets.
  score="$1"
  filename="$2"
  target="$3"

  # a zero score means no condition matched; nothing to record
  [ "$score" -eq 0 ] && return

  # find the tracking index for this filename, if already tracked
  index=-1
  for search_index in "${!alt_filenames[@]}"; do
    if [ "${alt_filenames[$search_index]}" = "$filename" ]; then
      index="$search_index"
      break
    fi
  done
  if [ "$index" -eq -1 ]; then
    # first sighting of this filename: append it and start its score at 0
    # (arrays are append-only here, so the new entry is the last index)
    alt_filenames+=("$filename")
    index=$(( ${#alt_filenames[@]} - 1 ))
    alt_scores[$index]=0
  fi

  # a registered template always wins over scored alternates; keep it
  [ "${alt_template_cmds[$index]+isset}" ] && return

  # keep only the highest-scoring target seen so far
  if [ "$score" -gt "${alt_scores[$index]}" ]; then
    alt_scores[$index]="$score"
    alt_targets[$index]="$target"
  fi

}
|
||||
|
||||
function record_template() {
  # Register a template processor for a filename in the parallel arrays
  # alt_filenames / alt_template_cmds / alt_targets.
  filename="$1"
  cmd="$2"
  target="$3"

  # find the tracking index for this filename, if already tracked
  index=-1
  for search_index in "${!alt_filenames[@]}"; do
    if [ "${alt_filenames[$search_index]}" = "$filename" ]; then
      index="$search_index"
      break
    fi
  done
  if [ "$index" -eq -1 ]; then
    # first sighting of this filename: append it
    # (arrays are append-only here, so the new entry is the last index)
    alt_filenames+=("$filename")
    index=$(( ${#alt_filenames[@]} - 1 ))
  fi

  # record the template command; the last one recorded wins
  alt_template_cmds[$index]="$cmd"
  alt_targets[$index]="$target"

}
|
||||
|
||||
function choose_template_cmd() {
  # Echo the template-processor function for template kind "$1";
  # echo nothing when that kind has no supported processor installed.
  # (Braces make the shell's left-associative ||/&& grouping explicit.)
  kind="$1"

  if { [ "$kind" = "builtin" ] || [ "$kind" = "" ]; } && awk_available; then
    echo "template_builtin"
  elif { [ "$kind" = "j2cli" ] || [ "$kind" = "j2" ]; } && j2cli_available; then
    echo "template_j2cli"
  elif { [ "$kind" = "envtpl" ] || [ "$kind" = "j2" ]; } && envtpl_available; then
    echo "template_envtpl"
  else
    return # this "kind" of template is not supported
  fi

}
|
||||
|
||||
# ****** Template Processors ******
|
||||
|
||||
function template_builtin() {
  # Render "$1" into "$2" with the built-in awk template processor:
  # substitutes YADM_<LABEL> variables and keeps/drops YADM_IF...YADM_END
  # blocks according to the local_* values.
  input="$1"
  output="$2"

  # the awk payload is quoted ("EOF") so nothing expands before awk runs
  awk_pgm=$(cat << "EOF"
# built-in template processor
BEGIN {
  c["CLASS"] = class
  c["OS"] = os
  c["HOSTNAME"] = host
  c["USER"] = user
  c["DISTRO"] = distro
  valid = conditions()
  end = "^YADM_END$"
  skip = "^YADM_(IF|END$)"
}
{ replace_vars() } # variable replacements
$0 ~ valid, $0 ~ end { if ($0 ~ skip) next } # valid conditional blocks
/^YADM_IF/, $0 ~ end { next } # invalid conditional blocks
{ print }

function replace_vars() {
  for (label in c) {
    gsub(("YADM_" label), c[label])
  }
}

function conditions() {
  pattern = "^("
  for (label in c) {
    value = c[label]
    gsub(/[\\.^$(){}\[\]|*+?]/, "\\\\&", value)
    pattern = sprintf("%sYADM_IF +%s *= *\"%s\"|", pattern, label, value)
  }
  sub(/\|$/,")",pattern)
  return pattern
}
EOF
)

  # hand the local attribute values to awk and render the template
  "$AWK_PROGRAM" \
    -v class="$local_class" \
    -v os="$local_system" \
    -v host="$local_host" \
    -v user="$local_user" \
    -v distro="$local_distro" \
    "$awk_pgm" \
    "$input" > "$output"

}
|
||||
|
||||
function template_j2cli() {
  # Render jinja template "$1" into "$2" with j2cli, exposing the local
  # attribute values as YADM_* environment variables for the template.
  input="$1"
  output="$2"

  YADM_CLASS="$local_class" YADM_OS="$local_system" \
  YADM_HOSTNAME="$local_host" YADM_USER="$local_user" \
  YADM_DISTRO="$local_distro" \
    "$J2CLI_PROGRAM" "$input" -o "$output"

}
|
||||
|
||||
function template_envtpl() {
  # Render jinja template "$1" into "$2" with envtpl, exposing the local
  # attribute values as YADM_* environment variables for the template.
  input="$1"
  output="$2"

  YADM_CLASS="$local_class" YADM_OS="$local_system" \
  YADM_HOSTNAME="$local_host" YADM_USER="$local_user" \
  YADM_DISTRO="$local_distro" \
    "$ENVTPL_PROGRAM" --keep-template "$input" -o "$output"

}
|
||||
|
||||
# ****** yadm Commands ******
|
||||
|
||||
function alt() {
|
||||
|
@ -140,6 +361,7 @@ function alt() {
|
|||
local local_system
|
||||
local local_host
|
||||
local local_user
|
||||
local local_distro
|
||||
set_local_alt_values
|
||||
|
||||
# only be noisy if the "alt" command was run directly
|
||||
|
@ -220,12 +442,54 @@ function set_local_alt_values() {
|
|||
local_user=$(id -u -n)
|
||||
fi
|
||||
|
||||
local_distro="$(query_distro)"
|
||||
|
||||
}
|
||||
|
||||
function alt_future_linking() {

  # parallel per-file tracking arrays used by score_file/record_*
  local alt_scores=()
  local alt_filenames=()
  local alt_targets=()
  local alt_template_cmds=()

  # NOTE: early return — everything below is intentionally disabled;
  # remove this return to activate the future alternate processing
  return

  # score every tracked path (and its parent dirs) plus encrypted files;
  # only paths containing "##" are alternate candidates
  for alt_path in $(for tracked in "${tracked_files[@]}"; do printf "%s\n" "$tracked" "${tracked%/*}"; done | LC_ALL=C sort -u) "${ENCRYPT_INCLUDE_FILES[@]}"; do
    alt_path="$YADM_WORK/$alt_path"
    if [[ "$alt_path" =~ .\#\#. ]] && [ -e "$alt_path" ]; then
      score_file "$alt_path"
    fi
  done

  # act on the winner recorded for each filename
  for index in "${!alt_filenames[@]}"; do
    filename="${alt_filenames[$index]}"
    target="${alt_targets[$index]}"
    template_cmd="${alt_template_cmds[$index]}"
    if [ -n "$template_cmd" ]; then
      # a template is defined, process the template
      debug "Creating $filename from template $target"
      [ -n "$loud" ] && echo "Creating $filename from template $target"
      "$template_cmd" "$target" "$filename"
    elif [ -n "$target" ]; then
      # a link target is defined, create symlink (or copy)
      debug "Linking $target to $filename"
      [ -n "$loud" ] && echo "Linking $target to $filename"
      if [ "$do_copy" -eq 1 ]; then
        # drop any stale symlink so cp writes a real file
        [ -L "$filename" ] && rm -f "$filename"
        cp -f "$target" "$filename"
      else
        ln -nfs "$target" "$filename"
        alt_linked+=("$target")
      fi
    fi
  done

}
|
||||
|
||||
|
@ -290,7 +554,7 @@ function alt_past_linking() {
|
|||
YADM_OS="$local_system" \
|
||||
YADM_HOSTNAME="$local_host" \
|
||||
YADM_USER="$local_user" \
|
||||
YADM_DISTRO=$(query_distro) \
|
||||
YADM_DISTRO="$local_distro" \
|
||||
"$ENVTPL_PROGRAM" --keep-template "$tracked_file" -o "$real_file"
|
||||
else
|
||||
debug "envtpl not available, not creating $real_file from template $tracked_file"
|
||||
|
@ -1204,6 +1468,14 @@ function bootstrap_available() {
|
|||
[ -f "$YADM_BOOTSTRAP" ] && [ -x "$YADM_BOOTSTRAP" ] && return
|
||||
return 1
|
||||
}
|
||||
function awk_available() {
  # succeed iff the configured awk program is on PATH
  command -v "$AWK_PROGRAM" >/dev/null 2>&1
}
|
||||
function j2cli_available() {
  # succeed iff the configured j2cli program is on PATH
  command -v "$J2CLI_PROGRAM" >/dev/null 2>&1
}
|
||||
function envtpl_available() {
|
||||
command -v "$ENVTPL_PROGRAM" >/dev/null 2>&1 && return
|
||||
return 1
|
||||
|
|
Loading…
Reference in a new issue