parent
f3ae31f1c2
commit
cfda485b34
@ -0,0 +1,193 @@ |
||||
"""Test alt""" |
||||
import os |
||||
import string |
||||
import py |
||||
import pytest |
||||
import utils |
||||
|
||||
TEST_PATHS = [utils.ALT_FILE1, utils.ALT_FILE2, utils.ALT_DIR] |
||||
|
||||
|
||||
@pytest.mark.usefixtures('ds1_copy')
@pytest.mark.parametrize(
    'tracked,encrypt,exclude', [
        (False, False, False),
        (True, False, False),
        (False, True, False),
        (False, True, True),
    ], ids=['untracked', 'tracked', 'encrypted', 'excluded'])
def test_alt_source(
        runner, yadm_y, paths,
        tracked, encrypt, exclude):
    """Test yadm alt operates on all expected sources of alternates"""

    utils.create_alt_files(
        paths, '##default', tracked=tracked, encrypt=encrypt, exclude=exclude)
    run = runner(yadm_y('alt'))
    assert run.success
    assert run.err == ''
    reported_links = utils.parse_alt_output(run.out)

    # alternates are linked only for tracked files, or encrypted files
    # that are not excluded
    expect_link = tracked or (encrypt and not exclude)

    for path in TEST_PATHS:
        source = path + '##default'
        if not expect_link:
            assert not paths.work.join(path).exists()
            assert str(paths.work.join(source)) not in reported_links
            continue
        link = paths.work.join(path)
        assert link.islink()
        target = py.path.local(link.readlink())
        if target.isfile():
            # file alternate: link content is the source name
            assert link.read() == source
        else:
            # directory alternate: check the contained file instead
            assert link.join(utils.CONTAINED).read() == source
        assert str(paths.work.join(source)) in reported_links
||||
|
||||
|
||||
@pytest.mark.usefixtures('ds1_copy')
@pytest.mark.parametrize('suffix', [
    '##default',
    '##o.$tst_sys', '##os.$tst_sys',
    '##c.$tst_class', '##class.$tst_class',
    '##h.$tst_host', '##hostname.$tst_host',
    '##u.$tst_user', '##user.$tst_user',
    ])
def test_alt_conditions(
        runner, yadm_y, paths,
        tst_sys, tst_host, tst_user, suffix):
    """Test conditions supported by yadm alt"""

    # set the class
    tst_class = 'testclass'
    utils.set_local(paths, 'class', tst_class)

    # resolve the $-placeholders in the parametrized suffix
    suffix = string.Template(suffix).substitute(
        tst_sys=tst_sys,
        tst_class=tst_class,
        tst_host=tst_host,
        tst_user=tst_user,
    )

    utils.create_alt_files(paths, suffix)
    run = runner(yadm_y('alt'))
    assert run.success
    assert run.err == ''
    reported_links = utils.parse_alt_output(run.out)

    # every test path should be linked for a matching condition
    for path in TEST_PATHS:
        source = path + suffix
        link = paths.work.join(path)
        assert link.islink()
        target = py.path.local(link.readlink())
        if target.isfile():
            assert link.read() == source
        else:
            assert link.join(utils.CONTAINED).read() == source
        assert str(paths.work.join(source)) in reported_links
||||
|
||||
|
||||
@pytest.mark.usefixtures('ds1_copy')
@pytest.mark.parametrize('kind', ['builtin', '', 'envtpl', 'j2cli', 'j2'])
@pytest.mark.parametrize('label', ['t', 'template', 'yadm', ])
def test_alt_templates(
        runner, yadm_y, paths, kind, label):
    """Test templates supported by yadm alt"""

    suffix = f'##{label}.{kind}'
    utils.create_alt_files(paths, suffix)
    run = runner(yadm_y('alt'))
    assert run.success
    assert run.err == ''
    created = utils.parse_alt_output(run.out, linked=False)

    # directories are not processed as templates; verify files only
    for path in TEST_PATHS:
        if path == utils.ALT_DIR:
            continue
        source = path + suffix
        rendered = paths.work.join(path)
        assert rendered.isfile()
        assert rendered.read().strip() == source
        assert str(paths.work.join(source)) in created
||||
|
||||
|
||||
@pytest.mark.usefixtures('ds1_copy')
@pytest.mark.parametrize('autoalt', [None, 'true', 'false'])
def test_auto_alt(runner, yadm_y, paths, autoalt):
    """Test auto alt"""

    # set the value of auto-alt (left unset when autoalt is None)
    if autoalt:
        os.system(' '.join(yadm_y('config', 'yadm.auto-alt', autoalt)))

    utils.create_alt_files(paths, '##default')
    # running any command (status) should trigger alt processing unless
    # auto-alt is explicitly disabled
    run = runner(yadm_y('status'))
    assert run.success
    assert run.err == ''
    reported_links = utils.parse_alt_output(run.out)

    for path in TEST_PATHS:
        source = path + '##default'
        if autoalt == 'false':
            assert not paths.work.join(path).exists()
            continue
        link = paths.work.join(path)
        assert link.islink()
        target = py.path.local(link.readlink())
        if target.isfile():
            assert link.read() == source
        else:
            assert link.join(utils.CONTAINED).read() == source
        # no linking output when run via auto-alt
        assert str(paths.work.join(source)) not in reported_links
||||
|
||||
|
||||
@pytest.mark.usefixtures('ds1_copy')
def test_stale_link_removal(runner, yadm_y, paths):
    """Stale links to alternative files are removed

    This test ensures that when an already linked alternative becomes invalid
    due to a change in class, the alternate link is removed.
    """

    # set the class
    tst_class = 'testclass'
    utils.set_local(paths, 'class', tst_class)

    # create files which match the test class
    utils.create_alt_files(paths, f'##class.{tst_class}')

    # run alt to trigger linking
    run = runner(yadm_y('alt'))
    assert run.success
    assert run.err == ''
    reported_links = utils.parse_alt_output(run.out)

    # assert the proper linking has occurred
    for path in TEST_PATHS:
        source = f'{path}##class.{tst_class}'
        link = paths.work.join(path)
        assert link.islink()
        target = py.path.local(link.readlink())
        if target.isfile():
            assert link.read() == source
        else:
            assert link.join(utils.CONTAINED).read() == source
        assert str(paths.work.join(source)) in reported_links

    # change the class so there are no valid alternates
    utils.set_local(paths, 'class', 'changedclass')

    # run alt again; the now-invalid links must be removed
    run = runner(yadm_y('alt'))
    assert run.success
    assert run.err == ''
    reported_links = utils.parse_alt_output(run.out)

    # assert the linking is removed
    for path in TEST_PATHS:
        source = f'{path}##class.{tst_class}'
        assert not paths.work.join(path).exists()
        assert str(paths.work.join(source)) not in reported_links
@ -0,0 +1,61 @@ |
||||
"""Unit tests: choose_template_cmd""" |
||||
import pytest |
||||
|
||||
|
||||
@pytest.mark.parametrize('label', ['', 'builtin', 'other'])
@pytest.mark.parametrize('awk', [True, False], ids=['awk', 'no-awk'])
def test_kind_builtin(runner, yadm, awk, label):
    """Test kind: builtin"""

    # the builtin processor is chosen only when awk is available and the
    # label is empty or explicitly 'builtin'
    awk_avail = 'true' if awk else 'false'
    expected = 'template_builtin' if awk and label != 'other' else ''

    script = f"""
        YADM_TEST=1 source {yadm}
        function awk_available {{ {awk_avail}; }}
        template="$(choose_template_cmd "{label}")"
        echo "TEMPLATE:$template"
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    assert f'TEMPLATE:{expected}\n' in run.out
||||
|
||||
|
||||
@pytest.mark.parametrize('label', ['envtpl', 'j2cli', 'j2', 'other'])
@pytest.mark.parametrize('envtpl', [True, False], ids=['envtpl', 'no-envtpl'])
@pytest.mark.parametrize('j2cli', [True, False], ids=['j2cli', 'no-j2cli'])
def test_kind_j2cli_envtpl(runner, yadm, envtpl, j2cli, label):
    """Test kind: j2 (both j2cli & envtpl)

    j2cli is preferred over envtpl if available.
    """

    envtpl_avail = 'true' if envtpl else 'false'
    j2cli_avail = 'true' if j2cli else 'false'

    # j2cli takes 'j2cli'/'j2' when present; envtpl covers 'envtpl'/'j2'
    if j2cli and label in ('j2cli', 'j2'):
        expected = 'template_j2cli'
    elif envtpl and label in ('envtpl', 'j2'):
        expected = 'template_envtpl'
    else:
        expected = ''

    script = f"""
        YADM_TEST=1 source {yadm}
        function envtpl_available {{ {envtpl_avail}; }}
        function j2cli_available {{ {j2cli_avail}; }}
        template="$(choose_template_cmd "{label}")"
        echo "TEMPLATE:$template"
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    assert f'TEMPLATE:{expected}\n' in run.out
@ -0,0 +1,114 @@ |
||||
"""Unit tests: record_score""" |
||||
import pytest |
||||
|
||||
# Bash fragment initializing every variable record_score reads or writes;
# prepended to each test script after sourcing yadm.
INIT_VARS = """
score=0
local_class=testclass
local_system=testsystem
local_host=testhost
local_user=testuser
alt_scores=()
alt_filenames=()
alt_targets=()
alt_template_cmds=()
"""

# Bash fragment echoing the resulting arrays so the tests can assert on
# size and contents via substring matches against run.out.
REPORT_RESULTS = """
echo "SIZE:${#alt_scores[@]}"
echo "SCORES:${alt_scores[@]}"
echo "FILENAMES:${alt_filenames[@]}"
echo "TARGETS:${alt_targets[@]}"
"""
||||
|
||||
|
||||
def test_dont_record_zeros(runner, yadm):
    """Record nothing if the score is zero"""

    script = f"""
        YADM_TEST=1 source {yadm}
        {INIT_VARS}
        record_score "0" "testfile" "testtarget"
        {REPORT_RESULTS}
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    # a zero score must leave every tracking array untouched
    for marker in ('SIZE:0\n', 'SCORES:\n', 'FILENAMES:\n', 'TARGETS:\n'):
        assert marker in run.out
||||
|
||||
|
||||
def test_new_scores(runner, yadm):
    """Test new scores"""

    script = f"""
        YADM_TEST=1 source {yadm}
        {INIT_VARS}
        record_score "1" "file_one" "targ_one"
        record_score "2" "file_two" "targ_two"
        record_score "4" "file_three" "targ_three"
        {REPORT_RESULTS}
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    # all three records must be appended in order
    for marker in (
            'SIZE:3\n',
            'SCORES:1 2 4\n',
            'FILENAMES:file_one file_two file_three\n',
            'TARGETS:targ_one targ_two targ_three\n'):
        assert marker in run.out
||||
|
||||
|
||||
@pytest.mark.parametrize('difference', ['lower', 'equal', 'higher'])
def test_existing_scores(runner, yadm, difference):
    """Test existing scores"""

    score = {'lower': '1', 'equal': '2', 'higher': '4'}[difference]
    # only a strictly higher score replaces an existing record
    if difference == 'higher':
        expected_score, expected_target = '4', 'new_target'
    else:
        expected_score, expected_target = '2', 'existing_target'

    script = f"""
        YADM_TEST=1 source {yadm}
        {INIT_VARS}
        alt_scores=(2)
        alt_filenames=("testfile")
        alt_targets=("existing_target")
        record_score "{score}" "testfile" "new_target"
        {REPORT_RESULTS}
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    assert 'SIZE:1\n' in run.out
    assert f'SCORES:{expected_score}\n' in run.out
    assert 'FILENAMES:testfile\n' in run.out
    assert f'TARGETS:{expected_target}\n' in run.out
||||
|
||||
|
||||
def test_existing_template(runner, yadm):
    """Record nothing if a template command is registered for this file"""

    script = f"""
        YADM_TEST=1 source {yadm}
        {INIT_VARS}
        alt_scores=(1)
        alt_filenames=("testfile")
        alt_targets=()
        alt_template_cmds=("existing_template")
        record_score "2" "testfile" "new_target"
        {REPORT_RESULTS}
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    # the template entry wins: no score or target may be recorded
    for marker in (
            'SIZE:1\n',
            'SCORES:1\n',
            'FILENAMES:testfile\n',
            'TARGETS:\n'):
        assert marker in run.out
@ -0,0 +1,55 @@ |
||||
"""Unit tests: record_template""" |
||||
|
||||
# Bash fragment initializing the arrays record_template manipulates.
INIT_VARS = """
alt_filenames=()
alt_template_cmds=()
alt_targets=()
"""

# Bash fragment echoing the arrays so the tests can assert on size and
# contents via substring matches against run.out.
REPORT_RESULTS = """
echo "SIZE:${#alt_filenames[@]}"
echo "FILENAMES:${alt_filenames[@]}"
echo "CMDS:${alt_template_cmds[@]}"
echo "TARGS:${alt_targets[@]}"
"""
||||
|
||||
|
||||
def test_new_template(runner, yadm):
    """Test new template"""

    script = f"""
        YADM_TEST=1 source {yadm}
        {INIT_VARS}
        record_template "file_one" "cmd_one" "targ_one"
        record_template "file_two" "cmd_two" "targ_two"
        record_template "file_three" "cmd_three" "targ_three"
        {REPORT_RESULTS}
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    # all three templates must be appended in order
    for marker in (
            'SIZE:3\n',
            'FILENAMES:file_one file_two file_three\n',
            'CMDS:cmd_one cmd_two cmd_three\n',
            'TARGS:targ_one targ_two targ_three\n'):
        assert marker in run.out
||||
|
||||
|
||||
def test_existing_template(runner, yadm):
    """Overwrite existing templates"""

    script = f"""
        YADM_TEST=1 source {yadm}
        {INIT_VARS}
        alt_filenames=("testfile")
        alt_template_cmds=("existing_cmd")
        alt_targets=("existing_targ")
        record_template "testfile" "new_cmd" "new_targ"
        {REPORT_RESULTS}
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    # the existing record for testfile is replaced, not duplicated
    for marker in (
            'SIZE:1\n',
            'FILENAMES:testfile\n',
            'CMDS:new_cmd\n',
            'TARGS:new_targ\n'):
        assert marker in run.out
@ -0,0 +1,229 @@ |
||||
"""Unit tests: score_file""" |
||||
import pytest |
||||
|
||||
# Supported alternate conditions: the labels yadm accepts for each
# condition and the modifier added to the base score of 1000 on a match.
CONDITION = {
    'default': {
        'labels': ['default'],
        'modifier': 0,
    },
    'system': {
        'labels': ['o', 'os'],
        'modifier': 1,
    },
    'class': {
        'labels': ['c', 'class'],
        'modifier': 2,
    },
    'hostname': {
        'labels': ['h', 'hostname'],
        'modifier': 4,
    },
    'user': {
        'labels': ['u', 'user'],
        'modifier': 8,
    },
}
TEMPLATE_LABELS = ['t', 'template', 'yadm']

# Local values the bash-side scoring is configured with; a condition
# value must equal its entry here to count as a match.
LOCAL_VALUES = {
    'system': 'testsystem',
    'class': 'testclass',
    'hostname': 'testhost',
    'user': 'testuser',
}


def calculate_score(filename):
    """Calculate the expected score for an alternate filename.

    The portion after '##' is a comma-separated list of conditions
    ('label' or 'label.value').  Each matching condition adds 1000 plus
    its modifier; any non-matching or template condition makes the whole
    file score zero.  Unknown labels are ignored.
    """
    score = 0
    _, conditions = filename.split('##', 1)

    for condition in conditions.split(','):
        # split 'label.value'; value is '' when no '.' is present, which
        # never equals a local value (same result as the original None)
        label, _, value = condition.partition('.')
        if label in CONDITION['default']['labels']:
            score += 1000
            continue
        if label in TEMPLATE_LABELS:
            # templates are processed, not scored
            return 0
        for name, local_value in LOCAL_VALUES.items():
            if label in CONDITION[name]['labels']:
                if value != local_value:
                    return 0
                score += 1000 + CONDITION[name]['modifier']
                break
    return score
||||
|
||||
|
||||
def _add_scored_files(filenames, labels, values):
    """Extend every existing filename with each label/value combination.

    For each name already in *filenames*, append one new name per
    (value, label) pair — 'label' alone when value is None, otherwise
    'label.value' — separated by ',' unless the name still ends in '##'.
    Each new name is mapped to its expected score.
    """
    for filename in list(filenames):
        for value in values:
            for label in labels:
                newfile = filename
                if not newfile.endswith('##'):
                    newfile += ','
                newfile += label if value is None else '.'.join([label, value])
                filenames[newfile] = calculate_score(newfile)


@pytest.mark.parametrize(
    'default', ['default', None], ids=['default', 'no-default'])
@pytest.mark.parametrize(
    'system', ['system', None], ids=['system', 'no-system'])
@pytest.mark.parametrize(
    'cla', ['class', None], ids=['class', 'no-class'])
@pytest.mark.parametrize(
    'host', ['hostname', None], ids=['hostname', 'no-host'])
@pytest.mark.parametrize(
    'user', ['user', None], ids=['user', 'no-user'])
def test_score_values(
        runner, yadm, default, system, cla, host, user):
    """Test score results

    Builds every combination of matching/non-matching conditions, runs
    score_file on each generated filename, and compares the score yadm
    reports against calculate_score.
    """
    local_class = 'testclass'
    local_system = 'testsystem'
    local_host = 'testhost'
    local_user = 'testuser'
    filenames = {'filename##': 0}

    # build the matrix of filenames and their expected scores
    if default:
        _add_scored_files(filenames, CONDITION[default]['labels'], [None])
    if system:
        _add_scored_files(
            filenames, CONDITION[system]['labels'], [local_system, 'badsys'])
    if cla:
        _add_scored_files(
            filenames, CONDITION[cla]['labels'], [local_class, 'badclass'])
    if host:
        _add_scored_files(
            filenames, CONDITION[host]['labels'], [local_host, 'badhost'])
    if user:
        _add_scored_files(
            filenames, CONDITION[user]['labels'], [local_user, 'baduser'])

    script = f"""
        YADM_TEST=1 source {yadm}
        score=0
        local_class={local_class}
        local_system={local_system}
        local_host={local_host}
        local_user={local_user}
    """
    expected = ''
    for filename in filenames:
        # score each file and echo the filename/score pair so the output
        # can be compared against the expected values verbatim
        script += f"""
        score_file "{filename}"
        echo "{filename}"
        echo "$score"
        """
        expected += filename + '\n'
        expected += str(filenames[filename]) + '\n'
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    assert run.out == expected
||||
|
||||
|
||||
def test_score_values_templates(runner, yadm):
    """Test score results for template conditions

    Template conditions should never score; score_file must report zero
    for every template label.
    """
    local_class = 'testclass'
    local_system = 'testsystem'
    local_host = 'testhost'
    local_user = 'testuser'
    filenames = {'filename##': 0}

    # add one filename per template label; all are expected to score 0
    for filename in list(filenames):
        for label in TEMPLATE_LABELS:
            newfile = filename
            if not newfile.endswith('##'):
                newfile += ','
            newfile += '.'.join([label, 'testtemplate'])
            filenames[newfile] = calculate_score(newfile)

    script = f"""
        YADM_TEST=1 source {yadm}
        score=0
        local_class={local_class}
        local_system={local_system}
        local_host={local_host}
        local_user={local_user}
    """
    expected = ''
    for filename in filenames:
        # score each file and echo the filename/score pair so the output
        # can be compared against the expected values verbatim
        script += f"""
        score_file "{filename}"
        echo "{filename}"
        echo "$score"
        """
        expected += filename + '\n'
        expected += str(filenames[filename]) + '\n'
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    assert run.out == expected
||||
|
||||
|
||||
@pytest.mark.parametrize(
    'cmd_generated',
    [True, False],
    ids=['supported-template', 'unsupported-template'])
def test_template_recording(runner, yadm, cmd_generated):
    """Template should be recorded if choose_template_cmd outputs a command"""

    # mock choose_template_cmd to either emit a command or stay silent
    if cmd_generated:
        mock = 'function choose_template_cmd() { echo "test_cmd"; }'
        expected = 'template recorded'
    else:
        mock = 'function choose_template_cmd() { return; }'
        expected = ''

    script = f"""
        YADM_TEST=1 source {yadm}
        function record_template() {{ echo "template recorded"; }}
        {mock}
        score_file "testfile##template.kind"
    """
    run = runner(command=['bash'], inp=script)
    assert run.success
    assert run.err == ''
    assert run.out.rstrip() == expected
@ -0,0 +1,98 @@ |
||||
"""Unit tests: template_builtin""" |
||||
|
||||
# these values are also testing the handling of bizarre characters |
||||
LOCAL_CLASS = "builtin_Test+@-!^Class" |
||||
LOCAL_SYSTEM = "builtin_Test+@-!^System" |
||||
LOCAL_HOST = "builtin_Test+@-!^Host" |
||||
LOCAL_USER = "builtin_Test+@-!^User" |
||||
LOCAL_DISTRO = "builtin_Test+@-!^Distro" |
||||
TEMPLATE = f''' |
||||
start of template |
||||
builtin class = >YADM_CLASS< |
||||
builtin os = >YADM_OS< |
||||
builtin host = >YADM_HOSTNAME< |
||||
builtin user = >YADM_USER< |
||||
builtin distro = >YADM_DISTRO< |
||||
YADM_IF CLASS="wrongclass1" |
||||
wrong class 1 |
||||
YADM_END |
||||
YADM_IF CLASS="{LOCAL_CLASS}" |
||||
Included section for class = YADM_CLASS (YADM_CLASS repeated) |
||||
YADM_END |
||||
YADM_IF CLASS="wrongclass2" |
||||
wrong class 2 |
||||
YADM_END |
||||
YADM_IF OS="wrongos1" |
||||
wrong os 1 |
||||
YADM_END |
||||
YADM_IF OS="{LOCAL_SYSTEM}" |
||||
Included section for os = YADM_OS (YADM_OS repeated) |
||||
YADM_END |
||||
YADM_IF OS="wrongos2" |
||||
wrong os 2 |
||||
YADM_END |
||||
YADM_IF HOSTNAME="wronghost1" |
||||
wrong host 1 |
||||
YADM_END |
||||
YADM_IF HOSTNAME="{LOCAL_HOST}" |
||||
Included section for host = YADM_HOSTNAME (YADM_HOSTNAME repeated) |
||||
YADM_END |
||||
YADM_IF HOSTNAME="wronghost2" |
||||
wrong host 2 |
||||
YADM_END |
||||
YADM_IF USER="wronguser1" |
||||
wrong user 1 |
||||
YADM_END |
||||
YADM_IF USER="{LOCAL_USER}" |
||||
Included section for user = YADM_USER (YADM_USER repeated) |
||||
YADM_END |
||||
YADM_IF USER="wronguser2" |
||||
wrong user 2 |
||||
YADM_END |
||||
YADM_IF DISTRO="wrongdistro1" |
||||
wrong distro 1 |
||||
YADM_END |
||||
YADM_IF DISTRO="{LOCAL_DISTRO}" |
||||
Included section for distro = YADM_DISTRO (YADM_DISTRO repeated) |
||||
YADM_END |
||||
YADM_IF DISTRO="wrongdistro2" |
||||
wrong distro 2 |
||||
YADM_END |
||||
end of template |
||||
''' |
||||
EXPECTED = f''' |
||||
start of template |
||||
builtin class = >{LOCAL_CLASS}< |
||||
builtin os = >{LOCAL_SYSTEM}< |
||||
builtin host = >{LOCAL_HOST}< |
||||
builtin user = >{LOCAL_USER}< |
||||
builtin distro = >{LOCAL_DISTRO}< |
||||
Included section for class = {LOCAL_CLASS} ({LOCAL_CLASS} repeated) |
||||
Included section for os = {LOCAL_SYSTEM} ({LOCAL_SYSTEM} repeated) |
||||
Included section for host = {LOCAL_HOST} ({LOCAL_HOST} repeated) |
||||
Included section for user = {LOCAL_USER} ({LOCAL_USER} repeated) |
||||
Included section for distro = {LOCAL_DISTRO} ({LOCAL_DISTRO} repeated) |
||||
end of template |
||||
''' |
||||
|
||||
|
||||
def test_template_builtin(runner, yadm, tmpdir):
    """Test template_builtin"""

    # write the template input and reserve an output path
    input_file = tmpdir.join('input')
    input_file.write(TEMPLATE, ensure=True)
    output_file = tmpdir.join('output')

    shell_script = f"""
        YADM_TEST=1 source {yadm}
        local_class="{LOCAL_CLASS}"
        local_system="{LOCAL_SYSTEM}"
        local_host="{LOCAL_HOST}"
        local_user="{LOCAL_USER}"
        local_distro="{LOCAL_DISTRO}"
        template_builtin "{input_file}" "{output_file}"
    """
    run = runner(command=['bash'], inp=shell_script)
    assert run.success
    assert run.err == ''
    # the processed output must match the expected rendering exactly
    assert output_file.read() == EXPECTED
@ -0,0 +1,99 @@ |
||||
"""Unit tests: template_j2cli & template_envtpl""" |
||||
import pytest |
||||
|
||||
# these values are also testing the handling of bizarre characters
LOCAL_CLASS = "j2_Test+@-!^Class"
LOCAL_SYSTEM = "j2_Test+@-!^System"
LOCAL_HOST = "j2_Test+@-!^Host"
LOCAL_USER = "j2_Test+@-!^User"
LOCAL_DISTRO = "j2_Test+@-!^Distro"
# Input Jinja2 template: '{{{{' / '{{%-' are f-string escapes producing
# literal '{{' / '{%-' so YADM_* names are resolved by j2cli/envtpl, not
# by Python.  One matching and two non-matching blocks per condition.
TEMPLATE = f'''
start of template
j2 class = >{{{{YADM_CLASS}}}}<
j2 os = >{{{{YADM_OS}}}}<
j2 host = >{{{{YADM_HOSTNAME}}}}<
j2 user = >{{{{YADM_USER}}}}<
j2 distro = >{{{{YADM_DISTRO}}}}<
{{%- if YADM_CLASS == "wrongclass1" %}}
wrong class 1
{{%- endif %}}
{{%- if YADM_CLASS == "{LOCAL_CLASS}" %}}
Included section for class = {{{{YADM_CLASS}}}} ({{{{YADM_CLASS}}}} repeated)
{{%- endif %}}
{{%- if YADM_CLASS == "wrongclass2" %}}
wrong class 2
{{%- endif %}}
{{%- if YADM_OS == "wrongos1" %}}
wrong os 1
{{%- endif %}}
{{%- if YADM_OS == "{LOCAL_SYSTEM}" %}}
Included section for os = {{{{YADM_OS}}}} ({{{{YADM_OS}}}} repeated)
{{%- endif %}}
{{%- if YADM_OS == "wrongos2" %}}
wrong os 2
{{%- endif %}}
{{%- if YADM_HOSTNAME == "wronghost1" %}}
wrong host 1
{{%- endif %}}
{{%- if YADM_HOSTNAME == "{LOCAL_HOST}" %}}
Included section for host = {{{{YADM_HOSTNAME}}}} ({{{{YADM_HOSTNAME}}}} again)
{{%- endif %}}
{{%- if YADM_HOSTNAME == "wronghost2" %}}
wrong host 2
{{%- endif %}}
{{%- if YADM_USER == "wronguser1" %}}
wrong user 1
{{%- endif %}}
{{%- if YADM_USER == "{LOCAL_USER}" %}}
Included section for user = {{{{YADM_USER}}}} ({{{{YADM_USER}}}} repeated)
{{%- endif %}}
{{%- if YADM_USER == "wronguser2" %}}
wrong user 2
{{%- endif %}}
{{%- if YADM_DISTRO == "wrongdistro1" %}}
wrong distro 1
{{%- endif %}}
{{%- if YADM_DISTRO == "{LOCAL_DISTRO}" %}}
Included section for distro = {{{{YADM_DISTRO}}}} ({{{{YADM_DISTRO}}}} again)
{{%- endif %}}
{{%- if YADM_DISTRO == "wrongdistro2" %}}
wrong distro 2
{{%- endif %}}
end of template
'''
# Expected output after Jinja2 processing: variables substituted,
# matching blocks kept, non-matching blocks removed.
EXPECTED = f'''
start of template
j2 class = >{LOCAL_CLASS}<
j2 os = >{LOCAL_SYSTEM}<
j2 host = >{LOCAL_HOST}<
j2 user = >{LOCAL_USER}<
j2 distro = >{LOCAL_DISTRO}<
Included section for class = {LOCAL_CLASS} ({LOCAL_CLASS} repeated)
Included section for os = {LOCAL_SYSTEM} ({LOCAL_SYSTEM} repeated)
Included section for host = {LOCAL_HOST} ({LOCAL_HOST} again)
Included section for user = {LOCAL_USER} ({LOCAL_USER} repeated)
Included section for distro = {LOCAL_DISTRO} ({LOCAL_DISTRO} again)
end of template
'''
||||
|
||||
|
||||
@pytest.mark.parametrize('processor', ('j2cli', 'envtpl'))
def test_template_j2(runner, yadm, tmpdir, processor):
    """Test processing by j2cli & envtpl"""

    # write the template input and reserve an output path
    input_file = tmpdir.join('input')
    input_file.write(TEMPLATE, ensure=True)
    output_file = tmpdir.join('output')

    shell_script = f"""
        YADM_TEST=1 source {yadm}
        local_class="{LOCAL_CLASS}"
        local_system="{LOCAL_SYSTEM}"
        local_host="{LOCAL_HOST}"
        local_user="{LOCAL_USER}"
        local_distro="{LOCAL_DISTRO}"
        template_{processor} "{input_file}" "{output_file}"
    """
    run = runner(command=['bash'], inp=shell_script)
    assert run.success
    assert run.err == ''
    # both processors must render the identical expected output
    assert output_file.read() == EXPECTED