from lxml import etree
from os.path import isfile, join, isdir
from os import listdir, mkdir, environ
from json import dump, load, dumps, loads

from pytest import fixture, mark

# force the English locale before tiramisu is imported
environ['TIRAMISU_LOCALE'] = 'en'

from tiramisu import Config
from tiramisu.error import PropertiesOptionError

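# Each test loads a generated tiramisu description found under
# tests/dictionaries/<test>/tiramisu/, builds a Config from it and compares
# the dumped values and owners with the JSON files stored in the makedict/
# sub-directory of the same test.
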
dico_dirs = 'tests/dictionaries'

test_ok = set()
for test in listdir(dico_dirs):
    if isdir(join(dico_dirs, test)):
        if isdir(join(dico_dirs, test, 'tiramisu')):
            test_ok.add(test)

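# set debug to True to regenerate the expected makedict/*.json files
# (only done during the 'base' run)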
debug = False
#debug = True
excludes = set([])
#excludes = set(['01base_file_utfchar'])
test_ok -= excludes
#test_ok = ['10check_valid_ipnetmask']
test_ok = list(test_ok)
test_ok.sort()


# every collected dictionary becomes one fixture parameter, so each test below
# runs once per dictionary
@fixture(scope="module", params=test_ok)
def test_dir(request):
    return request.param


async def launch_flattener(test_dir,
                           filename,
                           ):
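    """Build a Config from <test_dir>/tiramisu/<filename>.py and compare its
    values and owners with the JSON files kept in <test_dir>/makedict/.
    """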
    makedict_dir = join(test_dir, 'makedict')
    makedict_file = join(makedict_dir, 'base.json')
    makedict_before = join(makedict_dir, 'before.json')
    makedict_after = join(makedict_dir, 'after.json')

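    # import the generated description module
    # (e.g. tests.dictionaries.<test>.tiramisu.base)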
    modulepath = test_dir.replace('/', '.') + f'.tiramisu.{filename}'
    mod = __import__(modulepath)
    for token in modulepath.split(".")[1:]:
        mod = getattr(mod, token)
    config = await Config(mod.option_0)

    # change default rights: remove force_store_value from the default
    # read_only and read_write property sets
    ro_origin = await config.property.getdefault('read_only', 'append')
    ro_append = frozenset(ro_origin - {'force_store_value'})
    rw_origin = await config.property.getdefault('read_write', 'append')
    rw_append = frozenset(rw_origin - {'force_store_value'})
    await config.property.setdefault(ro_append, 'read_only', 'append')
    await config.property.setdefault(rw_append, 'read_write', 'append')

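    # store some information, switch to read-only mode without the mandatory
    # check, then dump every value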
    await config.information.set('test_information', 'value')
    await config.property.read_only()
    await config.property.pop('mandatory')
    await config.information.set('info', 'value')
    config_dict = await config.value.dict(leader_to_list=True)
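    # 'base': write the reference base.json when it is missing or in debug mode
    # 'multi': the description is duplicated under the '1' and '2' prefixes;
    # both copies must be identical and the prefix is stripped before the
    # comparison with base.json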
    if filename == 'base':
        if not isdir(makedict_dir):
            mkdir(makedict_dir)
        if not isfile(makedict_file) or debug:
            with open(makedict_file, 'w') as fh:
                dump(config_dict, fh, indent=4)
                fh.write('\n')
    else:
        config_dict_prefix = {'1': {}, '2': {}}
        for key, value in config_dict.items():
            prefix, path = key.split('.', 1)
            if value and isinstance(value, list) and isinstance(value[0], dict):
                new_value = []
                for dct in value:
                    new_dct = {}
                    for k, v in dct.items():
                        k = k.split('.', 1)[-1]
                        new_dct[k] = v
                    new_value.append(new_dct)
                value = new_value
            config_dict_prefix[prefix][path] = value
        assert loads(dumps(config_dict_prefix['1'])) == loads(dumps(config_dict_prefix['2']))
        config_dict = config_dict_prefix['1']
    if not isfile(makedict_file):
        raise Exception(f'reference file {makedict_file} does not exist')
    with open(makedict_file, 'r') as fh:
        assert load(fh) == loads(dumps(config_dict)), f"error in file {makedict_file}"
    # values and owners before force_store_value is activated
    await value_owner(makedict_before, config, filename)
    # deploy: put force_store_value back in the default property sets and
    # activate it
    ro = await config.property.getdefault('read_only', 'append')
    ro = frozenset(list(ro) + ['force_store_value'])
    await config.property.setdefault(ro, 'read_only', 'append')
    rw = await config.property.getdefault('read_write', 'append')
    rw = frozenset(list(rw) + ['force_store_value'])
    await config.property.setdefault(rw, 'read_write', 'append')
    await config.property.add('force_store_value')
    # values and owners once the values have been force-stored
    await value_owner(makedict_after, config, filename)


async def value_owner(makedict_value_owner, config, filename):
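    """Collect the value and the owner of every option of the config and
    compare them with the expected JSON file.
    """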
    ret = {}
    for key in await config.option.list(recursive=True):
        path = await key.option.path()
        if await key.option.isfollower():
            # follower options are indexed: collect one value and one owner
            # per index; an inaccessible index is reported as the error message
            value = []
            owner = []
            for idx in range(0, await key.value.len()):
                try:
                    option = config.option(path, idx)
                    value.append(await option.value.get())
                    owner.append(await option.owner.get())
                except PropertiesOptionError as err:
                    value.append(str(err))
                    owner.append('error')
        else:
            value = await key.value.get()
            owner = await key.owner.get()
        ret[path] = {'owner': owner,
                     'value': value,
                     }
    if filename == 'base':
        if not isfile(makedict_value_owner) or debug:
            with open(makedict_value_owner, 'w') as fh:
                dump(ret, fh, indent=4)
                fh.write('\n')
    else:
        ret_prefix = {'1': {}, '2': {}}
        for key, value in ret.items():
            prefix, path = key.split('.', 1)
            ret_prefix[prefix][path] = value
        assert loads(dumps(ret_prefix['1'])) == loads(dumps(ret_prefix['2']))
        ret = ret_prefix['1']
    with open(makedict_value_owner, 'r') as fh:
        assert load(fh) == loads(dumps(ret)), f"error in file {makedict_value_owner}"

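# 'base' checks the plain dictionary; 'multi' loads tiramisu/multi.py, where
# the same description is expected under the '1' and '2' prefixes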

@mark.asyncio
async def test_dictionary(test_dir):
    test_dir = join(dico_dirs, test_dir)
    await launch_flattener(test_dir, 'base')


@mark.asyncio
async def test_dictionary_multi(test_dir):
    test_dir = join(dico_dirs, test_dir)
    await launch_flattener(test_dir, 'multi')