from os.path import isfile, join, isdir
from pytest import fixture
from os import listdir, mkdir, environ
from json import dump, load, dumps, loads
from pathlib import Path

# Tiramisu error strings end up in the reference files below, so force the English locale
environ['TIRAMISU_LOCALE'] = 'en'

from .custom import CustomOption
from tiramisu import Config
from tiramisu.error import PropertiesOptionError


dico_dirs = 'tests/dictionaries'

test_ok = set()

# every directory under tests/dictionaries with a generated tiramisu/ description is a test case
for test in listdir(dico_dirs):
    if isdir(join(dico_dirs, test)):
        if isdir(join(dico_dirs, test, 'tiramisu')):
            test_ok.add(test)

debug = False
#debug = True
excludes = set([])
excludes = set([
    '80leadership_subfamily',
    '80valid_enum_variables',
])
#excludes = set(['01base_file_utfchar'])
test_ok -= excludes
#test_ok = ['10base_certificate_list']

test_ok = list(test_ok)
test_ok.sort()


@fixture(scope="module", params=test_ok)
def test_dir(request):
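    """Return one dictionary directory name per parametrized run."""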
    return request.param


def launch_flattener(test_dir,
                     filename,
                     ):
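    """Load <test_dir>/tiramisu/<filename>.py, build a Config from it and
    compare values, owners and mandatory options with the reference JSON
    files stored in <test_dir>/makedict.
    """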
    makedict_dir = join(test_dir, 'makedict')
    makedict_file = join(makedict_dir, 'base.json')
    makedict_before = join(makedict_dir, 'before.json')
    makedict_after = join(makedict_dir, 'after.json')
    informations_file = join(test_dir, 'informations.json')
    mandatory_file = Path(makedict_dir) / 'mandatory.json'

    modulepath = join(test_dir, 'tiramisu', filename + '.py')
    with open(modulepath) as fh:
        optiondescription = {}
        exec(fh.read(), {'CustomOption': CustomOption}, optiondescription)  # pylint: disable=W0122
    config = Config(optiondescription["option_0"])
    # change default rights
    # (force_store_value is removed here and re-enabled in the deploy phase below)
    ro_origin = config.property.default('read_only', 'append')
    ro_append = frozenset(ro_origin - {'force_store_value'})
    rw_origin = config.property.default('read_write', 'append')
    rw_append = frozenset(rw_origin - {'force_store_value'})
    config.property.setdefault(ro_append, 'read_only', 'append')
    config.property.setdefault(rw_append, 'read_write', 'append')

    config.information.set('test_information', 'value')
    config.property.read_only()
    config.property.remove('mandatory')
    config.information.set('info', 'value')
    if isfile(informations_file):
        with open(informations_file) as informations:
            for key, value in load(informations).items():
                if filename == 'base':
                    config.option(key).information.set('test_information', value)
                else:
                    for root in ['1', '2']:
                        config.option(f'{root}.{key}').information.set('test_information', value)
    #
    config_dict = config.value.get()
    if filename == 'base':
        if not isdir(makedict_dir):
            mkdir(makedict_dir)
        if not isfile(makedict_file) or debug:
            with open(makedict_file, 'w') as fh:
                dump(config_dict, fh, indent=4)
                fh.write('\n')
    else:
        # 'multi' variants duplicate the option tree under the '1.' and '2.' prefixes:
        # strip the prefix and check that both copies are identical
        config_dict_prefix = {'1': {}, '2': {}}
        for key, value in config_dict.items():
            prefix, path = key.split('.', 1)
            if value and isinstance(value, list) and isinstance(value[0], dict):
                new_value = []
                for dct in value:
                    new_dct = {}
                    for k, v in dct.items():
                        k = k.split('.', 1)[-1]
                        new_dct[k] = v
                    new_value.append(new_dct)
                value = new_value
            config_dict_prefix[prefix][path] = value
        assert loads(dumps(config_dict_prefix['1'])) == loads(dumps(config_dict_prefix['2']))
        config_dict = config_dict_prefix['1']
    if not isfile(makedict_file):
        raise Exception(f'missing reference file {makedict_file}')
    with open(makedict_file, 'r') as fh:
        assert load(fh) == loads(dumps(config_dict)), f"error in file {makedict_file}"
    #
    value_owner(makedict_before, config, filename)
    # deploy
    ro = config.property.default('read_only', 'append')
    ro = frozenset(list(ro) + ['force_store_value'])
    config.property.setdefault(ro, 'read_only', 'append')
    rw = config.property.default('read_write', 'append')
    rw = frozenset(list(rw) + ['force_store_value'])
    config.property.setdefault(rw, 'read_write', 'append')
    config.property.add('force_store_value')
    #
    value_owner(makedict_after, config, filename)
    #
    mandatory(mandatory_file, config.value.mandatory(), filename)


def value_owner(makedict_value_owner, config, filename):
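    """Record the value and owner of every option and compare the result
    with the reference file, writing it first for the 'base' run when it
    does not exist (or when debug is set).
    """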
    ret = {}
    for key in config.option.list(recursive=True):
        path = key.path()
        if not key.issymlinkoption() and key.isfollower():
            value = []
            owner = []
            for idx in range(0, key.value.len()):
                try:
                    option = config.option(path, idx)
                    value.append(option.value.get())
                    owner.append(option.owner.get())
                except PropertiesOptionError as err:
                    value.append(str(err))
                    owner.append('error')
        else:
            value = key.value.get()
            owner = key.owner.get()
        ret[path] = {'owner': owner,
                     'value': value,
                     }
    if filename == 'base':
        if not isfile(makedict_value_owner) or debug:
            with open(makedict_value_owner, 'w') as fh:
                dump(ret, fh, indent=4)
                fh.write('\n')
    else:
        ret_prefix = {'1': {}, '2': {}}
        for key, value in ret.items():
            prefix, path = key.split('.', 1)
            ret_prefix[prefix][path] = value
        assert loads(dumps(ret_prefix['1'])) == loads(dumps(ret_prefix['2']))
        ret = ret_prefix['1']
    with open(makedict_value_owner, 'r') as fh:
        assert load(fh) == loads(dumps(ret)), f"error in file {makedict_value_owner}"


def mandatory(mandatory_file, mandatories, filename):
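    """Compare the paths of the mandatory options with the reference file,
    writing the reference first when it does not exist.
    """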
    ret = [opt.path() for opt in mandatories]
    if not mandatory_file.is_file():
        with mandatory_file.open('w') as fh:
            dump(ret, fh)
    if filename != 'base':
        ret_prefix = {'1': [], '2': []}
        for key in ret:
            prefix, path = key.split('.', 1)
            ret_prefix[prefix].append(path)
        assert ret_prefix['1'] == ret_prefix['2']
        ret = ret_prefix['1']
    with mandatory_file.open() as fh:
        assert load(fh) == ret, f"error in file {mandatory_file}"


def test_dictionary(test_dir):
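    """Check the 'base' variant of the dictionary."""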
    test_dir = join(dico_dirs, test_dir)
    launch_flattener(test_dir, 'base')


def test_dictionary_multi(test_dir):
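    """Check the 'multi' variant, where the tree is duplicated under the '1' and '2' prefixes."""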
    test_dir = join(dico_dirs, test_dir)
    launch_flattener(test_dir, 'multi')