from lxml import etree
from os.path import isfile, join, isdir
from pytest import fixture, mark
from os import listdir, mkdir, environ
from json import dump, load, dumps, loads
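
# Force the English locale before tiramisu is imported below, presumably so
# that any generated messages do not depend on the test environment's locale.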
environ['TIRAMISU_LOCALE'] = 'en'

from tiramisu import Config
from tiramisu.error import PropertiesOptionError


dico_dirs = 'tests/dictionaries'

test_ok = set()
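# keep only the test directories that contain a generated tiramisu/ sub-directory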
for test in listdir(dico_dirs):
    if isdir(join(dico_dirs, test)):
        if isdir(join(dico_dirs, test, 'tiramisu')):
            test_ok.add(test)

debug = False
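# set debug to True (uncomment the line below) to regenerate the stored JSON files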
#debug = True
excludes = set([])
#excludes = set(['01base_file_utfchar'])
test_ok -= excludes
#test_ok = ['10check_valid_ipnetmask']

test_ok = list(test_ok)
test_ok.sort()


@fixture(scope="module", params=test_ok)
def test_dir(request):
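    """Return one dictionary test directory name per parametrized run."""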
    return request.param


async def launch_flattener(test_dir):
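    """Build the Config for one dictionary test and compare it with the JSON
    expectations stored in its makedict/ directory (base.json, before.json,
    after.json).
    """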
    makedict_dir = join(test_dir, 'makedict')
    makedict_file = join(makedict_dir, 'base.json')
    makedict_before = join(makedict_dir, 'before.json')
    makedict_after = join(makedict_dir, 'after.json')
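
    # import the generated tiramisu module (<test_dir>/tiramisu/base.py)
    # and build a Config from its option_0 description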
    modulepath = test_dir.replace('/', '.') + '.tiramisu.base'
    mod = __import__(modulepath)
    for token in modulepath.split(".")[1:]:
        mod = getattr(mod, token)
    config = await Config(mod.option_0)

    # change default rights: remove force_store_value from the default
    # read_only/read_write property sets so it can be re-added in the deploy phase
    ro_origin = await config.property.getdefault('read_only', 'append')
    ro_append = frozenset(ro_origin - {'force_store_value'})
    rw_origin = await config.property.getdefault('read_write', 'append')
    rw_append = frozenset(rw_origin - {'force_store_value'})
    await config.property.setdefault(ro_append, 'read_only', 'append')
    await config.property.setdefault(rw_append, 'read_write', 'append')
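
    # export the full configuration in read-only mode, with the mandatory
    # check disabled and a couple of information entries set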
    await config.information.set('test_information', 'value')
    await config.property.read_only()
    await config.property.pop('mandatory')
    await config.information.set('info', 'value')
    config_dict = await config.value.dict(leader_to_list=True)
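
    # write the reference base.json on first run (or when debug is set),
    # then check the exported dict against it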
    if not isdir(makedict_dir):
        mkdir(makedict_dir)
    if not isfile(makedict_file) or debug:
        with open(makedict_file, 'w') as fh:
            dump(config_dict, fh, indent=4)
            fh.write('\n')
    if not isfile(makedict_file):
        raise Exception(f'{makedict_file} does not exist')
    with open(makedict_file, 'r') as fh:
        assert load(fh) == loads(dumps(config_dict))
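
    # record values and owners before force_store_value is applied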
    await value_owner(makedict_before, config)

    # deploy: add force_store_value back to the default read_only/read_write
    # property sets, then record values and owners again
    ro = await config.property.getdefault('read_only', 'append')
    ro = frozenset(list(ro) + ['force_store_value'])
    await config.property.setdefault(ro, 'read_only', 'append')
    rw = await config.property.getdefault('read_write', 'append')
    rw = frozenset(list(rw) + ['force_store_value'])
    await config.property.setdefault(rw, 'read_write', 'append')
    await config.property.add('force_store_value')

    await value_owner(makedict_after, config)


async def value_owner(makedict_value_owner, config):
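    """Collect the value and owner of every option and compare with a JSON file.

    The reference file is (re)written on first run or when debug is set,
    then the collected data is checked against it.
    """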
    ret = {}
    for key in await config.option.list(recursive=True):
        path = await key.option.path()
        if await key.option.isfollower():
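            # follower options are multi-valued: collect value and owner for each
            # index, storing the error message when a property blocks access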
            value = []
            owner = []
            for idx in range(0, await key.value.len()):
                try:
                    option = config.option(path, idx)
                    value.append(await option.value.get())
                    owner.append(await option.owner.get())
                except PropertiesOptionError as err:
                    value.append(str(err))
                    owner.append('error')
        else:
            value = await key.value.get()
            owner = await key.owner.get()
        ret[path] = {'owner': owner,
                     'value': value,
                     }
    if not isfile(makedict_value_owner) or debug:
        with open(makedict_value_owner, 'w') as fh:
            dump(ret, fh, indent=4)
            fh.write('\n')
    with open(makedict_value_owner, 'r') as fh:
        assert load(fh) == loads(dumps(ret))


@mark.asyncio
async def test_dictionary(test_dir):
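    """Compare the generated tiramisu dictionary of one test directory with its stored expectations."""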
    test_dir = join(dico_dirs, test_dir)
    await launch_flattener(test_dir)