Merge branch 'float_version' into develop
commit d5661511da
9 changed files with 361 additions and 133 deletions
.pre-commit-config.yaml (new file, 47 lines)
@@ -0,0 +1,47 @@
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v2.3.0
    hooks:
      - id: check-yaml
      - id: end-of-file-fixer
      - id: trailing-whitespace
      - id: check-ast
      - id: check-added-large-files
      - id: check-json
      - id: check-executables-have-shebangs
      - id: check-symlinks

  - repo: https://github.com/psf/black
    rev: 22.10.0
    hooks:
      - id: black

  # - repo: https://github.com/hhatto/autopep8
  #   rev: v2.0.4
  #   hooks:
  #     - id: autopep8

  # - repo: https://github.com/pre-commit/mirrors-mypy
  #   rev: v1.6.1
  #   hooks:
  #     - id: mypy

  # - repo: https://github.com/PyCQA/pylint
  #   rev: v3.0.2
  #   hooks:
  #     - id: pylint

  # - repo: https://github.com/PyCQA/isort
  #   rev: 5.11.5
  #   hooks:
  #     - id: isort

  # - repo: https://github.com/motet-a/jinjalint
  #   rev: 0.5
  #   hooks:
  #     - id: jinjalint

  # - repo: https://github.com/rstcheck/rstcheck
  #   rev: v6.2.0
  #   hooks:
  #     - id: rstcheck
@@ -14,3 +14,47 @@ This process describes how to install and run the project locally, e.g. for deve
 *Nota*: command is to be executed through the terminal
 
 `pip install rougail`
+
+Code quality
+---------------
+
+We are using `pre-commit <https://pre-commit.com/>`_; the :file:`.pre-commit-config.yaml`
+pre-commit configuration file is located at the project's root.
+
+You need to:
+
+- install the pre-commit library::
+
+    pip install pre-commit
+
+- register the pre-commit git hooks with this command::
+
+    pre-commit install
+
+- launch the code quality checks with::
+
+    pre-commit
+
+or simply commit your changes: pre-commit will be launched automatically.
+
+.. attention:: If an error is found, the commit will not happen.
+   You must resolve all errors that pre-commit points out before committing.
+
+.. note:: If you need for some reason to disable `pre-commit`, just set
+   the `PRE_COMMIT_ALLOW_NO_CONFIG` environment variable before committing::
+
+       PRE_COMMIT_ALLOW_NO_CONFIG=1 git commit
+
+Coding standard
+------------------
+
+We use black:
+
+.. code-block:: yaml
+
+    - repo: https://github.com/psf/black
+      rev: 22.10.0
+      hooks:
+      - id: black
+
+And some YAML and JSON validators.
@@ -26,11 +26,14 @@ dependencies = [
     "ruamel.yaml ~= 0.17.40",
     "pydantic ~= 2.5.2",
     "jinja2 ~= 3.1.2",
-    "tiramisu ~= 4.1.0",
+    "tiramisu ~= 4.1.0"
 ]
-[project.optional-dependancies]
+
+[project.optional-dependencies]
 dev = [
     "pylint ~= 3.0.3",
+    "pytest ~= 8.2.2",
+    "lxml ~= 5.2.2"
 ]
 
 [tool.commitizen]
@@ -28,14 +28,14 @@ You should have received a copy of the GNU General Public License
 along with this program; if not, write to the Free Software
 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 """
-from os.path import join, abspath, dirname
+from os.path import abspath, dirname, join
 
 
 ROUGAILROOT = "/srv/rougail"
 DTDDIR = join(dirname(abspath(__file__)), "data")
 
 
 RougailConfig = {
+    "default_dictionary_format_version": None,
     "dictionaries_dir": [join(ROUGAILROOT, "dictionaries")],
     "extra_dictionaries": {},
     "services_dir": [join(ROUGAILROOT, "services")],
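The new `default_dictionary_format_version` key defaults to `None` and is meant to be set on `RougailConfig` before a conversion. A minimal usage sketch, taking the paths and the "1.1" value from the tests added later in this commit:

    # Minimal sketch: rely on the configured default when a dictionary omits "version".
    from rougail import Rougail, RougailConfig

    RougailConfig["dictionaries_dir"] = ["tests/data/dict1"]    # dict.yml without a "version" key
    RougailConfig["default_dictionary_format_version"] = "1.1"  # fallback format version
    rougail = Rougail()
    config = rougail.get_config()                               # no error 27 despite the missing key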
@@ -29,46 +29,43 @@ along with this program; if not, write to the Free Software
 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 """
 import logging
+from itertools import chain
 from pathlib import Path
+from re import compile, findall
 from typing import (
-    Optional,
-    Union,
-    get_type_hints,
     Any,
-    Literal,
-    List,
     Dict,
     Iterator,
+    List,
+    Literal,
+    Optional,
     Tuple,
+    Union,
+    get_type_hints,
 )
-from itertools import chain
-from re import findall, compile
 
+from pydantic import ValidationError
 from ruamel.yaml import YAML
 from ruamel.yaml.comments import CommentedMap
-from pydantic import ValidationError
 
 from tiramisu.error import display_list
 
-from .i18n import _
 from .annotator import SpaceAnnotator
-from .tiramisureflector import TiramisuReflector
-from .utils import get_realpath
+from .error import DictConsistencyError
+from .i18n import _
+from .object_model import CONVERT_OPTION  # Choice,
 from .object_model import (
-    CONVERT_OPTION,
-    Family,
-    Dynamic,
-    Variable,
-    #Choice,
-    SymLink,
     CALCULATION_TYPES,
-    Calculation,
-    VariableCalculation,
     PARAM_TYPES,
     AnyParam,
+    Calculation,
+    Dynamic,
+    Family,
+    SymLink,
+    Variable,
+    VariableCalculation,
 )
-from .error import DictConsistencyError
+from .tiramisureflector import TiramisuReflector
+from .utils import get_realpath
 
 property_types = Union[Literal[True], Calculation]
 properties_types = Dict[str, property_types]
@@ -102,11 +99,12 @@ class Property:
 class Paths:
     _regexp_relative = compile(r"^_*\.(.*)$")
 
-    def __init__(self,
-                 default_namespace: str,
-                 ) -> None:
+    def __init__(
+        self,
+        default_namespace: str,
+    ) -> None:
         self._data: Dict[str, Union[Variable, Family]] = {}
-        self._dynamics: Dict[str: str] = {}
+        self._dynamics: Dict[str:str] = {}
         self.default_namespace = default_namespace
         self.path_prefix = None
 
@@ -136,7 +134,7 @@ class Paths:
         xmlfiles: List[str],
     ) -> Any:
         suffix = None
-        if version != '1.0' and self._regexp_relative.search(path):
+        if version != "1.0" and self._regexp_relative.search(path):
             relative, subpath = path.split(".", 1)
             relative_len = len(relative)
             path_len = current_path.count(".")
@@ -145,29 +143,34 @@ class Paths:
         else:
             path = get_realpath(path, suffix_path)
         dynamic = None
-        if not path in self._data and '{{ suffix }}' not in path:
+        if not path in self._data and "{{ suffix }}" not in path:
             new_path = None
             current_path = None
-            for name in path.split('.'):
+            for name in path.split("."):
                 parent_path = current_path
                 if current_path:
-                    current_path += '.' + name
+                    current_path += "." + name
                 else:
                     current_path = name
                 if current_path in self._data:
                     if new_path:
-                        new_path += '.' + name
+                        new_path += "." + name
                     else:
                         new_path = name
                     continue
                 for dynamic_path in self._dynamics:
-                    parent_dynamic, name_dynamic = dynamic_path.rsplit('.', 1)
-                    if version == '1.0' and parent_dynamic == parent_path and name_dynamic.endswith('{{ suffix }}') and name == name_dynamic.replace('{{ suffix }}', ''):
-                        new_path += '.' + name_dynamic
+                    parent_dynamic, name_dynamic = dynamic_path.rsplit(".", 1)
+                    if (
+                        version == "1.0"
+                        and parent_dynamic == parent_path
+                        and name_dynamic.endswith("{{ suffix }}")
+                        and name == name_dynamic.replace("{{ suffix }}", "")
+                    ):
+                        new_path += "." + name_dynamic
                         break
                 else:
                     if new_path:
-                        new_path += '.' + name
+                        new_path += "." + name
                     else:
                         new_path = name
             path = new_path
@@ -175,33 +178,36 @@ class Paths:
             current_path = None
             new_path = current_path
             suffixes = []
-            for name in path.split('.'):
+            for name in path.split("."):
                 parent_path = current_path
                 if current_path:
-                    current_path += '.' + name
+                    current_path += "." + name
                 else:
                     current_path = name
-                #parent_path, name_path = path.rsplit('.', 1)
+                # parent_path, name_path = path.rsplit('.', 1)
                 if current_path in self._data:
                     if new_path:
-                        new_path += '.' + name
+                        new_path += "." + name
                     else:
                         new_path = name
                     continue
                 for dynamic_path in self._dynamics:
-                    parent_dynamic, name_dynamic = dynamic_path.rsplit('.', 1)
-                    if "{{ suffix }}" not in name_dynamic or parent_path != parent_dynamic:
+                    parent_dynamic, name_dynamic = dynamic_path.rsplit(".", 1)
+                    if (
+                        "{{ suffix }}" not in name_dynamic
+                        or parent_path != parent_dynamic
+                    ):
                         continue
                     regexp = "^" + name_dynamic.replace("{{ suffix }}", "(.*)")
                     finded = findall(regexp, name)
                     if len(finded) != 1 or not finded[0]:
                         continue
                     suffixes.append(finded[0])
-                    new_path += '.' + name_dynamic
+                    new_path += "." + name_dynamic
                     break
                 else:
                     if new_path:
-                        new_path += '.' + name
+                        new_path += "." + name
                     else:
                         new_path = name
                 if "{{ suffix }}" in name:
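As an aside, the suffix extraction both loops above rely on is a plain regular-expression capture; a small self-contained illustration (the family names are invented for the example):

    # The dynamic-family name acts as a pattern: "{{ suffix }}" becomes a capture group.
    from re import findall

    name_dynamic = "disk_{{ suffix }}"  # dynamic family name as declared (example)
    name = "disk_sda"                   # concrete name met while walking a path (example)
    regexp = "^" + name_dynamic.replace("{{ suffix }}", "(.*)")
    finded = findall(regexp, name)      # -> ["sda"], the recovered suffix
    assert finded == ["sda"]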
@@ -213,9 +219,14 @@ class Paths:
             return None, None
         option = self._data[path]
         option_namespace = option.namespace
-        if self.default_namespace not in [namespace, option_namespace] and namespace != option_namespace:
-            msg = _(f'A variable or a family located in the "{option_namespace}" namespace '
-                    f'shall not be used in the "{namespace}" namespace')
+        if (
+            self.default_namespace not in [namespace, option_namespace]
+            and namespace != option_namespace
+        ):
+            msg = _(
+                f'A variable or a family located in the "{option_namespace}" namespace '
+                f'shall not be used in the "{namespace}" namespace'
+            )
             raise DictConsistencyError(msg, 38, xmlfiles)
         return option, suffixes
 
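The condition reformatted above encodes one rule: a cross-namespace reference is only allowed when the two namespaces match or when one of them is the default namespace. A plain-Python restatement of that rule (the "rougail" default value here is illustrative only):

    def cross_namespace_allowed(namespace, option_namespace, default_namespace="rougail"):
        # Mirrors the guard above: error 38 is raised when this returns False.
        return (
            namespace == option_namespace
            or default_namespace in (namespace, option_namespace)
        )

    assert cross_namespace_allowed("rougail", "extra")      # default namespace involved: allowed
    assert not cross_namespace_allowed("extra1", "extra2")  # two non-default namespaces: rejected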
@@ -292,7 +303,7 @@ class ParserVariable:
         #
         self.family = Family
         self.dynamic = Dynamic
-        self.choice = Variable #Choice
+        self.choice = Variable  # Choice
         #
         self.exclude_imports = []
         self.informations = Informations()
@@ -309,7 +320,7 @@ class ParserVariable:
         self.variable = Variable
         hint = get_type_hints(self.dynamic)
         # FIXME: only for format 1.0
-        hint['variable'] = str
+        hint["variable"] = str
         self.family_types = hint["type"].__args__  # pylint: disable=W0201
         self.family_attrs = frozenset(  # pylint: disable=W0201
             set(hint) - {"name", "path", "xmlfiles"} | {"redefine"}
@@ -320,7 +331,9 @@ class ParserVariable:
         #
         hint = get_type_hints(self.variable)
 
-        self.variable_types = self.convert_options #hint["type"].__args__ # pylint: disable=W0201
+        self.variable_types = (
+            self.convert_options
+        )  # hint["type"].__args__ # pylint: disable=W0201
         #
         hint = get_type_hints(self.choice)
         self.choice_attrs = frozenset(  # pylint: disable=W0201
@@ -379,7 +392,7 @@ class ParserVariable:
             else:
                 return "variable"
         else:
-            if version == '1.0':
+            if version == "1.0":
                 msg = f'Invalid value for the variable "{path}": "{obj}"'
                 raise DictConsistencyError(msg, 102, [filename])
             return "variable"
@@ -420,7 +433,7 @@ class ParserVariable:
                 msg = f'the variable or family name "{name}" is incorrect, it must not starts with "_" character'
                 raise DictConsistencyError(msg, 16, [filename])
             path = f"{subpath}.{name}"
-            if version == '0.1' and not isinstance(obj, dict) and obj is not None:
+            if version == "0.1" and not isinstance(obj, dict) and obj is not None:
                 msg = f'the variable "{path}" has a wrong type "{type(obj)}"'
                 raise DictConsistencyError(msg, 17, [filename])
             typ = self.is_family_or_variable(
@@ -460,7 +473,7 @@ class ParserVariable:
         first_variable: bool = False,
         family_is_leadership: bool = False,
         family_is_dynamic: bool = False,
-        parent_dynamic: Optional[str] = None
+        parent_dynamic: Optional[str] = None,
     ) -> None:
         """Parse a family"""
         if obj is None:
@@ -505,14 +518,14 @@ class ParserVariable:
         if self.get_family_or_variable_type(family_obj) == "dynamic":
             family_is_dynamic = True
             parent_dynamic = path
-            if version == '1.0' and '{{ suffix }}' not in name:
-                name += '{{ suffix }}'
-                path += '{{ suffix }}'
-            if '{{ suffix }}' not in name:
+            if version == "1.0" and "{{ suffix }}" not in name:
+                name += "{{ suffix }}"
+                path += "{{ suffix }}"
+            if "{{ suffix }}" not in name:
                 msg = f'dynamic family name must have "{{{{ suffix }}}}" in his name for "{path}"'
                 raise DictConsistencyError(msg, 13, [filename])
-        if version != '1.0' and not family_obj and comment:
-            family_obj['description'] = comment
+        if version != "1.0" and not family_obj and comment:
+            family_obj["description"] = comment
         self.add_family(
             path,
             name,
@@ -592,29 +605,41 @@ class ParserVariable:
             family_obj = self.dynamic
             if version == "1.0":
                 if "variable" not in family:
-                    raise DictConsistencyError(f'dynamic family must have "variable" attribute for "{path}"', 101, family["xmlfiles"])
-                if 'dynamic' in family:
-                    raise DictConsistencyError('variable and dynamic cannot be set together in the dynamic family "{path}"', 100, family['xmlfiles'])
-                family['dynamic'] = {'type': 'variable',
-                                     'variable': family['variable'],
-                                     'propertyerror': False,
-                                     'allow_none': True,
-                                     }
-                del family['variable']
-                #FIXME only for 1.0
+                    raise DictConsistencyError(
+                        f'dynamic family must have "variable" attribute for "{path}"',
+                        101,
+                        family["xmlfiles"],
+                    )
+                if "dynamic" in family:
+                    raise DictConsistencyError(
+                        'variable and dynamic cannot be set together in the dynamic family "{path}"',
+                        100,
+                        family["xmlfiles"],
+                    )
+                family["dynamic"] = {
+                    "type": "variable",
+                    "variable": family["variable"],
+                    "propertyerror": False,
+                    "allow_none": True,
+                }
+                del family["variable"]
+                # FIXME only for 1.0
                 if "variable" in family:
-                    raise Exception(f'dynamic family must not have "variable" attribute for "{family["path"]}" in {family["xmlfiles"]}')
+                    raise Exception(
+                        f'dynamic family must not have "variable" attribute for "{family["path"]}" in {family["xmlfiles"]}'
+                    )
         else:
             family_obj = self.family
         # convert to Calculation objects
-        self.parse_parameters(path,
-                              family,
-                              filename,
-                              family_is_dynamic,
-                              False,
-                              version,
-                              typ='family',
-                              )
+        self.parse_parameters(
+            path,
+            family,
+            filename,
+            family_is_dynamic,
+            False,
+            version,
+            typ="family",
+        )
         try:
             self.paths.add(
                 path,
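In other words, for format 1.0 the legacy `variable` attribute of a dynamic family is rewritten into a `dynamic` calculation before parsing continues; a standalone sketch with made-up data:

    # Made-up family dict; only the rewrite itself comes from the hunk above.
    family = {"variable": "varname", "xmlfiles": ["dict.yml"]}

    family["dynamic"] = {
        "type": "variable",
        "variable": family["variable"],
        "propertyerror": False,
        "allow_none": True,
    }
    del family["variable"]

    assert "variable" not in family
    assert family["dynamic"]["variable"] == "varname"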
@@ -651,27 +676,28 @@ class ParserVariable:
         parent_dynamic: Optional[str] = None,
     ) -> None:
         """Parse variable"""
-        if version == '1.0' or isinstance(obj, dict):
+        if version == "1.0" or isinstance(obj, dict):
             if obj is None:
                 obj = {}
             extra_attrs = set(obj) - self.choice_attrs
         else:
             extra_attrs = []
-            obj = {'default': obj}
+            obj = {"default": obj}
             if comment:
-                obj['description'] = comment
+                obj["description"] = comment
         if extra_attrs:
             raise Exception(
                 f'"{path}" is not a valid variable, there are additional '
                 f'attributes: "{", ".join(extra_attrs)}"'
             )
-        self.parse_parameters(path,
-                              obj,
-                              filename,
-                              family_is_dynamic,
-                              family_is_leadership is True and first_variable is False,
-                              version,
-                              )
+        self.parse_parameters(
+            path,
+            obj,
+            filename,
+            family_is_dynamic,
+            family_is_leadership is True and first_variable is False,
+            version,
+        )
         self.parse_params(path, obj)
         if path in self.paths:
             if "exists" in obj and not obj.pop("exists"):
@@ -680,7 +706,11 @@ class ParserVariable:
                     msg = f'Variable "{path}" already exists'
                     raise DictConsistencyError(msg, 45, [filename])
             self.paths.add(
-                path, self.paths[path].model_copy(update=obj),family_is_dynamic, parent_dynamic, force=True
+                path,
+                self.paths[path].model_copy(update=obj),
+                family_is_dynamic,
+                parent_dynamic,
+                force=True,
             )
             self.paths[path].xmlfiles.append(filename)
         else:
@@ -694,12 +724,7 @@ class ParserVariable:
                 raise DictConsistencyError(msg, 46, [filename])
             obj["path"] = path
             self.add_variable(
-                name,
-                obj,
-                filename,
-                family_is_dynamic,
-                parent_dynamic,
-                version
+                name, obj, filename, family_is_dynamic, parent_dynamic, version
             )
             if family_is_leadership:
                 if first_variable:
@@ -707,18 +732,19 @@ class ParserVariable:
             else:
                 self.followers.append(path)
 
-    def parse_parameters(self,
-                         path: str,
-                         obj: dict,
-                         filename: str,
-                         family_is_dynamic: bool,
-                         is_follower: bool,
-                         version: str,
-                         *,
-                         typ: str='variable',
-                         ):
+    def parse_parameters(
+        self,
+        path: str,
+        obj: dict,
+        filename: str,
+        family_is_dynamic: bool,
+        is_follower: bool,
+        version: str,
+        *,
+        typ: str = "variable",
+    ):
         """Parse variable or family parameters"""
-        if typ == 'variable':
+        if typ == "variable":
             calculations = self.choice_calculations
         else:
             calculations = self.family_calculations
@@ -783,7 +809,18 @@ class ParserVariable:
         params = []
         for key, val in obj["params"].items():
             try:
-                params.append(AnyParam(key=key, value=val, type="any", path=None, is_follower=None, attribute=None, family_is_dynamic=None, xmlfiles=None))
+                params.append(
+                    AnyParam(
+                        key=key,
+                        value=val,
+                        type="any",
+                        path=None,
+                        is_follower=None,
+                        attribute=None,
+                        family_is_dynamic=None,
+                        xmlfiles=None,
+                    )
+                )
             except ValidationError as err:
                 raise Exception(
                     f'"{key}" has an invalid "params" for {path}: {err}'
@@ -797,7 +834,7 @@ class ParserVariable:
         filename: str,
         family_is_dynamic: bool,
         parent_dynamic: Optional[str],
-        version: str
+        version: str,
     ) -> None:
         """Add a new variable"""
         if not isinstance(filename, list):
@@ -916,11 +953,11 @@ class ParserVariable:
             else:
                 param_typ = val["type"]
             val["key"] = key
-            val['path'] = path
-            val['family_is_dynamic'] = family_is_dynamic
-            val['is_follower'] = is_follower
-            val['attribute'] = attribute
-            val['xmlfiles'] = xmlfiles
+            val["path"] = path
+            val["family_is_dynamic"] = family_is_dynamic
+            val["is_follower"] = is_follower
+            val["attribute"] = attribute
+            val["xmlfiles"] = xmlfiles
             try:
                 params.append(PARAM_TYPES[param_typ](**val))
             except ValidationError as err:
@@ -1031,10 +1068,11 @@ class RougailConvert(ParserVariable):
         if path_prefix:
             self.path_prefix = None
 
-    def get_comment(self,
-                    name: str,
-                    objects: CommentedMap,
-                    ) -> Optional[str]:
+    def get_comment(
+        self,
+        name: str,
+        objects: CommentedMap,
+    ) -> Optional[str]:
         if name in objects.ca.items:
             comment = objects.ca.items[name][2]
         else:
@@ -1110,8 +1148,14 @@ class RougailConvert(ParserVariable):
                version = str(obj.pop(name))
                break
        else:
-            msg = '"version" attribut is mandatory in YAML file'
-            raise DictConsistencyError(msg, 27, [filename])
+            # the `version` attribute is not mandatory
+            default_version = self.rougailconfig["default_dictionary_format_version"]
+            if default_version is not None:
+                version = default_version
+            else:
+                msg = '"version" attribut is mandatory in YAML file'
+                raise DictConsistencyError(msg, 27, [filename])
+
        if version not in self.supported_version:
            msg = f'version "{version}" is not supported, list of supported versions: {display_list(self.supported_version, separator="or", add_quote=True)}'
            raise DictConsistencyError(msg, 28, [filename])
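The hunk above turns error 27 from an unconditional failure into a last resort: an explicit `version` key in the file wins, otherwise the configured default is used, and only when both are missing is the file rejected. A standalone sketch of that rule (the function and variable names here are illustrative, not the module's API):

    from typing import Optional


    def resolve_format_version(
        declared: Optional[str],
        default_version: Optional[str],
        supported=("1.0", "1.1"),
    ) -> str:
        # "version" key present in the file: it wins.
        if declared is not None:
            version = declared
        # Otherwise fall back to RougailConfig["default_dictionary_format_version"].
        elif default_version is not None:
            version = default_version
        # No version anywhere: reject the file (error 27 in the real code).
        else:
            raise ValueError('"version" attribute is mandatory in YAML file')
        # Unknown version: reject the file (error 28 in the real code).
        if version not in supported:
            raise ValueError(f'version "{version}" is not supported')
        return version


    assert resolve_format_version(None, "1.1") == "1.1"
    assert resolve_format_version("1.0", "1.1") == "1.0"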
@@ -23,29 +23,27 @@ along with this program; if not, write to the Free Software
 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 """
 
-from typing import List, Any, Optional, Tuple
-from os.path import join, isfile, isdir, basename
 from os import listdir, makedirs
+from os.path import basename, isdir, isfile, join
+from typing import Any, List, Optional, Tuple
 
 try:
-    from lxml.etree import parse, XMLParser, XMLSyntaxError  # pylint: disable=E0611
-    from lxml.etree import Element, SubElement, tostring
+    from lxml.etree import SubElement  # pylint: disable=E0611
+    from lxml.etree import Element, XMLParser, XMLSyntaxError, parse, tostring
 except ModuleNotFoundError as err:
     parse = None
 
 # from ast import parse as ast_parse
 from json import dumps
-from ruamel.yaml import YAML
-from yaml import dump, SafeDumper
 from pathlib import Path
 
-from .i18n import _
-from .error import UpgradeError
-
-from .utils import normalize_family
+from ruamel.yaml import YAML
+
 from .config import RougailConfig
+from .error import UpgradeError
+from .i18n import _
 from .object_model import CONVERT_OPTION
+from .utils import normalize_family
 
 VERSIONS = ["0.10", "1.0", "1.1"]
 
@@ -644,7 +642,7 @@ class RougailUpgrade:
             ext = "xml"
         else:
             with xmlsrc.open() as xml_fh:
-                root = YAML(typ='safe').load(file_fh)
+                root = YAML(typ="safe").load(file_fh)
             search_function_name = get_function_name(str(root["version"]))
             ext = "yml"
         function_found = False
tests/data/dict1/dict.yml (new file, 5 lines)
@@ -0,0 +1,5 @@
# dict without a version declared (the configured default format version applies)
#version: "1.1"
hello:
  type: string
  default: world
tests/data/dict2/dict.yml (new file, 5 lines)
@@ -0,0 +1,5 @@
# dict with a correct version declared
version: "1.0"
hello:
  type: string
  default: world
tests/test_version.py (new file, 82 lines)
@@ -0,0 +1,82 @@
from shutil import rmtree  # , copyfile, copytree
from os import getcwd, makedirs
from os.path import isfile, join, isdir
from pytest import fixture, raises
from os import listdir

from rougail import Rougail, RougailConfig
from rougail.error import DictConsistencyError

from ruamel.yaml import YAML

# dico_dirs = 'tests/data'
# test_ok = set()
# for test in listdir(dico_dirs):
#     if isdir(join(dico_dirs, test)):
#         test_ok.add(test)
# excludes = set([])
# test_ok -= excludes

# ORI_DIR = getcwd()

# test_ok = list(test_ok)
# test_ok.sort()
##print(test_ok)

# @fixture(scope="module", params=test_ok)
# def test_dir(request):
#     return request.param

"""
The expected behaviour is as follows:

- if no version attribute is defined in the YAML file, the default version attribute of RougailConfig is taken
- if a version attribute is defined in the YAML file, that one is taken
"""


def test_validate_default_version():
    "retrieves the default_dictionary_format_version if there is no version in the YAML file"

    RougailConfig["dictionaries_dir"] = ["tests/data/dict1"]
    RougailConfig["default_dictionary_format_version"] = "1.1"
    rougail = Rougail()
    config = rougail.get_config()

    filename = "tests/data/dict1/dict.yml"
    with open(filename, encoding="utf8") as file_fh:
        objects = YAML(typ="safe").load(file_fh)
    version = rougail.converted.validate_file_version(objects, filename)
    assert version == RougailConfig["default_dictionary_format_version"]


def test_validate_file_version_from_yml():
    "retrieves the version defined in the YAML file itself"

    RougailConfig["dictionaries_dir"] = ["tests/data/dict2"]
    RougailConfig["default_dictionary_format_version"] = "1.1"
    rougail = Rougail()
    config = rougail.get_config()

    filename = "tests/data/dict2/dict.yml"
    with open(filename, encoding="utf8") as file_fh:
        objects = YAML(typ="safe").load(file_fh)
    version = rougail.converted.validate_file_version(objects, filename)
    assert version == "1.0"


def test_retrieve_version_from_config():

    RougailConfig["dictionaries_dir"] = ["tests/data/dict2"]
    RougailConfig["default_dictionary_format_version"] = "1.1"
    rougail = Rougail()
    # FIXME replace with rougail.annotator()
    # rougail.converted.annotator()
    rougail.get_config()
    assert rougail.converted.paths._data["rougail.hello"].version == "1.0"


# def test_dictionary(test_dir):
#     assert getcwd() == ORI_DIR
#     test_dir = join(dico_dirs, test_dir)
#     launch_test(test_dir, 'dict')
#     assert getcwd() == ORI_DIR