forked from stove/risotto

Commit: reorganise
Parent: 6e38f1c4d1
Commit: 212699f571

25 changed files with 1224 additions and 739 deletions
0	ansible/__init__.py	Normal file
0	ansible/action_plugins/__init__.py	Normal file
@@ -13,11 +13,12 @@ class ActionModule(ActionBase):
         super(ActionModule, self).run(tmp, task_vars)
         module_args = self._task.args.copy()
         modules = module_args['modules']
-        dataset_directory = RISOTTO_CONFIG['directories']['dataset']
+        dataset_directories = RISOTTO_CONFIG['directories']['datasets']
         install_dir = join('/tmp/risotto/images')
         if isdir(install_dir):
             rmtree(install_dir)
         for module_name, depends in modules.items():
+            for dataset_directory in dataset_directories:
             for depend in depends:
                 manual = join(dataset_directory, depend, 'manual', 'image')
                 if not isdir(manual):
@@ -45,4 +46,13 @@ class ActionModule(ActionBase):
             else:
                 copytree(src_file, dst_file)
         return dict(ansible_facts=dict({}))
+#A REFAIRE ICI tests_dir = join(as_dir, 'tests')
+#A REFAIRE ICI if isdir(tests_dir):
+#A REFAIRE ICI cfg.tests.append(tests_dir)
+#A REFAIRE ICI# for filename in listdir(tests_dir):
+#A REFAIRE ICI# src_file = join(tests_dir, filename)
+#A REFAIRE ICI# dst_file = join(INSTALL_DIR, 'tests', filename)
+#A REFAIRE ICI# applicationservice_copy(src_file,
+#A REFAIRE ICI# dst_file,
+#A REFAIRE ICI# False,
+#A REFAIRE ICI# )
@@ -1,46 +1,29 @@
 #!/usr/bin/python3
-from ansible.plugins.action import ActionBase
 from asyncio import run
-from shutil import rmtree
+from os import readlink
-from os.path import isdir, join
+from os.path import join, islink
-from os import makedirs
+from risotto.machine import load, templates
+try:
-from risotto.machine import templates, load, ROUGAIL_NAMESPACE
+    from ansible.plugins.action import ActionBase
-from risotto.utils import RISOTTO_CONFIG
+    from ansible.module_utils.basic import AnsibleModule
-from rougail.utils import normalize_family
+    class FakeModule(AnsibleModule):
+        def __init__(self):
+            pass
+except:
+    import traceback
+    traceback.print_exc()
+    class ActionBase():
+        def __init__(self, *args, **kwargs):
+            raise Exception('works only with ansible')

-TIRAMISU_CACHE = 'tiramisu_cache.py'
+async def build_files(server_name: str,
-VALUES_CACHE = 'values_cache.py'
+                      just_copy: bool,
-INSTALL_DIR = RISOTTO_CONFIG['directories']['dest']
+                      ) -> None:
+    config = await load()

-async def build_files(server_name, is_host):
-    module_infos, rougailconfig, config = await load(TIRAMISU_CACHE,
-                                                     VALUES_CACHE,
-                                                     )
-    subconfig = config.option(normalize_family(server_name))
-    module_name = await subconfig.option(await subconfig.information.get('provider:global:module_name')).value.get()
-    module_info = module_infos[module_name]
-    rougailconfig['tmp_dir'] = 'tmp'
-    rougailconfig['destinations_dir'] = INSTALL_DIR
-    rougailconfig['templates_dir'] = module_info['infos'].templates_dir
-    if is_host:
-        tmpfile = await subconfig.option(f'{ROUGAIL_NAMESPACE}.host_install_dir').value.get()
-        rougailconfig['tmpfile_dest_dir'] = f'{tmpfile}'
-        rougailconfig['default_systemd_directory'] = '/usr/local/lib/systemd'
-    else:
-        rougailconfig['tmpfile_dest_dir'] = '/usr/local/lib'
-        rougailconfig['default_systemd_directory'] = '/systemd'
-    if isdir(rougailconfig['destinations_dir']):
-        rmtree(rougailconfig['destinations_dir'])
-    if isdir(rougailconfig['tmp_dir']):
-        rmtree(rougailconfig['tmp_dir'])
-    makedirs(rougailconfig['tmp_dir'])
-    makedirs(rougailconfig['destinations_dir'])
     await templates(server_name,
-                    subconfig,
+                    config,
-                    rougailconfig,
+                    just_copy=just_copy,
                     )
@@ -48,8 +31,38 @@ class ActionModule(ActionBase):
     def run(self, tmp=None, task_vars=None):
         super(ActionModule, self).run(tmp, task_vars)
         module_args = self._task.args.copy()
-        name = module_args['hostname']
+        root_local = module_args.pop('root_local')
-        is_host = module_args['is_host']
+        root_remote = module_args.pop('root_remote')
+        name = module_args.pop('hostname')
+        is_host = module_args.pop('is_host')
+        just_copy = module_args.get('just_copy', False)
+        module_args['root'] = root_remote

-        run(build_files(name, is_host))
+        run(build_files(name, just_copy))
-        return dict(ansible_facts=dict({}))
+        #
+        remote = self._execute_module(module_name='compare', module_args=module_args, task_vars=task_vars)
+        if remote.get('failed'):
+            raise Exception(f'error in remote action: {remote["module_stdout"]}')
+        #
+        module = FakeModule()
+        modified_files = []
+        changed = False
+        for path in module_args['paths']:
+            full_path = join(root_local, path['name'][1:])
+            if remote['compare'].get(path['name']):
+                if remote['compare'][path['name']]['type'] == 'file':
+                    if remote['compare'][path['name']]['shasum'] == module.digest_from_file(full_path, 'sha256'):
+                        continue
+                else:
+                    # it's a symlink
+                    if islink(full_path) and remote['compare'][path['name']]['name'] == readlink(full_path):
+                        continue
+            changed = True
+            modified_files.append(path['name'])
+        if not is_host:
+            for old_file in remote['old_files']:
+                changed = True
+                # module_args['path'] = old_file
+                # module_args['state'] = 'absent'
+                # self._execute_module(module_name='ansible.builtin.file', module_args=module_args, task_vars=task_vars)
+        return dict(ansible_facts=dict({}), changed=changed)
@@ -31,7 +31,8 @@ def fileslist(data, is_host=False, name_only=False, prefix=None):
              )
     for service, service_data in data.items():
         if not service_data['activate']:
-            if service_data['engine'] == 'none' and service_data['type'] == 'service' and not 'overrides' in service_data:
+            if service_data['manage']:
+                if not service_data.get('undisable', False) and not service_data['engine'] and not service_data.get('target'):
                 _add(files,
                      {'owner': 'root', 'group': 'root', 'mode': '0755'},
                      base_systemd + '/systemd/system/' + service_data['doc'],
@@ -39,14 +40,21 @@ def fileslist(data, is_host=False, name_only=False, prefix=None):
                      prefix,
                      )
         else:
-            if service_data['activate'] and service_data['engine'] != 'none':
+            if service_data['manage'] and service_data['engine']:
                 _add(files,
                      {'owner': 'root', 'group': 'root', 'mode': '0755'},
                      base_systemd + '/systemd/system/' + service_data['doc'],
                      name_only,
                      prefix,
                      )
-            if service_data['activate'] and 'overrides' in service_data:
+            if service_data.get('target'):
+                _add(files,
+                     {'owner': 'root', 'group': 'root', 'mode': '0755'},
+                     f'/systemd/system/{service_data["target"]}.target.wants/{service_data["doc"]}',
+                     name_only,
+                     prefix,
+                     )
+            if 'overrides' in service_data:
                 for override_data in service_data['overrides'].values():
                     _add(files,
                          {'owner': 'root', 'group': 'root', 'mode': '0755'},
@@ -54,6 +62,13 @@ def fileslist(data, is_host=False, name_only=False, prefix=None):
                          name_only,
                          prefix,
                          )
+        if 'ip' in service_data:
+            _add(files,
+                 {'owner': 'root', 'group': 'root', 'mode': '0755'},
+                 base_systemd + '/systemd/system/' + service_data['doc'] + '.d/rougail_ip.conf',
+                 name_only,
+                 prefix,
+                 )
         if 'files' not in service_data:
             continue
         for file_data in service_data['files'].values():
@@ -88,6 +103,8 @@ def directorieslist(data):
 def machineslist(data, only=None, only_name=False):
     srv = []
     if only is not None:
+        if only not in data:
+            raise Exception(f"cannot find {only} but only {data.keys()}")
         if only_name:
             srv.append(only)
         else:
@@ -2,6 +2,36 @@
 - name: "Populate service facts"
   service_facts:

+- name: "Packages installation"
+  apt:
+    pkg: "{{ vars[inventory_hostname]['general']['host_packages'] }}"
+    update_cache: yes
+    state: latest
+
+- name: "Build host files"
+  rougail:
+    paths: "{{ vars[inventory_hostname]['services'] | fileslist(is_host=True) }}"
+    root_local: "{{ host_install_dir }}"
+    root_remote: "/"
+    hostname: "{{ inventory_hostname }}"
+    is_host: True
+
+- name: "Create /usr/local/lib/systemd/system"
+  file:
+    path: /usr/local/lib/systemd/system
+    state: directory
+    mode: 0755
+
+- name: "Copy service file only if not exists"
+  when: item.value['manage'] and item.value['activate'] and item.value['doc'].endswith('.service') and not item.value['doc'].endswith('@.service') and item.value['engine'] and item.value['engine'] != 'none'
+  copy:
+    src: '{{ host_install_dir }}/usr/local/lib/systemd/system/{{ item.value["doc"] }}'
+    force: no
+    dest: '/usr/local/lib/systemd/system/{{ item.value["doc"] }}'
+  loop: "{{ vars[inventory_hostname]['services'] | dict2items }}"
+  loop_control:
+    label: "{{ item.value['doc'] }}"
+
 - name: "Stop services"
   when: item.value['manage'] and item.value['activate'] and item.value['doc'].endswith('.service') and not item.value['doc'].endswith('@.service') and item.value['engine'] != 'none'
   ansible.builtin.service:
@@ -11,18 +41,6 @@
   loop_control:
     label: "{{ item.value['doc'] }}"

-- name: "Packages installation"
-  apt:
-    pkg: "{{ vars[inventory_hostname]['general']['host_packages'] }}"
-    update_cache: yes
-    state: latest
-
-- name: "Build host files"
-  local_action:
-    module: rougail
-    hostname: "{{ inventory_hostname }}"
-    is_host: True
-
 - name: "Create host directories"
   file: path={{ item }} state=directory mode=0755
   loop: "{{ vars[inventory_hostname]['services'] | directorieslist }}"
@@ -30,7 +48,7 @@
 - name: "Copy systemd-tmpfiles"
   when: item.name.startswith('/usr/local/lib/risotto-tmpfiles.d')
   ansible.builtin.copy:
-    src: installations/{{ item.name }}
+    src: "{{ host_install_dir }}/{{ item.name }}"
     dest: "{{ item.name }}"
     owner: "{{ item.owner }}"
     group: "{{ item.group }}"
@@ -49,7 +67,7 @@
 - name: "Copy host files"
   when: not item.name.startswith('/usr/local/lib/tmpfiles.d')
   ansible.builtin.copy:
-    src: installations/{{ item.name }}
+    src: "{{ host_install_dir }}/{{ item.name }}"
     dest: "{{ item.name }}"
     owner: "{{ item.owner }}"
     group: "{{ item.group }}"
@@ -105,7 +123,7 @@
     owner: "root"
     group: "root"
     mode: "0755"
-  loop: "{{ lookup('fileglob', '../sbin/*', wantlist=True) | list }}"
+  loop: "{{ lookup('fileglob', 'sbin/*', wantlist=True) | list }}"

 # Images informations
 - name: "Remove images tar"
@@ -141,3 +159,9 @@
   unarchive:
     src: "/tmp/risotto/images.tar"
     dest: "/var/lib/risotto/images_files"
+
+- name: "Create versions directory"
+  file:
+    path: /var/lib/risotto/machines_versions
+    state: directory
+    mode: "0700"
@@ -5,21 +5,21 @@ Example custom dynamic inventory script for Ansible, in Python.
 '''

 from argparse import ArgumentParser
-from json import dumps, JSONEncoder
+from json import load as json_load, dumps, JSONEncoder
 from os import remove
 from os.path import isfile
 from asyncio import run
+from traceback import print_exc

-from risotto.machine import load
+from risotto.machine import load, TIRAMISU_CACHE, VALUES_CACHE, INFORMATIONS_CACHE, ROUGAIL_NAMESPACE, ROUGAIL_NAMESPACE_DESCRIPTION
-from risotto.image import load_config
+from tiramisu import Config
-from risotto.utils import SERVERS
 from tiramisu.error import PropertiesOptionError
 from rougail.utils import normalize_family
-from rougail import RougailSystemdTemplate
+from rougail import RougailSystemdTemplate, RougailConfig
 from rougail.template.base import RougailLeader, RougailExtra

-TIRAMISU_CACHE = 'tiramisu_cache.py'
-VALUES_CACHE = 'values_cache.py'
+DEBUG = False


 class RougailEncoder(JSONEncoder):
@@ -38,36 +38,43 @@ class RisottoInventory(object):
         parser = ArgumentParser()
         parser.add_argument('--list', action='store_true')
         parser.add_argument('--host', action='store')
+        parser.add_argument('--nocache', action='store_true')
+        parser.add_argument('--debug', action='store_true')
         self.args = parser.parse_args()
+        if self.args.debug:
+            global DEBUG
+            DEBUG = True

     async def run(self):
-        if self.args.list:
+        if self.args.list and self.args.host:
+            raise Exception('cannot have --list and --host together')
+        if self.args.list or self.args.nocache:
             if isfile(TIRAMISU_CACHE):
                 remove(TIRAMISU_CACHE)
             if isfile(VALUES_CACHE):
                 remove(VALUES_CACHE)
-            return await self.do_inventory()
+            if isfile(INFORMATIONS_CACHE):
+                remove(INFORMATIONS_CACHE)
+        config = await load(TIRAMISU_CACHE,
+                            VALUES_CACHE,
+                            INFORMATIONS_CACHE,
+                            )
+        if self.args.list:
+            return await self.do_inventory(config)
         elif self.args.host:
-            return await self.get_vars(self.args.host)
+            return await self.get_vars(config, self.args.host)
         raise Exception('pfff')

-    async def do_inventory(self):
+    async def do_inventory(self,
-        module_infos = load_config(True,
+                           config: Config,
-                                   True,
+                           ) -> dict:
-                                   True,
+        servers = [await subconfig.option.doc() for subconfig in await config.option.list('optiondescription') if await subconfig.information.get('module') == 'host']
-                                   )
-        servers = []
-        for server_name, server in SERVERS.items():
-            module_name = server['module']
-            if module_name != 'host':
-                continue
-            servers.append(server_name)
         return dumps({
             'group': {
                 'hosts': servers,
                 'vars': {
                     # FIXME
-                    'ansible_ssh_host': '192.168.56.156',
+                    # 'ansible_ssh_host': '192.168.0.100',
                     'ansible_ssh_user': 'root',
                     'ansible_python_interpreter': '/usr/bin/python3'
                 }
@@ -75,41 +82,42 @@ class RisottoInventory(object):
         })

     async def get_vars(self,
+                       config: Config,
                        host_name: str,
                        ) -> dict:
-        try:
-            module_infos, rougailconfig, config = await load(TIRAMISU_CACHE,
-                                                             VALUES_CACHE,
-                                                             )
-        except Exception as err:
-            # import traceback
-            # traceback.print_exc()
-            print(err)
-            exit(1)
         ret = {}
-        modules = set()
+        rougailconfig = RougailConfig.copy()
-        for server_name, server in SERVERS.items():
+        rougailconfig['variable_namespace'] = ROUGAIL_NAMESPACE
-            if server['module'] == 'host' and server_name != host_name:
+        rougailconfig['variable_namespace_description'] = ROUGAIL_NAMESPACE_DESCRIPTION
+        for subconfig in await config.option.list('optiondescription'):
+            server_name = await subconfig.option.description()
+            module_name = await subconfig.option(await subconfig.information.get('provider:global:module_name')).value.get()
+            if module_name == 'host' and server_name != host_name:
                 continue
-            modules.add(server['module'])
-            subconfig = config.option(normalize_family(server_name))
             engine = RougailSystemdTemplate(subconfig, rougailconfig)
             await engine.load_variables()
-            if server['module'] != 'host' and engine.rougail_variables_dict['general']['host'] != host_name:
+            if module_name != 'host' and engine.rougail_variables_dict['general']['host'] != host_name:
                 continue
             ret[server_name] = engine.rougail_variables_dict
-        ret['modules'] = {module_name: module_info['infos'].depends for module_name, module_info in module_infos.items() if module_name in modules}
+        ret['modules'] = await config.information.get('modules')
         ret['delete_old_image'] = False
         ret['configure_host'] = True
         ret['only_machine'] = None
+        ret['copy_template'] = False
+        ret['host_install_dir'] = ret[host_name].pop('host_install_dir')
         return dumps(ret, cls=RougailEncoder)


 # Get the inventory.
 async def main():
+    try:
         inv = RisottoInventory()
         values = await inv.run()
         print(values)
+    except Exception as err:
+        if DEBUG:
+            print_exc()
+        exit(err)


 run(main())
68	ansible/library/compare.py	Normal file
@@ -0,0 +1,68 @@
+#!/usr/bin/python3
+
+from time import sleep
+from os import fdopen, walk, readlink
+from os.path import join, islink
+from dbus import SystemBus, Array
+from dbus.exceptions import DBusException
+
+from ansible.module_utils.basic import AnsibleModule
+
+
+def run_module():
+    # define available arguments/parameters a user can pass to the module
+    module_args = dict(
+        root=dict(type='str', required=True),
+        paths=dict(type='list', required=True),
+    )
+
+    # seed the result dict in the object
+    # we primarily care about changed and state
+    # changed is if this module effectively modified the target
+    # state will include any data that you want your module to pass back
+    # for consumption, for example, in a subsequent task
+    result = dict(
+        changed=False,
+        compare={},
+        symlink={},
+        old_files=[],
+    )
+
+    # the AnsibleModule object will be our abstraction working with Ansible
+    # this includes instantiation, a couple of common attr would be the
+    # args/params passed to the execution, as well as if the module
+    # supports check mode
+    module = AnsibleModule(
+        argument_spec=module_args,
+        supports_check_mode=True
+    )
+
+    root = module.params['root']
+    if root != '/':
+        paths = {join(root, path['name'][1:]): path['name'] for path in module.params['paths']}
+        search_paths = [join(directory, f) for directory, subdirectories, files in walk(root) for f in files]
+    else:
+        paths = {path['name']: path['name'] for path in module.params['paths']}
+        search_paths = paths
+    for path in search_paths:
+        if path in paths:
+            if not islink(path):
+                result['compare'][paths[path]] = {'type': 'file',
+                                                  'shasum': module.digest_from_file(path, 'sha256'),
+                                                  }
+            else:
+                result['compare'][paths[path]] = {'type': 'symlink',
+                                                  'name': readlink(path),
+                                                  }
+        else:
+            result['old_files'].append(path)
+
+    module.exit_json(**result)
+
+
+def main():
+    run_module()
+
+
+if __name__ == '__main__':
+    main()
@@ -82,6 +82,7 @@ def start(bus, machines):
     error = False
     while True:
         try:
+            ret = []
             res = remote_object.OpenMachineShell(host,
                                                  '',
                                                  cmd[0],
@@ -91,7 +92,6 @@ def start(bus, machines):
                                                  )
             fd = res[0].take()
             fh = fdopen(fd)
-            ret = []
             while True:
                 try:
                     ret.append(fh.readline().strip())
@@ -114,7 +114,7 @@ def start(bus, machines):
                 break
         if error:
             continue
-        if ret[0] == 'running':
+        if ret and ret[0] == 'running':
             continue
         cmd = ['/usr/bin/systemctl', '--state=failed', '--no-legend', '--no-page']
         res = remote_object.OpenMachineShell(host,
@@ -6,65 +6,69 @@
   file: path=/var/lib/risotto/journals/{{ item.name }} state=directory mode=0755

 - name: "Build machine files for {{ item.name }}"
-  local_action:
-    module: rougail
+  rougail:
+    paths: "{{ vars[item.name]['services'] | fileslist }}"
+    root_local: "{{ host_install_dir }}"
+    root_remote: "/var/lib/risotto/configurations/{{ item.name }}"
     hostname: "{{ item.name}}"
     is_host: False

-- name: "Get local informations for {{ item.name }} configuration's file"
-  local_action:
-    module: stat
-    path: "installations{{ file.name }}"
-    checksum: sha256
-    get_checksum: yes
-    follow: true
-  loop: "{{ vars[item.name]['services'] | fileslist }}"
-  loop_control:
-    loop_var: file
-    label: "{{ file.name }}"
-  register: local_configuration
-
-- name: "Get remote informations for {{ item.name }} configuration's file"
-  stat:
-    path: "/var/lib/risotto/configurations/{{ item.name }}{{ file.name }}"
-    checksum: sha256
-    get_checksum: yes
-    follow: true
-  loop: "{{ vars[item.name]['services'] | fileslist }}"
-  loop_control:
-    loop_var: file
-    label: "{{ file.name }}"
-  register: remote_configuration
-
-- name: "Configuration's file is up to date in {{ item.name }}"
-  debug:
-    msg: "file is {{ 'out of date' if not file[1].stat.exists or (not 'checksum' in file[0].stat and 'checksum' in file[1].stat) or ('checksum' in file[0].stat and not 'checksum' in file[1].stat) or ('checksum' in file[0].stat and 'checksum' in file[1].stat and file[0].stat.checksum != file[1].stat.checksum) else 'up to date' }}"
-  changed_when: not file[1].stat.exists or (not 'checksum' in file[0].stat and 'checksum' in file[1].stat) or ('checksum' in file[0].stat and not 'checksum' in file[1].stat) or ('checksum' in file[0].stat and 'checksum' in file[1].stat and file[0].stat.checksum != file[1].stat.checksum)
-  loop: "{{ local_configuration.results | zip(remote_configuration.results) | list }}"
-  loop_control:
-    loop_var: file
-    label: "{{ file[0]['file']['name'] }}"
-  ignore_errors: true
   register: up_to_date_configuration

-- name: "Remove Compressed files for {{ item.name }}"
+- name: "Change secrets right"
   local_action:
     module: file
-    path: /tmp/new_configurations/{{ item.name }}
+    path: "{{ host_install_dir }}/secrets"
-    state: absent
+    state: directory
-  when: up_to_date_configuration.changed
+    mode: 0700

 - name: "Compress files for {{ item.name }}"
   local_action:
     module: archive
-    path: "installations/"
+    path: "{{ host_install_dir }}/"
     dest: /tmp/new_configurations/{{ item.name }}
     format: tar
   when: up_to_date_configuration.changed

+- name: "Build machine templates for {{ item.name }}"
+  rougail:
+    paths: "{{ vars[item.name]['services'] | fileslist }}"
+    root_local: "{{ host_install_dir }}"
+    root_remote: "/var/lib/risotto/configurations/{{ item.name }}"
+    hostname: "{{ item.name}}"
+    just_copy: true
+    is_host: False
+  when: copy_template
+  register: up_to_date_configuration
+
+- name: "Compress templates for {{ item.name }}"
+  local_action:
+    module: archive
+    path: "../templates/"
+    dest: /tmp/new_templates/{{ item.name }}
+    format: tar
+  when: copy_template
+
+- name: "Remove templates directory for {{ item.name }}"
+  file:
+    path: "/var/lib/risotto/templates/{{ item.name }}"
+    state: absent
+  when: copy_template
+
+- name: "Create templates directory for {{ item.name }}"
+  file:
+    path: "/var/lib/risotto/templates/{{ item.name }}"
+    state: directory
+  when: copy_template
+
+- name: "Copy templates for {{ item.name }}"
+  unarchive:
+    src: "/tmp/new_templates/{{ item.name }}"
+    dest: "/var/lib/risotto/templates/{{ item.name }}/"
+  when: copy_template
+
 - name: "Remove old image {{ vars | modulename(item.name) }}"
   file:
-    path: "/var/lib/risotto/images/{{ vars | modulename(item.name) }}.tar"
+    path: "/var/lib/risotto/images/{{ vars | modulename(item.name) }}"
     state: absent
   when: delete_old_image == true

@@ -88,27 +92,30 @@

 - name: "Check image for {{ item.name }}"
   stat:
-    path: "/var/lib/risotto/images/{{ vars | modulename(item.name) }}.tar"
+    path: "/var/lib/risotto/images/{{ vars | modulename(item.name) }}"
     follow: true
   register: register_name
   when: system_directory_created.changed

+#- name: Print return information from the previous task
+#  ansible.builtin.debug:
+#    var: register_name
+
 - name: "Build image for {{ item.name }}"
   ansible.builtin.shell: "/usr/local/sbin/build_image {{ vars | modulename(item.name) }}"
-  when: system_directory_created.changed and not register_name.stat.exists
+  when: "'stat' in register_name and not register_name.stat.exists"
+  register: ret
+  failed_when: ret.rc != 0

-- name: "Uncompress machine image for {{ item.name }}"
+- name: "Copy machine image for {{ item.name }}"
-  unarchive:
+  ansible.builtin.shell: "/usr/bin/cp -a --reflink=auto /var/lib/risotto/images/{{ vars | modulename(item.name) }}/* /var/lib/machines/{{ item.name }}"
-    src: "/var/lib/risotto/images/{{ vars | modulename(item.name) }}.tar"
-    remote_src: true
-    dest: /var/lib/machines/{{ item.name }}/
   when: system_directory_created.changed

-- name: "SHA machine image for {{ item.name }}"
+- name: "Copy machine image version for {{ item.name }}"
   ansible.builtin.copy:
-    src: "/var/lib/risotto/images/{{ vars | modulename(item.name) }}.tar.sha"
+    src: "/var/lib/risotto/images/{{ vars | modulename(item.name) }}.version"
     remote_src: true
-    dest: "/var/lib/risotto/configurations/sha/{{ item.name }}.sha"
+    dest: "/var/lib/risotto/machines_versions/{{ item.name }}.version"
     owner: "root"
     group: "root"
   when: system_directory_created.changed
@@ -19,6 +19,8 @@
   unarchive:
     src: "{{ item }}"
     dest: /var/lib/risotto/configurations/{{ item | basename }}/
+    owner: root
+    group: root
   loop: "{{ lookup('fileglob', '/tmp/new_configurations/*', wantlist=True) }}"

 - name: "Enable machines"
@@ -36,3 +38,9 @@
     module: file
     path: /tmp/new_configurations
     state: absent
+
+- name: "Remove compressed templates directory"
+  local_action:
+    module: file
+    path: /tmp/new_templates
+    state: absent
@@ -1,7 +1,6 @@
 ---
-#FIXME : si on redemarre a appel tmpfiles.d ....
 - name: Risotto
-  hosts: cloud.silique.fr
+  hosts: all
   tasks:
     - name: "Configure the host"
       include_tasks: host.yml
@@ -20,6 +19,21 @@
         state: directory
         mode: 0700

+    - name: "Remove compressed templates files directory"
+      local_action:
+        module: file
+        path: /tmp/new_templates
+        state: absent
+      when: copy_template
+
+    - name: "Create compressed templates files directory"
+      local_action:
+        module: file
+        path: /tmp/new_templates
+        state: directory
+        mode: 0700
+      when: copy_template
+
     - name: "Prepare machine configuration"
       include_tasks: machine.yml
       loop: "{{ vars | machineslist(only=only_machine) }}"
@@ -12,15 +12,13 @@ RISOTTO_DIR="/var/lib/risotto"
 RISOTTO_IMAGE_DIR="$RISOTTO_DIR/images"
 # image configuration
 IMAGE_BASE_RISOTTO_BASE_DIR="$RISOTTO_IMAGE_DIR/image_bases"
+IMAGE_NAME_RISOTTO_IMAGE_DIR_TMP="$RISOTTO_IMAGE_DIR/tmp/$IMAGE_NAME"
 IMAGE_NAME_RISOTTO_IMAGE_DIR="$RISOTTO_IMAGE_DIR/$IMAGE_NAME"
-IMAGE_NAME_RISOTTO_IMAGE_NAME="$RISOTTO_IMAGE_DIR/$IMAGE_NAME".tar
 IMAGE_DIR_RECIPIENT_IMAGE="/var/lib/risotto/images_files/$IMAGE_NAME"

-#FIXME ou ?
+rm -f /var/log/risotto/build_image.log
+mkdir -p "$RISOTTO_IMAGE_DIR" "$RISOTTO_IMAGE_DIR/tmp/" /var/log/risotto
-rm -rf "$IMAGE_NAME_RISOTTO_IMAGE_DIR" "$RISOTTO_IMAGE_DIR/tmp"
-mkdir -p "$RISOTTO_IMAGE_DIR"
 PKG=""
 BASE_DIR=""
 for script in $(ls "$IMAGE_DIR_RECIPIENT_IMAGE"/preinstall/*.sh 2> /dev/null); do
@@ -29,26 +27,26 @@ done

 if [ -z "$OS_NAME" ]; then
     echo "NO OS NAME DEFINED"
-    exit 0
+    exit 1
 fi
 if [ -z "$RELEASEVER" ]; then
     echo "NO RELEASEVER DEFINED"
-    exit 0
+    exit 1
 fi
 if [ -z "$INSTALL_TOOL" ]; then
     echo "NO INSTALL TOOL DEFINED"
-    exit 0
+    exit 1
 fi
 BASE_NAME="$OS_NAME-$RELEASEVER"
 BASE_DIR="$IMAGE_BASE_RISOTTO_BASE_DIR/$BASE_NAME"
-BASE_TAR="$IMAGE_BASE_RISOTTO_BASE_DIR-$BASE_NAME".tar
+TMP_BASE_DIR="$IMAGE_BASE_RISOTTO_BASE_DIR/tmp/$BASE_NAME"
 BASE_PKGS_FILE="$IMAGE_BASE_RISOTTO_BASE_DIR-$BASE_NAME.pkgs"
 BASE_LOCK="$IMAGE_BASE_RISOTTO_BASE_DIR-$BASE_NAME.build"


 function dnf_opt_base() {
     INSTALL_DIR=$1
-    echo "--setopt=install_weak_deps=False --nodocs --noplugins --installroot=$INSTALL_DIR --releasever $RELEASEVER"
+    echo "--setopt=install_weak_deps=False --setopt=fastestmirror=True --nodocs --noplugins --installroot=$INSTALL_DIR --releasever $RELEASEVER"
 }

 function dnf_opt() {
@@ -59,63 +57,59 @@ function dnf_opt() {
 }
 function new_package_base() {
     if [ "$INSTALL_TOOL" = "dnf" ]; then
-        OPT=$(dnf_opt "$BASE_DIR" "$BASE_PKG")
+        OPT=$(dnf_opt "$TMP_BASE_DIR" "$BASE_PKG")
         dnf --assumeno $OPT | grep ^" " > "$BASE_PKGS_FILE".new
     else
-        debootstrap --include="$BASE_PKG" --variant=minbase "$RELEASEVER" "$BASE_DIR" > /dev/null
+        debootstrap --include="$BASE_PKG" --variant=minbase "$RELEASEVER" "$TMP_BASE_DIR" >> /var/log/risotto/build_image.log
-        chroot "$BASE_DIR" dpkg-query -f '${binary:Package} ${source:Version}\n' -W > "$BASE_PKGS_FILE".new
+        chroot "$TMP_BASE_DIR" dpkg-query -f '${binary:Package} ${source:Version}\n' -W > "$BASE_PKGS_FILE".new
     fi
 }
 function install_base() {
     if [ "$INSTALL_TOOL" = "dnf" ]; then
-        OPT=$(dnf_opt "$BASE_DIR" "$BASE_PKG")
+        OPT=$(dnf_opt "$TMP_BASE_DIR" "$BASE_PKG")
         dnf --assumeyes $OPT
     fi
 }
 function new_package() {
     if [ "$INSTALL_TOOL" = "dnf" ]; then
-        OPT=$(dnf_opt_base "$IMAGE_NAME_RISOTTO_IMAGE_DIR")
+        OPT=$(dnf_opt_base "$IMAGE_NAME_RISOTTO_IMAGE_DIR_TMP")
-        dnf $OPT update
+        dnf --assumeno $OPT update >> /var/log/risotto/build_image.log
-        OPT=$(dnf_opt "$IMAGE_NAME_RISOTTO_IMAGE_DIR" "$PKG")
+        OPT=$(dnf_opt "$IMAGE_NAME_RISOTTO_IMAGE_DIR_TMP" "$PKG")
         dnf --assumeno $OPT | grep ^" " > "$IMAGE_NAME_RISOTTO_IMAGE_DIR".pkgs.new
     else
-        chroot "$IMAGE_NAME_RISOTTO_IMAGE_DIR" apt update > /dev/null 2>&1
+        chroot "$IMAGE_NAME_RISOTTO_IMAGE_DIR_TMP" apt update >> /var/log/risotto/build_image.log 2>&1
-        chroot "$IMAGE_NAME_RISOTTO_IMAGE_DIR" apt install --no-install-recommends --yes $PKG -s 2>/dev/null|grep ^"Inst " > "$IMAGE_NAME_RISOTTO_IMAGE_DIR".pkgs.new
+        chroot "$IMAGE_NAME_RISOTTO_IMAGE_DIR_TMP" apt install --no-install-recommends --yes $PKG -s 2>/dev/null|grep ^"Inst " > "$IMAGE_NAME_RISOTTO_IMAGE_DIR".pkgs.new
     fi
 }
 function install_pkg() {
     if [ "$INSTALL_TOOL" = "dnf" ]; then
-        OPT=$(dnf_opt "$IMAGE_NAME_RISOTTO_IMAGE_DIR" "$PKG")
+        OPT=$(dnf_opt "$IMAGE_NAME_RISOTTO_IMAGE_DIR_TMP" "$PKG")
         dnf --assumeyes $OPT
     else
-        chroot "$IMAGE_NAME_RISOTTO_IMAGE_DIR" apt install --no-install-recommends --yes $PKG
+        chroot "$IMAGE_NAME_RISOTTO_IMAGE_DIR_TMP" bash -c "export DEBIAN_FRONTEND=noninteractive; apt install --no-install-recommends --yes $PKG"
     fi
 }

-if [ ! -f "$BASE_LOCK" ] || [ ! -f "$BASE_TAR" ]; then
+if [ ! -f "$BASE_LOCK" ] || [ ! -d "$BASE_DIR" ]; then
     echo " - reinstallation de l'image de base"
-    rm -rf "$BASE_DIR"
     new_package_base
     diff -u "$BASE_PKGS_FILE" "$BASE_PKGS_FILE".new && NEW_BASE=false || NEW_BASE=true
-    if [ ! -f "$BASE_TAR" ] || [ "$NEW_BASE" = true ]; then
+    if [ ! -d "$BASE_DIR" ] || [ "$NEW_BASE" = true ]; then
         mkdir -p "$IMAGE_BASE_RISOTTO_BASE_DIR"
+        rm -rf "$IMAGE_NAME_RISOTTO_IMAGE_DIR_TMP"
         install_base
-        cd "$IMAGE_BASE_RISOTTO_BASE_DIR"
-        tar cf "$BASE_TAR" "$BASE_NAME"
-        cd - > /dev/null
         if [ -f "$BASE_PKGS_FILE" ]; then
             mv "$BASE_PKGS_FILE" "$BASE_PKGS_FILE".old
         fi
         mv "$BASE_PKGS_FILE".new "$BASE_PKGS_FILE"
-        rm -rf "$IMAGE_BASE_RISOTTO_BASE_DIR"
-    fi
         rm -rf "$BASE_DIR"
+        mv "$TMP_BASE_DIR" "$BASE_DIR"
+    fi
     touch "$BASE_LOCK"
 fi

-tar xf "$BASE_TAR"
+rm -rf "$IMAGE_NAME_RISOTTO_IMAGE_DIR_TMP"
-mv "$BASE_NAME" "$IMAGE_NAME_RISOTTO_IMAGE_DIR"
+cp --reflink=auto -a "$BASE_DIR/" "$IMAGE_NAME_RISOTTO_IMAGE_DIR_TMP"
 if [ -n "$COPR" ]; then
     #FIXME signature...
     mkdir -p "$REPO_DIR"
@@ -124,17 +118,18 @@ if [ -n "$COPR" ]; then
     cd - > /dev/null
 fi
 if [ "$FUSION" = true ]; then
-    dnf -y install "https://download1.rpmfusion.org/free/fedora/rpmfusion-free-release-$RELEASEVER.noarch.rpm" --installroot="$IMAGE_NAME_RISOTTO_IMAGE_DIR" > /dev/null
+    dnf -y install "https://download1.rpmfusion.org/free/fedora/rpmfusion-free-release-$RELEASEVER.noarch.rpm" --installroot="$IMAGE_NAME_RISOTTO_IMAGE_DIR_TMP" >> /var/log/risotto/build_image.log
 fi

 # FIXME verifier s'il y a des modifs sur pre/post
 if [ -f "$IMAGE_NAME_RISOTTO_IMAGE_DIR".base.pkgs ] && [ -f "$IMAGE_NAME_RISOTTO_IMAGE_DIR".pkgs ]; then
     echo " - différence(s) avec les paquets de base"
     diff -u "$IMAGE_NAME_RISOTTO_IMAGE_DIR".base.pkgs "$BASE_PKGS_FILE" && INSTALL=false || INSTALL=true
-    [ ! -f "$IMAGE_NAME_RISOTTO_IMAGE_NAME" ] && INSTALL=true
+    [ ! -d "$IMAGE_NAME_RISOTTO_IMAGE_DIR" ] && INSTALL=true
 else
     INSTALL=true
 fi

 new_package
 if [ "$INSTALL" = false ]; then
     echo " - différence(s) avec les paquets de l'image"
@@ -146,26 +141,12 @@ if [ "$INSTALL" = false ]; then
 fi
 if [ "$INSTALL" = true ]; then
     echo " - installation"
-    if [ -f "$IMAGE_NAME_RISOTTO_IMAGE_DIR"_"$RELEASEVER".version ]; then
+    if [ -f "$IMAGE_NAME_RISOTTO_IMAGE_DIR".version ]; then
-        VERSION=$(cat "$IMAGE_NAME_RISOTTO_IMAGE_DIR"_"$RELEASEVER".version)
+        VERSION=$(cat "$IMAGE_NAME_RISOTTO_IMAGE_DIR".version)
     else
         VERSION=0
     fi
-    mkdir "$RISOTTO_IMAGE_DIR/tmp"
-    ORI_DIR=$PWD
-    cd "$RISOTTO_IMAGE_DIR/tmp"
-    if [ ! "$VERSION" = 0 ] && [ -f "$IMAGE_NAME_RISOTTO_IMAGE_NAME" ]; then
-        tar xf "$IMAGE_NAME_RISOTTO_IMAGE_NAME"
-#        if [ "$INSTALL_TOOL" = "apt" ]; then
-#            chown _apt "$IMAGE_NAME"
-#        fi
-#    else
-#        mkdir "$IMAGE_NAME"
-    fi
-    #cd "$IMAGE_NAME"
     make_changelog "$IMAGE_NAME" "$VERSION" "$OS_NAME" "$RELEASEVER" > "$IMAGE_NAME_RISOTTO_IMAGE_DIR"_"$RELEASEVER"_"$VERSION"_changelog.md
-    cd $ORI_DIR
-    rm -rf "$RISOTTO_IMAGE_DIR/tmp"
     install_pkg
     sleep 2

@@ -173,25 +154,20 @@ if [ "$INSTALL" = true ]; then
         . "$script"
     done

-    CONTAINER=$IMAGE_NAME make_volatile /etc
+    ROOT=$IMAGE_NAME_RISOTTO_IMAGE_DIR_TMP make_volatile /etc
     if [ ! "$?" = 0 ]; then
         echo "make_volatile failed"
         exit 1
     fi
-    cd "$RISOTTO_IMAGE_DIR/$IMAGE_NAME"
+    rm -rf "$IMAGE_NAME_RISOTTO_IMAGE_DIR"
-    if [ -f "$IMAGE_NAME_RISOTTO_IMAGE_NAME" ]; then
+    mv "$IMAGE_NAME_RISOTTO_IMAGE_DIR_TMP" "$IMAGE_NAME_RISOTTO_IMAGE_DIR"
-        mv -f "$IMAGE_NAME_RISOTTO_IMAGE_NAME" "$IMAGE_NAME_RISOTTO_IMAGE_NAME".old
+    cp --reflink=auto -f "$BASE_PKGS_FILE" "$IMAGE_NAME_RISOTTO_IMAGE_DIR".base.pkgs
-    fi
-    tar cf "$IMAGE_NAME_RISOTTO_IMAGE_NAME" .
-    sha256sum "$IMAGE_NAME_RISOTTO_IMAGE_NAME" > "$IMAGE_NAME_RISOTTO_IMAGE_NAME".sha
-    cd - > /dev/null
-    cp -f "$BASE_PKGS_FILE" "$IMAGE_NAME_RISOTTO_IMAGE_DIR".base.pkgs
     mv -f "$IMAGE_NAME_RISOTTO_IMAGE_DIR".pkgs.new "$IMAGE_NAME_RISOTTO_IMAGE_DIR".pkgs
     mv -f "$IMAGE_NAME_RISOTTO_IMAGE_DIR".md5sum.new "$IMAGE_NAME_RISOTTO_IMAGE_DIR".md5sum
     VERSION=$((VERSION + 1))
-    echo "$VERSION" > "$IMAGE_NAME_RISOTTO_IMAGE_DIR"_"$RELEASEVER".version
+    echo "$VERSION" > "$IMAGE_NAME_RISOTTO_IMAGE_DIR".version
 fi
-rm -rf "$IMAGE_NAME_RISOTTO_IMAGE_DIR"
+rm -rf "$IMAGE_NAME_RISOTTO_IMAGE_DIR_TMP"

 echo " => OK"
 exit 0
22	ansible/sbin/compare_image	Executable file
@@ -0,0 +1,22 @@
+#!/bin/bash
+
+SRV=$1
+if [ -z "$SRV" ]; then
+    echo "usage: $0 machine"
+    exit 1
+fi
+
+dirname="/var/lib/risotto/templates/$SRV"
+if [ ! -d "$dirname" ]; then
+    echo "cannot find $dirname"
+    echo "usage: $0 machine"
+    exit 1
+fi
+cd $dirname
+find -type f | while read a; do
+    cfile="/var/lib/machines/$SRV/usr/share/factory/$a"
+    if [ -f "$cfile" ]; then
+        diff -u "$cfile" "$a"
+    fi
+done
+cd - > /dev/null
0	ansible/sbin/diagnose	Normal file → Executable file
@@ -1,9 +1,8 @@
 #!/bin/bash -e
-if [ -z $CONTAINER ]; then
+if [ -z $ROOT]; then
-    echo "PAS DE CONTAINER"
+    echo "PAS DE ROOT"
     exit 1
 fi
-ROOT="/var/lib/risotto/images/$CONTAINER"
 echo "$ROOT"
 DESTDIR="$ROOT/usr/lib/tmpfiles.d"
 CONF_DST="/usr/share/factory"
6	ansible/sbin/update_images	Normal file → Executable file
@@ -8,13 +8,15 @@ IMAGE_BASE_RISOTTO_BASE_DIR="$RISOTTO_IMAGE_DIR/image_bases"

 rm -f $IMAGE_BASE_RISOTTO_BASE_DIR*.build

+if [ -z "$1" ]; then
 ls /var/lib/risotto/images_files/ | while read image; do
     if [ -d /var/lib/risotto/images_files/"$image" ]; then
         echo
         echo "Install image $image"
-        /usr/local/sbin/build_image "$image"
+        /usr/local/sbin/build_image "$image" || true
     fi
 done
+fi
 #rm -f $IMAGE_BASE_RISOTTO_BASE_DIR*.build

 MACHINES=""
@@ -28,7 +30,7 @@ for nspawn in $(ls /etc/systemd/nspawn/*.nspawn); do
     IMAGE_NAME_RISOTTO_IMAGE_NAME=${content##* }
     diff -q "$IMAGE_NAME_RISOTTO_IMAGE_NAME".sha "$SHA_MACHINE" > /dev/null || (
         echo "Reinstall machine $machine"
-        machinectl stop $machine
+        machinectl stop $machine || true
         while true; do
             machinectl status "$machine" > /dev/null 2>&1 || break
             sleep 1
@@ -1,3 +1,4 @@
 [directories]
 dataset = '/home/gnunux/git/risotto/dataset/seed'
 dest = 'installations'
+dest_templates = 'templates'
28	sbin/risotto_check_certificates	Executable file
@@ -0,0 +1,28 @@
+#!/usr/bin/env python3
+
+from os import walk
+from datetime import datetime
+
+
+week_day = datetime.now().isocalendar().week
+week_cert = f'certificate_{week_day}.crt'
+
+
+for p, d, f in walk('pki/x509'):
+    if not d and not f:
+        print('empty dir, you can remove it: ', p)
+    if not f:
+        continue
+    if f == ['serial_number']:
+        continue
+    if not p.endswith('/ca') and not p.endswith('/server') and not p.endswith('/client'):
+        print('unknown directory: ', p)
+        continue
+    if week_cert in f:
+        continue
+    for ff in f:
+        if ff.startswith('certificate_') and ff.endswith('.crt'):
+            print(f'old certificat in: ', p)
+            break
+    else:
+        print('cannot find certificat in: ', p)
235	sbin/risotto_display	Executable file
@@ -0,0 +1,235 @@
+#!/usr/bin/env python3
+
+from asyncio import run
+from tabulate import tabulate
+from argparse import ArgumentParser
+
+from rougail.utils import normalize_family
+from tiramisu.error import PropertiesOptionError
+from risotto.machine import load, remove_cache, ROUGAIL_NAMESPACE
+
+
+HIDE_SECRET = True
+
+
+def list_to_string(lst):
+    if isinstance(lst, list):
+        return "\n".join([str(val) for val in lst])
+    return lst
+
+
+async def get_files_subelements(type_name, element, files_subelement, files_cols):
+    data = {}
+    if not await element.option('activate').value.get():
+        return data
+    for subelement in files_subelement.values():
+        if subelement['type'] == 'subelement':
+            try:
+                value = list_to_string(await element.option(subelement['key']).value.get())
+            # FIXME except AttributeError:
+            except Exception:
+                value = ''
+        elif subelement['type'] == 'information':
+            value = await element.information.get(subelement['key'], '')
+        elif subelement['type'] == 'none':
+            value = subelement['value']
+        else:
+            raise Exception('unknown subelement')
+        if value != '':
+            files_cols.add(subelement['key'])
+            data[subelement['key']] = value
+    if type_name == 'overrides':
+        data['name'] = f'/systemd/system/{data["source"]}.d/rougail.conf'
+        if not data['engine']:
+            data['engine'] = 'none'
+    elif not data['engine']:
+        data['engine'] = 'cheetah'
+    return data
+
+
+async def services(config, values):
+    files_subelement = {'Source': {'key': 'source', 'type': 'information'},
+                        'Nom': {'key': 'name', 'type': 'subelement'},
+                        'Variable': {'key': 'variable', 'type': 'subelement'},
+                        'Propriétaire': {'key': 'owner', 'type': 'subelement'},
+                        'Groupe': {'key': 'group', 'type': 'subelement'},
+                        'Mode': {'key': 'mode', 'type': 'subelement'},
+                        'Moteur': {'key': 'engine', 'type': 'information'},
+                        }
+    disabled_services = []
+    for service in await config.option.list(type="all"):
+        doc = await service.option.doc()
+        files_lst = []
+        files_cols = set()
+        if not await service.option('manage').value.get():
+            doc += " - unmanaged"
+        if not await service.option('activate').value.get():
+            disabled_services.append([doc])
+        else:
+            for type in await service.list(type="all"):
+                type_name = await type.option.doc()
+                if type_name in ['files', 'overrides']:
+                    for element in await type.list(type="all"):
+                        data = await get_files_subelements(type_name, element, files_subelement, files_cols)
+                        if data:
+                            files_lst.append(data)
+                elif type_name == 'manage':
+                    pass
+                elif type_name == 'activate':
+                    if not await type.value.get():
+                        doc += " - unactivated"
+                else:
+                    print("FIXME " + type_name)
+        if files_lst:
+            keys = [key for key, val in files_subelement.items() if val['key'] in files_cols]
+            values[doc] = {'keys': keys, 'lst': []}
+            for lst in files_lst:
+                values[doc]['lst'].append([val for key, val in lst.items() if key in files_cols])
+    if disabled_services:
+        values["Services désactivés"] = {'keys': ['Nom'], 'lst': disabled_services}
+
+
+async def table_leader(config, read_only):
+    keys = ['Description']
+    if read_only:
+        keys.append('Cachée')
+    leadership_lst = await config.list(type="all")
+    leader = leadership_lst.pop(0)
+    leader_owner = await leader.owner.get()
+    follower_names = [await follower.option.name() for follower in leadership_lst]
+    doc = await leader.option.doc()
+    properties = await leader.property.get()
+    if 'mandatory' in properties:
+        doc += '*'
+    name = await leader.option.name()
+    lst = [[f'{doc} ({name})']]
+    if read_only:
+        if 'hidden' in properties:
+            hidden = 'oui'
+        else:
+            hidden = ''
+        lst[0].append(hidden)
+    for idx, leader_value in enumerate(await leader.value.get()):
+        keys.append(f'Valeur {idx}')
+        keys.append(f'Utilisateur {idx}')
+        lst[0].append(leader_value)
+        lst[0].append(leader_owner)
+        for follower_idx, follower_name in enumerate(follower_names):
+            follower_option = config.option(follower_name, idx)
+            if idx == 0:
+                doc = await follower_option.option.doc()
+                properties = await follower_option.property.get()
+                if 'mandatory' in properties:
+                    doc += '*'
+                name = await follower_option.option.name()
+                lst.append([f'{doc} ({name})'])
+                if read_only:
+                    if 'hidden' in properties:
+                        hidden = 'oui'
+                    else:
+                        hidden = ''
+                    lst[-1].append(hidden)
+            try:
+                lst[follower_idx + 1].append(list_to_string(await follower_option.value.get()))
+                lst[follower_idx + 1].append(await follower_option.owner.get())
+            except PropertiesOptionError:
+                pass
+    # leader = next leader_iter
+    # if master_values is None:
+    #     master_values = await subconfig.value.get()
+    return {'keys': keys, 'lst': lst}
+
+
+async def table(config, prefix_len, values, read_only):
+    lst = []
+    for subconfig in await config.option.list(type="all"):
+        # prefix = prefix_len * 2 * ' '
+        # if await subconfig.option.isoptiondescription():
+        #     prefix += '=>'
+        # else:
+        #     prefix += '-'
+        # display_str = f'{prefix} {description}'
+        # if name != description:
+        #     display_str = f'{display_str} ({name})'
+        name = await subconfig.option.name()
+        doc = await subconfig.option.doc()
+        if prefix_len == 0 and ROUGAIL_NAMESPACE != name:
+            doc = doc.capitalize()
+        if prefix_len == 0 and name == 'services':
+            values['Services'] = {}
+            await services(subconfig, values['Services'])
+        elif await subconfig.option.isoptiondescription():
+            od_name = f'{doc} ({(await subconfig.option.path()).split(".", 1)[1]})'
+            values[od_name] = None
+            if await subconfig.option.isleadership():
+                values[od_name] = await table_leader(subconfig, read_only)
+            else:
+                values[od_name] = await table(subconfig, prefix_len + 1, values, read_only)
+        else:
+            value = list_to_string(await subconfig.value.get())
+            doc = await subconfig.option.doc()
+            properties = await subconfig.property.get()
+            if 'mandatory' in properties:
+                doc += '*'
+            name = await subconfig.option.name()
+            lst.append([f'{doc} ({name})', value])
+            if read_only:
+                if 'hidden' in properties:
+                    hidden = 'oui'
+                else:
+                    hidden = ''
|
lst[-1].append(hidden)
|
||||||
|
lst[-1].append(await subconfig.owner.get())
|
||||||
|
keys = ['Description', 'Valeur']
|
||||||
|
if read_only:
|
||||||
|
keys.append('Cachée')
|
||||||
|
keys.append('Utilisateur')
|
||||||
|
return {'keys': keys, 'lst': lst}
|
||||||
|
|
||||||
|
|
||||||
|
async def main():
|
||||||
|
parser = ArgumentParser()
|
||||||
|
parser.add_argument('server_name')
|
||||||
|
parser.add_argument('--read_only', action='store_true')
|
||||||
|
parser.add_argument('--nocache', action='store_true')
|
||||||
|
args = parser.parse_args()
|
||||||
|
if args.nocache:
|
||||||
|
remove_cache()
|
||||||
|
|
||||||
|
values = {}
|
||||||
|
server_name = args.server_name
|
||||||
|
config = await load(hide_secret=HIDE_SECRET,
|
||||||
|
original_display_name=True,
|
||||||
|
valid_mandatories=args.read_only,
|
||||||
|
)
|
||||||
|
if not args.read_only:
|
||||||
|
await config.property.read_write()
|
||||||
|
root_option = config.option(normalize_family(server_name))
|
||||||
|
try:
|
||||||
|
await root_option.option.get()
|
||||||
|
except AttributeError:
|
||||||
|
exit(f'Unable to find {server_name} configuration: {[await o.option.name() for o in await config.option.list(type="optiondescription")]}')
|
||||||
|
await table(root_option, 0, values, args.read_only)
|
||||||
|
for title, dico in values.items():
|
||||||
|
if title == 'Services':
|
||||||
|
if not dico:
|
||||||
|
continue
|
||||||
|
print()
|
||||||
|
print(title)
|
||||||
|
print('=' * len(title))
|
||||||
|
print()
|
||||||
|
for subtitle, dic in dico.items():
|
||||||
|
print()
|
||||||
|
print(' ' + subtitle)
|
||||||
|
print(' ' + '-' * len(subtitle))
|
||||||
|
print()
|
||||||
|
print(tabulate(dic['lst'], headers=dic['keys'], tablefmt="fancy_grid"))
|
||||||
|
elif dico['lst']:
|
||||||
|
print()
|
||||||
|
print(title)
|
||||||
|
print('=' * len(title))
|
||||||
|
print()
|
||||||
|
print(tabulate(dico['lst'], headers=dico['keys'], tablefmt="fancy_grid"))
|
||||||
|
|
||||||
|
|
||||||
|
run(main())
|
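For context, every entry collected by table() and services() above is a plain dict with a 'keys' list (column headers) and a 'lst' list (rows), which is exactly what tabulate consumes. The following is a minimal, hypothetical illustration of that shape; the sample values are invented and not taken from a real configuration:

    from tabulate import tabulate

    # hypothetical entry in the same {'keys': ..., 'lst': ...} shape built by table()
    section = {'keys': ['Description', 'Valeur', 'Utilisateur'],
               'lst': [['Nom de domaine (domain_name)*', 'example.net', 'servers.yml'],
                       ['Adresse IP (ip)', '192.168.0.1', 'default']]}
    print(tabulate(section['lst'], headers=section['keys'], tablefmt="fancy_grid"))
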
31
sbin/risotto_templates
Executable file
@@ -0,0 +1,31 @@
#!/usr/bin/env python3

from asyncio import run
from argparse import ArgumentParser
from traceback import print_exc

from risotto.machine import templates, remove_cache, load, INSTALL_DIR


async def main():
    parser = ArgumentParser()
    parser.add_argument('server_name')
    parser.add_argument('--nocache', action='store_true')
    parser.add_argument('--debug', action='store_true')
    args = parser.parse_args()
    if args.nocache:
        remove_cache()

    config = await load()
    try:
        await templates(args.server_name,
                        config,
                        )
    except Exception as err:
        if args.debug:
            print_exc()
        exit(err)
    print(f'templates generated in {INSTALL_DIR} directory')


run(main())
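A quick sketch of what the script above does, written as the equivalent Python calls; it assumes the risotto package is importable, and the server name is a made-up example:

    # equivalent to running sbin/risotto_templates for one server
    from asyncio import run
    from risotto.machine import load, templates, INSTALL_DIR

    async def render(server_name: str) -> None:
        config = await load()                  # builds or reuses the tiramisu/values/informations caches
        await templates(server_name, config)   # renders the templates into INSTALL_DIR

    run(render('myserver.example.net'))        # hypothetical server name
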
@@ -1,41 +1,46 @@
-from shutil import copy2, copytree, rmtree
+from shutil import copy2, copytree
 from os import listdir, makedirs
 from os.path import join, isdir, isfile, dirname
 from yaml import load as yaml_load, SafeLoader
-from json import load as json_load
 #
-from rougail import RougailConfig # , RougailConvert
-#
-from .utils import RISOTTO_CONFIG, SERVERS, MULTI_FUNCTIONS
-
-
-FUNCTIONS_FILE = 'funcs.py'
+from .utils import RISOTTO_CONFIG


 class ModuleCfg():
     def __init__(self, module_name):
         self.module_name = module_name
         self.dictionaries_dir = []
-        self.modules = []
-        self.functions_file = [FUNCTIONS_FILE]
+        self.functions_file = []
         self.templates_dir = []
+        self.patches_dir = []
         self.extra_dictionaries = {}
         self.servers = []
         self.depends = []
+        self.manuals = []
+        self.tests = []
+        self.providers = []
+        self.suppliers = []

     def __repr__(self):
         return str(vars(self))


-def list_applications() -> dict:
-    """List all service applications in datasets
-    Returns something link:
-    {<applicationservice>: seed/<applicationservice>
-    """
-    dataset_directory = RISOTTO_CONFIG['directories']['dataset']
-    applications = {}
-    for applicationservice in listdir(dataset_directory):
-        applicationservice_dir = join(dataset_directory, applicationservice)
-        if not isdir(applicationservice_dir):
-            continue
-        if applicationservice in applications:
-            raise Exception(f'multi applicationservice: {applicationservice} ({applicationservice_dir} <=> {applications[applicationservice]})')
+class Applications:
+    def __init__(self) -> None:
+        self.datasets = RISOTTO_CONFIG.get('directories', {}).get('datasets', ['dataset'])
+        self.application_directories = self._load_application_directories()
+
+    def _load_application_directories(self) -> dict:
+        """List all service applications in datasets
+        Returns something link:
+        {<applicationservice>: seed/<applicationservice>}
+        """
+        applications = {}
+        for dataset_directory in self.datasets:
+            for applicationservice in listdir(dataset_directory):
+                applicationservice_dir = join(dataset_directory, applicationservice)
+                if not isdir(applicationservice_dir) or \
+                        not isfile(join(applicationservice_dir, 'applicationservice.yml')):
+                    continue
+                if applicationservice in applications:
+                    raise Exception(f'multi applicationservice: {applicationservice} ({applicationservice_dir} <=> {applications[applicationservice]})')
@@ -43,39 +48,110 @@ def list_applications() -> dict:
         return applications


-def applicationservice_copy(src_file: str,
-                            dst_file: str,
-                            copy_if_not_exists: bool,
-                            ) -> None:
-    if isdir(src_file):
-        if not isdir(dst_file):
-            makedirs(dst_file)
-        for subfilename in listdir(src_file):
-            if not copy_if_not_exists or not isfile(dst_file):
-                src = join(src_file, subfilename)
-                dst = join(dst_file, subfilename)
-                if isfile(src):
-                    copy2(src, dst)
-                else:
-                    copytree(src, dst)
-    elif not copy_if_not_exists or not isfile(dst_file):
-        dst = dirname(dst_file)
-        if not isdir(dst):
-            makedirs(dst)
-        if isfile(src_file):
-            copy2(src_file, dst_file)
-        else:
-            copytree(src_file, dst_file)
-
-
-def load_applicationservice_cfg(appname: str,
-                                as_dir: str,
-                                install_dir: str,
-                                cfg: ModuleCfg,
-                                copy_manual_dir: bool,
-                                copy_tests: bool,
-                                ) -> None:
-    cfg.modules.append(appname)
+class Modules:
+    """Modules are defined by the end user
+    A module is the a list of service applications
+    The class collects all the useful information for the module
+    """
+
+    def __init__(self,
+                 host_applicationsservices: list,
+                 applicationservices: Applications,
+                 applicationservice_provider: str,
+                 modules_name: list,
+                 modules: dict,
+                 ) -> None:
+        self.application_directories = applicationservices.application_directories
+        self.module_infos = {}
+        self.module_infos['host'] = self._load_module_informations('host',
+                                                                   host_applicationsservices,
+                                                                   is_host=True,
+                                                                   )
+        for module_name in modules_name:
+            if modules_name == 'host':
+                raise Exception('forbidden module name: "host"')
+            self.module_infos[module_name] = self._load_module_informations(module_name,
+                                                                            [applicationservice_provider] + modules[module_name],
+                                                                            is_host=False,
+                                                                            )
+
+    def get(self,
+            module_name: str,
+            ) -> ModuleCfg:
+        return self.module_infos[module_name]
+
+    def _load_module_informations(self,
+                                  module_name: str,
+                                  applicationservices: list,
+                                  is_host: bool,
+                                  ) -> ModuleCfg:
+        """Create a ModuleCfg object and collect informations
+        A module must depend to an unique distribution
+        """
+        cfg = ModuleCfg(module_name)
+        distribution = None
+
+        for applicationservice in applicationservices:
+            ret = self._load_applicationservice(applicationservice,
+                                                cfg,
+                                                )
+            if ret:
+                if distribution:
+                    raise Exception(f'duplicate distribution for {cfg.module_name}: {distribution} and {ret} (dependencies: {cfg.depends}) ')
+                distribution = ret
+        if not is_host and not distribution:
+            raise Exception(f'cannot found any linux distribution for {module_name}')
+        return cfg
+
+    def _load_applicationservice(self,
+                                 appname: str,
+                                 cfg: ModuleCfg,
+                                 ) -> str:
+        """extract informations from an application service and load it's dependency
+        informations collected is store to the module
+
+        returns the name of current distribution, if found
+        """
+        if appname not in self.application_directories:
+            raise Exception(f'cannot find application dependency "{appname}"')
+        cfg.depends.append(appname)
+        as_dir = self.application_directories[appname]
+        self._load_applicationservice_directories(as_dir,
+                                                  cfg,
+                                                  )
+        with open(join(as_dir, 'applicationservice.yml')) as yaml:
+            app = yaml_load(yaml, Loader=SafeLoader)
+        provider = app.get('provider')
+        if provider:
+            cfg.providers.setdefault(provider, [])
+            if appname not in cfg.providers[provider]:
+                cfg.providers[provider].append(appname)
+        supplier = app.get('supplier')
+        if supplier:
+            self.suppliers.setdefault(supplier, [])
+            if appname not in self.suppliers[supplier]:
+                self.suppliers[supplier].append(appname)
+        if 'distribution' in app and app['distribution']:
+            distribution = appname
+        else:
+            distribution = None
+        for depend in app.get('depends', []):
+            if depend in cfg.depends:
+                #this dependancy is already loaded for this module
+                continue
+            ret = self._load_applicationservice(depend,
                                                cfg,
+                                                )
+            if ret:
+                if distribution:
+                    raise Exception(f'duplicate distribution for {cfg.module_name}: {distribution} and {ret} (dependencies: {cfg.depends}) ')
+                distribution = ret
+        return distribution
+
+    def _load_applicationservice_directories(self,
+                                             as_dir: str,
+                                             cfg: ModuleCfg,
+                                             ) -> None:
         # dictionaries
         dictionaries_dir = join(as_dir, 'dictionaries')
         if isdir(dictionaries_dir):
@@ -91,6 +167,10 @@ def load_applicationservice_cfg(appname: str,
         templates_dir = join(as_dir, 'templates')
         if isdir(templates_dir):
             cfg.templates_dir.append(templates_dir)
+        # patches
+        patches_dir = join(as_dir, 'patches')
+        if isdir(patches_dir):
+            cfg.patches_dir.append(patches_dir)
         # extras
         extras_dir = join(as_dir, 'extras')
         if isdir(extras_dir):
@@ -98,122 +178,40 @@ def load_applicationservice_cfg(appname: str,
             extra_dir = join(extras_dir, extra)
             if isdir(extra_dir):
                 cfg.extra_dictionaries.setdefault(extra, []).append(extra_dir)
-    if copy_manual_dir:
         # manual
         for type in ['image', 'install']:
-            manual_dir = join(as_dir, 'manual', type)
-            if not isdir(manual_dir):
-                continue
-            for filename in listdir(manual_dir):
-                src_file = join(manual_dir, filename)
-                if type == 'image':
-                    dst_file = join(install_dir, 'manual', filename)
-                    copy_if_not_exists = False
-                else:
-                    dst_file= join(install_dir, '..', filename)
-                    copy_if_not_exists = True
-                applicationservice_copy(src_file,
-                                        dst_file,
-                                        copy_if_not_exists,
-                                        )
-    if copy_tests:
+            manual_dir = join(as_dir, 'manual')
+            if isdir(join(manual_dir, type)):
+                cfg.manuals.append(manual_dir)
+                break
+        # tests
         tests_dir = join(as_dir, 'tests')
         if isdir(tests_dir):
-            for filename in listdir(tests_dir):
-                src_file = join(tests_dir, filename)
-                dst_file = join(install_dir, 'tests', filename)
-                applicationservice_copy(src_file,
-                                        dst_file,
-                                        False,
-                                        )
-
-
-def load_applicationservice(appname: str,
-                            install_dir: str,
-                            cfg: ModuleCfg,
-                            applications: dict,
-                            copy_manual_dir: bool,
-                            copy_tests: bool,
-                            providers: dict,
-                            suppliers: dict,
-                            ) -> None:
-    if appname not in applications:
-        raise Exception(f'cannot find application dependency "{appname}"')
-    as_dir = applications[appname]
-    applicationservice_file = join(as_dir, 'applicationservice.yml')
-    if not isfile(applicationservice_file):
-        raise Exception(f'cannot find application service file "{applicationservice_file}"')
-    load_applicationservice_cfg(appname,
-                                as_dir,
-                                install_dir,
-                                cfg,
-                                copy_manual_dir,
-                                copy_tests,
-                                )
-    cfg.depends.append(appname)
-    with open(applicationservice_file) as yaml:
-        app = yaml_load(yaml, Loader=SafeLoader)
-    provider = app.get('provider')
-    if provider:
-        providers.setdefault(provider, [])
-        if appname not in providers[provider]:
-            providers[provider].append(appname)
-    supplier = app.get('supplier')
-    if supplier:
-        suppliers.setdefault(supplier, [])
-        if appname not in suppliers[supplier]:
-            suppliers[supplier].append(appname)
-    if 'distribution' in app and app['distribution']:
-        distribution = appname
-    else:
-        distribution = None
-    for xml in app.get('depends', []):
-        if xml in cfg.depends:
-            continue
-        ret = load_applicationservice(xml,
-                                      install_dir,
-                                      cfg,
-                                      applications,
-                                      copy_manual_dir,
-                                      copy_tests,
-                                      providers,
-                                      suppliers,
-                                      )
-        if ret:
-            if distribution:
-                raise Exception(f'duplicate distribution for {cfg.module_name}: {distribution} and {ret} (dependencies: {cfg.depends}) ')
-            distribution = ret
-    return distribution
-
-
-def load_image_informations(module_name: str,
-                            install_dir: str,
-                            datas: dict,
-                            applications: dict,
-                            copy_manual_dir: bool,
-                            copy_tests: bool,
-                            providers: dict,
-                            suppliers: dict,
-                            ) -> ModuleCfg:
-    cfg = ModuleCfg(module_name)
-    distribution = None
-    for applicationservice in datas['applicationservices']:
-        ret = load_applicationservice(applicationservice,
-                                      install_dir,
-                                      cfg,
-                                      applications,
-                                      copy_manual_dir,
-                                      copy_tests,
-                                      providers,
-                                      suppliers,
-                                      )
-        if ret:
-            if distribution:
-                raise Exception(f'duplicate distribution for {cfg.module_name}: {distribution} and {ret} (dependencies: {cfg.depends}) ')
-            distribution = ret
-    if module_name != 'host' and not distribution:
-        raise Exception(f'cannot found any linux distribution for {module_name}')
-    return cfg
+def applicationservice_copy(src_file: str,
+                            dst_file: str,
+                            ) -> None:
+    if isdir(src_file):
+        if not isdir(dst_file):
+            makedirs(dst_file)
+        for subfilename in listdir(src_file):
+            #if not copy_if_not_exists or not isfile(dst_file):
+            src = join(src_file, subfilename)
+            dst = join(dst_file, subfilename)
+            if isfile(src):
+                copy2(src, dst)
+            else:
+                copytree(src, dst)
+    else:
+        dst = dirname(dst_file)
+        if not isdir(dst):
+            makedirs(dst)
+        if isfile(src_file):
+            copy2(src_file, dst_file)
+        else:
+            copytree(src_file, dst_file)


 async def valid_mandatories(config):
@@ -232,60 +230,3 @@ async def valid_mandatories(config):
         # await value_pprint(await config.value.dict(), config)
         exit(1)
     #raise Exception('configuration has mandatories variables without values')
-
-
-def load_config(copy_manual_dir=False,
-                copy_tests=False,
-                clean_directories=False,
-                ):
-    module_infos = {}
-    applications = list_applications()
-    with open('servers.json', 'r') as server_fh:
-        jsonfile = json_load(server_fh)
-    SERVERS.update(jsonfile['servers'])
-    modules = jsonfile['modules']
-    for module_name, datas in modules.items():
-        providers = {}
-        suppliers = {}
-        install_dir = join(RISOTTO_CONFIG['directories']['dest'], module_name)
-        if clean_directories:
-            if isdir(install_dir):
-                rmtree(install_dir)
-            makedirs(install_dir)
-        module_infos[module_name] = {'infos': load_image_informations(module_name,
-                                                                      install_dir,
-                                                                      datas,
-                                                                      applications,
-                                                                      copy_manual_dir,
-                                                                      copy_tests,
-                                                                      providers,
-                                                                      suppliers,
-                                                                      ),
-                                     'providers': providers,
-                                     'suppliers': suppliers,
-                                     'install_dir': install_dir,
-                                     }
-    return module_infos
-#
-#
-def load_module_config(module_name: str,
-                       module_info: dict,
-                       ):
-    cfg = RougailConfig.copy()
-    cfg['variable_namespace'] = ROUGAIL_NAMESPACE
-    cfg['variable_namespace_description'] = ROUGAIL_NAMESPACE_DESCRIPTION
-    if module_name == 'host':
-        #FIXME server_name == host ?
-        #FIXME cfg['tmpfile_dest_dir'] = datas['values'][f'{ROUGAIL_NAMESPACE}.host_install_dir'] + '/host/configurations/host'
-        cfg['default_systemd_directory'] = '/usr/local/lib/systemd'
-    cfg['templates_dir'] = module_info['infos'].templates_dir
-    cfg['dictionaries_dir'] = module_info['infos'].dictionaries_dir
-    cfg['functions_file'] = module_info['infos'].functions_file
-    cfg['multi_functions'] = MULTI_FUNCTIONS
-    cfg['extra_dictionaries'] = module_info['infos'].extra_dictionaries
-    cfg['extra_annotators'] = ['risotto.rougail']
-    cfg['internal_functions'] = list(FUNCTIONS.keys())
-    cfg['force_convert_dyn_option_description'] = True
-    cfg['module_name'] = module_name
-    #cfg['patches_dir'] = join(test_dir, 'patches')
-    return cfg
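As a reading aid, Modules._load_applicationservice() above only looks at a few keys of each applicationservice.yml. A minimal sketch of the dictionary that yaml_load() is expected to return; the values and the dependency name are invented, only the key names come from the code:

    # hypothetical content of <dataset>/<applicationservice>/applicationservice.yml after yaml_load()
    app = {'provider': 'Web',            # optional: registered in cfg.providers
           'supplier': 'Host',           # optional: registered in self.suppliers
           'distribution': False,        # truthy only for the single base-distribution service of a module
           'depends': ['base-fedora'],   # hypothetical dependency names, resolved recursively
           }
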
@@ -1,16 +1,19 @@
-from .utils import SERVERS, SERVERS_JSON, MULTI_FUNCTIONS, load_domains
-from .image import load_config, valid_mandatories # , load_modules_rougail_config
-from rougail import RougailConfig, RougailConvert
+from .utils import MULTI_FUNCTIONS, load_zones, value_pprint, RISOTTO_CONFIG
+from .image import Applications, Modules, valid_mandatories, applicationservice_copy
 from .rougail.annotator import calc_providers, calc_providers_global, calc_providers_dynamic, calc_providers_dynamic_follower, calc_providers_follower
-from os.path import isfile
+
+from rougail import RougailConfig, RougailConvert
+from os import remove, makedirs, listdir
+from os.path import isfile, isdir, abspath
 from json import dump as json_dump, load as json_load
+from yaml import load as yaml_load, SafeLoader
 #
-from tiramisu import Config
-from .utils import value_pprint
+from tiramisu import Config, valid_network_netmask, valid_ip_netmask, valid_broadcast, valid_in_network, valid_not_equal, calc_value
 from rougail.utils import normalize_family
 from rougail import RougailSystemdTemplate
-#
-#
+from shutil import rmtree


 def tiramisu_display_name(kls,
                           dyn_name: 'Base'=None,
                           suffix: str=None,
@@ -23,49 +26,77 @@ def tiramisu_display_name(kls,
     return name


-async def set_values(server_name, config, datas):
-    if 'values' not in datas:
-        return
-    server_path = normalize_family(server_name)
-    for vpath, value in datas['values'].items():
-        path = f'{server_path}.{vpath}'
-        try:
-            if isinstance(value, dict):
-                for idx, val in value.items():
-                    await config.option(path, int(idx)).value.set(val)
-            else:
-                await config.option(path).value.set(value)
-        except Exception as err:
-            await value_pprint(await config.value.dict(), config)
-            error_msg = f'cannot configure variable {vpath} for server "{server_name}": {err}'
-            raise Exception(error_msg) from err
-
-
-def get_ip_from_domain(domain):
-    if not domain:
-        return
-    hostname, domainname = domain.split('.', 1)
-    return DOMAINS[domainname][1][DOMAINS[domainname][0].index(hostname)]
-    return optiondescription['option_0']
+CONFIG_FILE = 'servers.yml'


 ROUGAIL_NAMESPACE = 'general'
 ROUGAIL_NAMESPACE_DESCRIPTION = 'Général'
-FUNCTIONS = {'get_ip_from_domain': get_ip_from_domain,
-             'calc_providers': calc_providers,
+TIRAMISU_CACHE = 'tiramisu_cache.py'
+VALUES_CACHE = 'values_cache.json'
+INFORMATIONS_CACHE = 'informations_cache.json'
+INSTALL_DIR = RISOTTO_CONFIG['directories']['dest']
+INSTALL_TEMPLATES_DIR = RISOTTO_CONFIG['directories']['dest_templates']
+FUNCTIONS = {'calc_providers': calc_providers,
             'calc_providers_global': calc_providers_global,
             'calc_providers_dynamic': calc_providers_dynamic,
             'calc_providers_dynamic_follower': calc_providers_dynamic_follower,
             'calc_providers_follower': calc_providers_follower,
+            'valid_network_netmask': valid_network_netmask,
+            'valid_ip_netmask': valid_ip_netmask,
+            'valid_broadcast': valid_broadcast,
+            'valid_in_network': valid_in_network,
+            'valid_not_equal': valid_not_equal,
+            'calc_value': calc_value,
+            'normalize_family': normalize_family,
             }
+
+
+def re_create(dirname):
+    if isdir(dirname):
+        rmtree(dirname)
+    makedirs(dirname)
+
+
+def remove_cache():
+    if isfile(TIRAMISU_CACHE):
+        remove(TIRAMISU_CACHE)
+    if isfile(VALUES_CACHE):
+        remove(VALUES_CACHE)
+    if isfile(INFORMATIONS_CACHE):
+        remove(INFORMATIONS_CACHE)


 async def templates(server_name,
                     config,
-                    templates_informations,
                     just_copy=False,
                     ):
-    engine = RougailSystemdTemplate(config, templates_informations)
+    subconfig = config.option(normalize_family(server_name))
+    try:
+        await subconfig.option.get()
+    except:
+        servers = [await server.option.description() for server in await config.option.list('optiondescription')]
+        raise Exception(f'cannot find server name "{server_name}": {servers}')
+
+    rougailconfig = RougailConfig.copy()
+    rougailconfig['variable_namespace'] = ROUGAIL_NAMESPACE
+    rougailconfig['variable_namespace_description'] = ROUGAIL_NAMESPACE_DESCRIPTION
+    rougailconfig['tmp_dir'] = 'tmp'
+    if not just_copy:
+        rougailconfig['destinations_dir'] = INSTALL_DIR
+    else:
+        rougailconfig['destinations_dir'] = INSTALL_TEMPLATES_DIR
+    rougailconfig['templates_dir'] = await subconfig.information.get('templates_dir')
+    rougailconfig['patches_dir'] = await subconfig.information.get('patches_dir')
+    rougailconfig['functions_file'] = await subconfig.information.get('functions_files')
+    is_host = await subconfig.information.get('module') == 'host'
+    if is_host:
+        host_install_dir = f'{ROUGAIL_NAMESPACE}.host_install_dir'
+        rougailconfig['tmpfile_dest_dir'] = await subconfig.option(host_install_dir).value.get()
+        rougailconfig['default_systemd_directory'] = '/usr/local/lib/systemd'
+    else:
+        rougailconfig['tmpfile_dest_dir'] = '/usr/local/lib'
+        rougailconfig['default_systemd_directory'] = '/systemd'
+    re_create(rougailconfig['destinations_dir'])
+    re_create(rougailconfig['tmp_dir'])
+    engine = RougailSystemdTemplate(subconfig, rougailconfig)
     if just_copy:
         # for all engine to none
         ori_engines = {}
@@ -79,32 +110,40 @@ async def templates(server_name,
     except Exception as err:
         print()
         print(f'=== Configuration: {server_name} ===')
-        values = await config.value.dict()
-        await value_pprint(values, config)
-        print(err)
-        print(await config.option('general.nginx.nginx_default_http').value.get())
+        values = await subconfig.value.dict()
+        await value_pprint(values, subconfig)
         raise err from err
     if just_copy:
         for eng, old_engine in ori_engines.items():
             engine.engines[eng] = old_engine


-async def load(cache_file,
-               cache_values,
-               clean_directories=False,
-               copy_manual_dir=False,
-               copy_tests=False,
-               hide_secret=False,
-               ):
-    display_name=tiramisu_display_name
-    #load_zones()
-    # # load images
-    #FIXME useful
-    module_infos = load_config(copy_manual_dir,
-                               copy_tests,
-                               clean_directories,
-                               )
-    # modules_rougail_config = load_modules_rougail_config(module_infos)
+class Loader:
+    def __init__(self,
+                 cache_file,
+                 cache_values,
+                 cache_informations,
+                 clean_directories,
+                 hide_secret,
+                 original_display_name,
+                 valid_mandatories,
+                 config_file=CONFIG_FILE,
+                 ):
+        self.cache_file = cache_file
+        self.cache_values = cache_values
+        self.cache_informations = cache_informations
+        self.hide_secret = hide_secret
+        self.original_display_name = original_display_name
+        self.valid_mandatories = valid_mandatories
+        self.config_file = config_file
+        if clean_directories:
+            if isdir(INSTALL_DIR):
+                rmtree(INSTALL_DIR)
+            makedirs(INSTALL_DIR)
+
+    def before(self):
+        with open(self.config_file, 'r') as server_fh:
+            self.servers_json = yaml_load(server_fh, Loader=SafeLoader)
         cfg = RougailConfig.copy()
         cfg['variable_namespace'] = ROUGAIL_NAMESPACE
         cfg['variable_namespace_description'] = ROUGAIL_NAMESPACE_DESCRIPTION
@@ -112,62 +151,170 @@ async def load(cache_file,
         cfg['extra_annotators'] = ['risotto.rougail']
         cfg['internal_functions'] = list(FUNCTIONS.keys())
         cfg['force_convert_dyn_option_description'] = True
-    # cfg['module_name'] = module_name
-    functions_files = set()
-    load_domains()
-    for server_name, datas in SERVERS.items():
-        module_info = module_infos[datas['module']]
-        functions_files |= set(module_info['infos'].functions_file)
-    cfg['functions_file'] = list(functions_files)
-    if not isfile(cache_file):
-        eolobj = RougailConvert(cfg)
         cfg['risotto_globals'] = {}
-        for server_name, datas in SERVERS.items():
-            module_info = module_infos[datas['module']]
-            cfg['dictionaries_dir'] = module_info['infos'].dictionaries_dir
-            cfg['extra_dictionaries'] = module_info['infos'].extra_dictionaries
-            informations = SERVERS_JSON['servers'][server_name].get('informations')
-            if informations:
-                cfg['risotto_globals'][server_name] = {'global:server_name': server_name,
-                                                       'global:zones_name': informations['zones_name'],
-                                                       'global:zones_list': list(range(len(informations['zones_name']))),
-                                                       }
-                values = []
-                for s_idx in cfg['risotto_globals'][server_name]['global:zones_list']:
-                    if not s_idx:
-                        values.append(server_name)
-                    else:
-                        values.append(informations['extra_domainnames'][s_idx - 1])
-                cfg['risotto_globals'][server_name]['global:server_names'] = values
-            else:
-                cfg['risotto_globals'][server_name] = {'global:server_name': server_name}
-            cfg['risotto_globals'][server_name]['global:module_name'] = datas['module']
-            eolobj.load_dictionaries(path_prefix=server_name)
-        tiram_obj = eolobj.save(cache_file)
-    else:
-        with open(cache_file) as fh:
-            tiram_obj = fh.read()
+        rougail = RougailConvert(cfg)
+        self.templates_dir = {}
+        self.patches_dir = {}
+        functions_files = set()
+        self.functions_files = {}
+        applicationservices = Applications()
+        zones = self.servers_json['zones']
+        self.modules = {}
+        for host_name, datas in self.servers_json['hosts'].items():
+            modules_name = {mod_datas['module'] for mod_datas in datas['servers'].values()}
+            modules = Modules(datas['applicationservices'],
+                              applicationservices,
+                              datas['applicationservice_provider'],
+                              modules_name,
+                              self.servers_json['modules']
+                              )
+            module_info = modules.get('host')
+            cfg['risotto_globals'][host_name] = {'global:server_name': host_name,
+                                                 'global:module_name': 'host',
+                                                 'global:host_install_dir': abspath(INSTALL_DIR),
+                                                 }
+            functions_files |= set(module_info.functions_file)
+            self.load_dictionaries(cfg, module_info, host_name, rougail)
+            modules_info = {}
+            for server_name, server_datas in datas['servers'].items():
+                module_info = modules.get(server_datas['module'])
+                zones_name = server_datas['informations']['zones_name']
+                values = [f'{server_name}.{zones[zone_name]["domain_name"]}' for zone_name in zones_name]
+                cfg['risotto_globals'][values[0]] = {'global:host_name': host_name,
+                                                     'global:server_name': values[0],
+                                                     'global:server_names': values,
+                                                     'global:zones_name': zones_name,
+                                                     'global:zones_list': list(range(len(zones_name))),
+                                                     'global:module_name': server_datas['module'],
+                                                     }
+                server_datas['server_name'] = values[0]
+                functions_files |= set(module_info.functions_file)
+                self.load_dictionaries(cfg, module_info, values[0], rougail)
+                modules_info[module_info.module_name] = module_info.depends
+            self.modules[host_name] = modules_info
+        cfg['functions_file'] = list(functions_files)
+        self.tiram_obj = rougail.save(self.cache_file)
+
+    def load_dictionaries(self, cfg, module_info, server_name, rougail):
+        cfg['dictionaries_dir'] = module_info.dictionaries_dir
+        cfg['extra_dictionaries'] = module_info.extra_dictionaries
+        cfg['functions_file'] = module_info.functions_file
+        rougail.load_dictionaries(path_prefix=server_name)
+        self.templates_dir[server_name] = module_info.templates_dir
+        self.patches_dir[server_name] = module_info.patches_dir
+        self.functions_files[server_name] = module_info.functions_file
+
+    async def load(self):
         optiondescription = FUNCTIONS.copy()
         try:
-            exec(tiram_obj, None, optiondescription)
+            exec(self.tiram_obj, None, optiondescription)
         except Exception as err:
-            print(tiram_obj)
+            print(self.tiram_obj)
             raise Exception(f'unknown error when load tiramisu object {err}') from err
-    config = await Config(optiondescription['option_0'],
-                          display_name=display_name,
-                          )
-    if not isfile(cache_values):
+        if self.original_display_name:
+            display_name = None
+        else:
+            display_name = tiramisu_display_name
+        self.config = await Config(optiondescription['option_0'],
+                                   display_name=display_name,
+                                   )
+
+    async def after(self):
+        config = self.config
         await config.property.pop('validator')
         await config.property.pop('cache')
-        for server_name, datas in SERVERS.items():
-            await set_values(server_name, config, datas)
+        load_zones(self.servers_json)
+        await config.information.set('zones', self.servers_json['zones'])
+        for host_name, hosts_datas in self.servers_json['hosts'].items():
+            information = config.option(normalize_family(host_name)).information
+            await information.set('module', 'host')
+            await information.set('templates_dir', self.templates_dir[host_name])
+            await information.set('patches_dir', self.patches_dir[host_name])
+            await information.set('functions_files', self.functions_files[host_name])
+            await self.set_values(host_name, config, hosts_datas)
+            for datas in hosts_datas['servers'].values():
+                server_name = datas['server_name']
+                information = config.option(normalize_family(server_name)).information
+                await information.set('module', datas['module'])
+                await information.set('templates_dir', self.templates_dir[server_name])
+                await information.set('patches_dir', self.patches_dir[server_name])
+                await information.set('functions_files', self.functions_files[server_name])
+                await self.set_values(server_name, config, datas)
+
+        # FIXME only one host_name is supported
+        await config.information.set('modules', self.modules[host_name])
+        # await config.information.set('modules', {module_name: module_info.depends for module_name, module_info in self.module_infos.items() if module_name in modules})
         await config.property.read_only()
         await config.property.add('cache')
+        if self.valid_mandatories:
             await valid_mandatories(config)
-        with open(cache_values, 'w') as fh:
+        with open(self.cache_values, 'w') as fh:
             json_dump(await config.value.exportation(), fh)
-    else:
-        with open(cache_values, 'r') as fh:
-            await config.value.importation(json_load(fh))
-    await config.property.read_only()
-    return module_infos, cfg, config
+        with open(self.cache_informations, 'w') as fh:
+            json_dump(await config.information.exportation(), fh)
+
+    async def set_values(self,
+                         server_name,
+                         config,
+                         datas,
+                         ):
+        if 'values' not in datas:
+            return
+        server_path = normalize_family(server_name)
+        await config.owner.set(self.config_file)
+        for vpath, value in datas['values'].items():
+            path = f'{server_path}.{vpath}'
+            try:
+                if isinstance(value, dict):
+                    for idx, val in value.items():
+                        await config.option(path, int(idx)).value.set(val)
+                else:
+                    await config.option(path).value.set(value)
+            except Exception as err:
+                await value_pprint(await config.value.dict(), config)
+                error_msg = f'cannot configure variable {vpath} for server "{server_name}": {err}'
+                raise Exception(error_msg) from err
+        await config.owner.set('user')
+
+    async def finish(self):
+        await self.config.property.read_only()
+
+
+class LoaderCache(Loader):
+    def before(self):
+        with open(self.cache_file) as fh:
+            self.tiram_obj = fh.read()
+
+    async def after(self):
+        with open(self.cache_values, 'r') as fh:
+            await self.config.value.importation(json_load(fh))
+        with open(self.cache_informations, 'r') as fh:
+            informations = json_load(fh)
+            # null is not a valid key in json => 'null'
+            informations[None] = informations.pop('null')
+            await self.config.information.importation(informations)
+
+
+async def load(clean_directories=False,
+               hide_secret=False,
+               original_display_name: bool=False,
+               valid_mandatories: bool=True,
+               ):
+    if isfile(TIRAMISU_CACHE) and isfile(VALUES_CACHE) and isfile(INFORMATIONS_CACHE):
+        loader_obj = LoaderCache
+    else:
+        loader_obj = Loader
+    loader = loader_obj(TIRAMISU_CACHE,
+                        VALUES_CACHE,
+                        INFORMATIONS_CACHE,
+                        clean_directories,
+                        hide_secret,
+                        original_display_name,
+                        valid_mandatories,
+                        )
+    loader.before()
+    await loader.load()
+    await loader.after()
+    await loader.finish()
+    return loader.config
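To make Loader.before()/after() easier to follow, here is a minimal, hypothetical servers.yml expressed as the Python structure yaml_load() would return. Only the key names are taken from the code above; every host, server, zone, module and value name is invented:

    # hypothetical result of yaml_load(open('servers.yml'))
    servers_yml = {
        'hosts': {'host1': {'applicationservices': ['host-systemd'],           # services of the host module
                            'applicationservice_provider': 'provider-base',
                            'servers': {'mail': {'module': 'mail',
                                                 'informations': {'zones_name': ['lan']},
                                                 'values': {'general.mail.domain': 'example.net'}}}}},
        'modules': {'mail': ['postfix']},                                       # module name -> application services
        'zones': {'lan': {'domain_name': 'example.net', 'start_ip': '192.168.0.1'}},
    }
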
@@ -1,10 +1,12 @@
 from rougail.annotator.variable import Walk
+from rougail.error import DictConsistencyError
 from risotto.utils import _, multi_function
 from warnings import warn


 def _parse_kwargs(provider, dns, kwargs, index=None):
+    if not isinstance(dns, list):
+        raise Exception('pfff')
     values = {}
     for key, value in kwargs.items():
         if '_' not in key:
@@ -14,7 +16,15 @@ def _parse_kwargs(provider, dns, kwargs, index=None):
     for idx, data in values.items():
         if index is not None and int(idx) != index:
             continue
-        if 'dns' not in data or (isinstance(data['dns'], list) and dns not in data['dns']) or (not isinstance(data['dns'], list) and data['dns'] != dns):
+        if 'dns' not in data:
+            continue
+        if isinstance(data['dns'], list):
+            for ddns in data['dns']:
+                if ddns in dns:
+                    break
+            else:
+                continue
+        elif data['dns'] not in dns:
             continue
         del data['dns']
         yield data
@@ -142,7 +152,13 @@ class Annotator(Walk):
                 continue
             nf_dns = variable.path.split('.', 1)[0]
             server_name = self.objectspace.space.variables[nf_dns].doc
-            self.suppliers.setdefault(variable.supplier, []).append({'option': variable, 'dns': server_name, 'path_prefix': nf_dns, 'zones': set(self.objectspace.rougailconfig['risotto_globals'][server_name]['global:zones_name'])})
+            self.suppliers.setdefault(variable.supplier, []).append({'option': variable,
+                                                                     'dns': server_name,
+                                                                     'path_prefix': nf_dns,
+                                                                     'server_names': self.objectspace.rougailconfig['risotto_globals'][server_name]['global:server_names'],
+                                                                     'zone_names': self.objectspace.rougailconfig['risotto_globals'][server_name]['global:zones_name'],
+                                                                     'zones': set(self.objectspace.rougailconfig['risotto_globals'][server_name]['global:zones_name'])
+                                                                     })

     def convert_suppliers(self):
         for supplier, data in self.suppliers.items():
@@ -152,10 +168,16 @@ class Annotator(Walk):
             if supplier not in self.providers:
                 continue
             for p_dico in self.providers[supplier]:
-                if s_dico['zones'] & p_dico['zones']:
-                    s_dico['option'].value = p_dico['dns']
+                common_zones = s_dico['zones'] & p_dico['zones']
+                if common_zones:
+                    for idx, zone in enumerate(p_dico['zone_names']):
+                        if zone in common_zones:
+                            break
+                    dns = p_dico['server_names'][idx]
+                    # dns = p_dico["dns"]
+                    s_dico['option'].value = dns
                     new_value = self.objectspace.value(None)
-                    new_value.name = p_dico['dns']
+                    new_value.name = dns
                     s_dico['option'].value = [new_value]
                     break

@@ -168,6 +190,10 @@ class Annotator(Walk):
             nf_dns = variable.path.split('.', 1)[0]
             server_name = self.objectspace.space.variables[nf_dns].doc
             provider_name = variable.provider
+            if self.objectspace.rougailconfig['risotto_globals'][server_name]['global:module_name'] == 'host':
+                server_names = [server_name]
+            else:
+                server_names = self.objectspace.rougailconfig['risotto_globals'][server_name]['global:server_names']
             if ':' in provider_name:
                 key_name, key_type = provider_name.rsplit(':', 1)
                 is_provider = False
@@ -175,7 +201,13 @@ class Annotator(Walk):
                 key_name = key_type = provider_name
                 is_provider = True
             if provider_name != 'Host':
-                self.providers.setdefault(provider_name, []).append({'option': variable, 'dns': server_name, 'path_prefix': nf_dns, 'zones': set(self.objectspace.rougailconfig['risotto_globals'][server_name]['global:zones_name'])})
+                self.providers.setdefault(provider_name, []).append({'option': variable,
+                                                                     'dns': server_name,
+                                                                     'path_prefix': nf_dns,
+                                                                     'server_names': server_names,
+                                                                     'zone_names': self.objectspace.rougailconfig['risotto_globals'][server_name]['global:zones_name'],
+                                                                     'zones': set(self.objectspace.rougailconfig['risotto_globals'][server_name]['global:zones_name']),
+                                                                     })
             if key_name != 'global' and key_name not in self.suppliers:
                 #warn(f'cannot find supplier "{key_name}" for "{server_name}"')
                 continue
@@ -234,10 +266,12 @@ class Annotator(Walk):
             if key_name != 'global':
                 param = self.objectspace.param(variable.xmlfiles)
                 param.name = 'dns'
-                param.text = server_name
+                param.text = server_names
                 fill.param.append(param)
             if key_name == 'global':
                 param = self.objectspace.param(variable.xmlfiles)
+                if provider_name not in self.objectspace.rougailconfig['risotto_globals'][server_name]:
+                    raise DictConsistencyError(f'cannot find provider "{provider_name}" for variable "{variable.name}"', 200, variable.xmlfiles)
                 param.text = self.objectspace.rougailconfig['risotto_globals'][server_name][provider_name]
                 param.name = 'value'
                 fill.param.append(param)
@@ -296,48 +330,3 @@ class Annotator(Walk):
         if not hasattr(self.objectspace.space.variables[nf_dns].constraints, 'fill'):
             self.objectspace.space.variables[nf_dns].constraints.fill = []
         self.objectspace.space.variables[nf_dns].constraints.fill.append(fill)
-
-#    def convert_get_linked_information(self):
-#        if not hasattr(self.objectspace.space, 'constraints') or \
-#                not hasattr(self.objectspace.space.constraints, 'fill'):
-#            return
-#        for fill in self.objectspace.space.constraints.fill:
-#            if fill.name == 'get_linked_configuration':
-#                # add server_name
-#                param = self.objectspace.param(fill.xmlfiles)
-#                param.name = 'server_name'
-#                param.type = 'information'
-#                param.text = 'server_name'
-#                fill.param.append(param)
-#                # add current_user
-#                param = self.objectspace.param(fill.xmlfiles)
-#                param.name = 'current_user'
-#                param.type = 'information'
-#                param.text = 'current_user'
-#                fill.param.append(param)
-#                # add test
-#                param = self.objectspace.param(fill.xmlfiles)
-#                param.name = 'test'
-#                param.type = 'target_information'
-#                param.text = 'test'
-#                fill.param.append(param)
-#
-#    def convert_provider(self):
-#        if not hasattr(self.objectspace.space, 'variables'):
-#            return
-#        for family in self.get_families():
-#            if not hasattr(family, 'provider'):
-#                continue
-#            if 'dynamic' not in vars(family):
-#                raise Exception(_(f'{family.name} is not a dynamic family so cannot have provider attribute'))
-#            if not hasattr(family, 'information'):
-#                family.information = self.objectspace.information(family.xmlfiles)
-#            family.information.provider = family.provider
-#            del family.provider
-#            for variable in self.get_variables():
-#                if not hasattr(variable, 'provider'):
-#                    continue
-#                if not hasattr(variable, 'information'):
-#                    variable.information = self.objectspace.information(variable.xmlfiles)
-#                variable.information.provider = variable.provider
-#                del variable.provider
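The reworked convert_suppliers() above now resolves the supplier to the provider's DNS name in the zone they share, instead of the provider's primary name. A small standalone sketch of that selection, with invented zone and server names:

    # provider visible in two zones, supplier only in 'lan'
    p_dico = {'zone_names': ['dmz', 'lan'],
              'server_names': ['proxy.dmz.example.net', 'proxy.lan.example.net']}
    s_dico = {'zones': {'lan'}}
    common_zones = s_dico['zones'] & set(p_dico['zone_names'])
    for idx, zone in enumerate(p_dico['zone_names']):
        if zone in common_zones:
            break
    dns = p_dico['server_names'][idx]   # -> 'proxy.lan.example.net', the name in the shared zone
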
@@ -1,18 +1,11 @@
 from os import environ
-from json import load
 from typing import List
 from ipaddress import ip_address
 from toml import load as toml_load
 from pprint import pprint


-SETTINGS = {'config': None}
 MULTI_FUNCTIONS = []
-DOMAINS = {}
-ZONES = {}
-SERVERS_JSON = {}
-SERVERS = {}
-CONFIGS = {}


 with open(environ.get('CONFIG_FILE', 'risotto.conf'), 'r') as fh:
@@ -40,68 +33,20 @@ async def value_pprint(dico, config):
     pprint(pprint_dict)


-def load_zones_server():
-    if 'zones' in SERVERS_JSON:
-        return
-    with open('servers.json', 'r') as server_fh:
-        SERVERS_JSON.update(load(server_fh))
-
-
-def load_zones():
-    global ZONES
-    if ZONES:
-        return
-
-    load_zones_server()
-    ZONES.update(SERVERS_JSON['zones'])
-    for server_name, server in SERVERS_JSON['servers'].items():
-        if 'informations' not in server:
-            continue
-        server_zones = server['informations']['zones_name']
-        server_extra_domainnames = server['informations'].get('extra_domainnames', [])
-        if len(server_zones) > 1 and len(server_zones) != len(server_extra_domainnames) + 1:
-            raise Exception(f'the server "{server_name}" has more that one zone, please set correct number of extra_domainnames ({len(server_zones) - 1} instead of {len(server_extra_domainnames)})')
-
-        for idx, zone_name in enumerate(server_zones):
-            zone_domain_name = ZONES[zone_name]['domain_name']
-            if idx == 0:
-                zone_server_name = server_name
-            else:
-                zone_server_name = server_extra_domainnames[idx - 1]
-            server_domain_name = zone_server_name.split('.', 1)[1]
-            if zone_domain_name and zone_domain_name != server_domain_name:
-                raise Exception(f'wrong server_name "{zone_server_name}" in zone "{zone_name}" should ends with "{zone_domain_name}"')
-            ZONES[zone_name].setdefault('hosts', []).append(server_name)
-
-
-def load_domains():
-    global DOMAINS
-    if DOMAINS:
-        return
-    load_zones()
-    for zone_name, zone in SERVERS_JSON['zones'].items():
-        if 'domain_name' in zone:
-            hosts = []
-            ips = []
-            for host in ZONES[zone_name].get('hosts', []):
-                hosts.append(host.split('.', 1)[0])
-                ips.append(_get_ip(host, [zone_name], 0))
-            DOMAINS[zone['domain_name']] = (tuple(hosts), tuple(ips))
+def load_zones(servers_json):
+    zones = servers_json['zones']
+    for host_name, hosts in servers_json['hosts'].items():
+        for server_name, server in hosts['servers'].items():
+            server_zones = server['informations']['zones_name']
+            for idx, zone_name in enumerate(server_zones):
+                zone = zones[zone_name]
+                zone.setdefault('hosts', {})
+                zone['hosts'][server_name] = _get_ip(server_name, zone)


 def _get_ip(server_name: str,
-            zones_name: List[str],
-            index: str,
+            zone: dict,
             ) -> str:
-    if server_name is None or zones_name is None:
-        return
-    load_zones()
-    index = int(index)
-    zone_name = zones_name[index]
-    if zone_name not in ZONES:
-        raise ValueError(f"cannot set IP in unknown zone '{zone_name}'")
-    zone = ZONES[zone_name]
-    if server_name not in zone['hosts']:
-        raise ValueError(f"cannot set IP in unknown server '{server_name}'")
-    server_index = zone['hosts'].index(server_name)
+    # FIXME make a cache, machine should not change IP
+    server_index = len(zone['hosts'])
    return str(ip_address(zone['start_ip']) + server_index)
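With the reworked load_zones()/_get_ip() above, each machine simply receives the next address after the zone's start_ip, in registration order. A short illustration of that arithmetic with made-up values:

    from ipaddress import ip_address

    zone = {'domain_name': 'example.net', 'start_ip': '192.168.0.10', 'hosts': {}}
    for name in ('mail', 'proxy'):                       # registration order decides the offset
        zone['hosts'][name] = str(ip_address(zone['start_ip']) + len(zone['hosts']))
    # zone['hosts'] == {'mail': '192.168.0.10', 'proxy': '192.168.0.11'}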