egarette@silique.fr 2023-02-27 14:03:56 +01:00
parent afd28627f3
commit bb35d6cf3e
14 changed files with 565 additions and 290 deletions

View file

@@ -2,10 +2,11 @@
from asyncio import run
from os import readlink, walk, chdir, getcwd, makedirs
from os.path import join, islink, isdir
from risotto.machine import load, templates, INSTALL_DIR, INSTALL_CONFIG_DIR, INSTALL_TMPL_DIR, INSTALL_IMAGES_DIR, INSTALL_TESTS_DIR
from rougail.utils import normalize_family
from risotto.machine import build_files, INSTALL_DIR, INSTALL_CONFIG_DIR, INSTALL_TMPL_DIR, INSTALL_IMAGES_DIR, INSTALL_TESTS_DIR
from shutil import rmtree
import tarfile
try:
from ansible.plugins.action import ActionBase
from ansible.module_utils.basic import AnsibleModule
@@ -21,40 +22,10 @@ except:
ARCHIVES_DIR = '/tmp/new_configurations'
async def build_files(hostname: str,
only_machine: str,
just_copy: bool,
copy_tests: bool,
) -> None:
config = await load(copy_tests=copy_tests)
if only_machine:
machines = [only_machine]
else:
machines = [await subconfig.option.description() for subconfig in await config.option.list(type='optiondescription')]
# shasums = {}
directories = {}
for machine in machines:
if just_copy and hostname == machine:
continue
await templates(machine,
config,
just_copy=just_copy,
copy_manuals=True,
)
#FIXME dest_dir?
is_host = machine == hostname
# shasums[machine] = {'shasums': get_shasums(dest_dir, is_host)}
if is_host:
# shasums[machine]['config_dir'] = '/usr/local/lib'
directories[machine] = '/usr/local/lib'
else:
# shasums[machine]['config_dir'] = await config.option(normalize_family(machine)).option('general.config_dir').value.get()
directories[machine] = await config.option(normalize_family(machine)).option('general.config_dir').value.get()
return directories
def is_diff(server_name, remote_directories):
def is_diff(server_name,
remote_directories,
certificates,
):
ret = {}
module = FakeModule()
current_path = getcwd()
@@ -72,6 +43,14 @@ def is_diff(server_name, remote_directories):
elif remote_directories[path] != readlink(full_path):
return True
remote_directories.pop(path)
if remote_directories:
for certificate in certificates:
for typ in ['name', 'private', 'authority']:
if not typ in certificate:
continue
name = certificate[typ][1:]
if name in remote_directories:
remote_directories.pop(name)
if remote_directories:
return True
return False
@@ -89,7 +68,7 @@ class ActionModule(ActionBase):
copy_templates = module_args.pop('copy_templates')
else:
copy_templates = False
directories = run(build_files(hostname,
directories, certificates = run(build_files(hostname,
only_machine,
False,
copy_tests,
@@ -114,11 +93,17 @@ class ActionModule(ActionBase):
))
machines_changed = []
tls_machine = None
for machine, directory in directories.items():
if machine.startswith('tls.'):
tls_machine = machine
if directory not in remote['directories']:
machines_changed.append(machine)
continue
if is_diff(machine, remote['directories'][directory]):
if is_diff(machine,
remote['directories'][directory],
certificates['certificates'].get(machine, []),
):
machines_changed.append(machine)
current_path = getcwd()
if isdir(ARCHIVES_DIR):
@@ -201,4 +186,9 @@ class ActionModule(ActionBase):
)
chdir(current_path)
changed = machines_changed != []
return dict(ansible_facts=dict({}), changed=changed, machines_changed=machines, host_changed=self._task.args['hostname'] in machines_changed)
return dict(ansible_facts=dict({}),
changed=changed,
machines_changed=machines,
host_changed=self._task.args['hostname'] in machines_changed,
tls_machine=tls_machine,
)
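For context: build_files() is moved into risotto.machine in this commit (see that file's diff below) and now returns a pair, the per-machine configuration directories plus the collected certificate metadata, which the action plugin unpacks. A rough sketch of the call, with an illustrative host name:

from asyncio import run
from risotto.machine import build_files

# Sketch only: 'host.example.net' is a made-up host name.
directories, certificates = run(build_files('host.example.net',  # hostname
                                            None,                # only_machine: build every machine
                                            False,               # just_copy
                                            False,               # copy_tests
                                            ))
# directories maps each machine to its remote config directory ('/usr/local/lib' for the host itself);
# certificates['certificates'] maps each machine to its certificate entries, consumed by is_diff().
for machine, directory in directories.items():
    machine_certificates = certificates['certificates'].get(machine, [])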

View file

@@ -57,3 +57,9 @@
src: "/tmp/new_configurations/tests.tar"
dest: "/var/lib/risotto/tests"
when: copy_tests
- name: "Create TLS directory"
file:
path: /var/lib/risotto/tls
state: directory
mode: "755"

View file

@@ -154,6 +154,7 @@ def run_module():
module_args = dict(
state=dict(type='str', required=True),
machines=dict(type='list', required=True),
tls_machine=dict(type='str', required=True),
)
# seed the result dict in the object
@@ -183,7 +184,10 @@ def run_module():
# manipulate or modify the state as needed (this is going to be the
# part where your module will do what it needs to do)
machines = module.params['machines']
tls_machine = module.params['tls_machine']
if module.params['state'] == 'stopped':
if tls_machine and tls_machine not in machines:
machines.append(tls_machine)
bus = SystemBus()
result['changed'], errors = stop(bus, machines)
if errors:

View file

@@ -1,16 +1,42 @@
#- name: Print return information from the previous task
# ansible.builtin.debug:
# var: build_host.machines_changed
- name: "Rebuild images"
ansible.builtin.shell: "/usr/local/sbin/update_images just_need_images"
register: ret
failed_when: ret.rc != 0
- name: "Stop machine TLS"
machinectl:
state: stopped
machines: "{{ build_host.tls_machine }}"
tls_machine: "{{ build_host.tls_machine }}"
when: build_host.tls_machine in build_host.machines_changed
- name: "Remove TLS files directory"
file:
path: "/var/lib/risotto/configurations/{{ build_host.tls_machine }}"
state: absent
when: build_host.tls_machine in build_host.machines_changed
- name: "Copy TLS configuration"
unarchive:
src: /tmp/new_configurations/machines.tar
dest: "/var/lib/risotto/configurations/"
include: "{{ build_host.tls_machine }}"
owner: root
group: root
when: build_host.tls_machine in build_host.machines_changed
- name: "Start machine TLS"
machinectl:
state: started
machines: "{{ build_host.tls_machine }}"
tls_machine: "{{ build_host.tls_machine }}"
when: build_host.tls_machine in build_host.machines_changed
- name: "Stop machines with new configuration {{ build_host.machines_changed }}"
machinectl:
state: stopped
machines: "{{ build_host.machines_changed }}"
tls_machine: "{{ build_host.tls_machine }}"
- name: "Remove files directory"
file:
@@ -30,11 +56,13 @@
machinectl:
state: enabled
machines: "{{ vars | machineslist(only_name=True) }}"
tls_machine: "{{ build_host.tls_machine }}"
- name: "Start machines"
machinectl:
state: started
machines: "{{ vars | machineslist(only_name=True) }}"
tls_machine: "{{ build_host.tls_machine }}"
- name: "Remove compressed files directory"
local_action:

View file

@@ -1 +0,0 @@
../pki/

View file

@@ -11,6 +11,10 @@
copy_templates: "{{ copy_templates }}"
register: build_host
- name: Print return information from the previous task
ansible.builtin.debug:
var: build_host
- name: "Configure the host"
include_tasks: host.yml
when: configure_host == true
@@ -19,7 +23,7 @@
include_tasks: machine.yml
when: item.name in build_host.machines_changed
loop: "{{ vars | machineslist(only=only_machine) }}"
#
# - name: "Remove images"
# include_tasks: remove_image.yml
# loop: "{{ vars | machineslist(only=only_machine) }}"

View file

@@ -1,6 +1,6 @@
#!/bin/bash -e
START=$1
#START=$1
BACKUP_DIR="/root/backup"
MACHINES=""
@@ -15,18 +15,23 @@ done
cd /var/lib/risotto/srv/
mkdir -p "$BACKUP_DIR"
for machine in $MACHINES; do
machinectl stop $machine || true
while true; do
machinectl status "$machine" > /dev/null 2>&1 || break
sleep 1
done
# machinectl stop $machine || true
# while true; do
# machinectl status "$machine" > /dev/null 2>&1 || break
# sleep 1
# done
BACKUP_FILE="$BACKUP_DIR/backup_$machine.tar.bz2"
rm -f "$BACKUP_FILE"
tar -cvJf $BACKUP_FILE $machine
if [ -f "/var/lib/risotto/configurations/$machine/sbin/risotto_backup" ]; then
machinectl -q shell $machine /usr/local/lib/sbin/risotto_backup
tar -cJf $BACKUP_FILE $machine/backup
else
tar -cJf $BACKUP_FILE $machine
fi
done
if [ -z "$START" ]; then
machinectl start $MACHINES
fi
#if [ -z "$START" ]; then
# machinectl start $MACHINES
#fi
exit 0

View file

@@ -79,7 +79,9 @@ function install_base() {
function new_package() {
if [ "$INSTALL_TOOL" = "dnf" ]; then
OPT=$(dnf_opt_base "$IMAGE_NAME_RISOTTO_IMAGE_DIR_TMP")
set +e
dnf --assumeno $OPT update >> /var/log/risotto/build_image.log
set -e
OPT=$(dnf_opt "$IMAGE_NAME_RISOTTO_IMAGE_DIR_TMP" "$PKG")
dnf --assumeno $OPT | grep ^" " > "$IMAGE_NAME_RISOTTO_IMAGE_DIR".pkgs.new
else
@@ -92,6 +94,9 @@ function install_pkg() {
OPT=$(dnf_opt "$IMAGE_NAME_RISOTTO_IMAGE_DIR_TMP" "$PKG")
dnf --assumeyes $OPT
else
if [ "$ONLY_IF_DATASET_MODIF" = true ]; then
chroot "$IMAGE_NAME_RISOTTO_IMAGE_DIR_TMP" apt update
fi
chroot "$IMAGE_NAME_RISOTTO_IMAGE_DIR_TMP" bash -c "export DEBIAN_FRONTEND=noninteractive; apt install --no-install-recommends --yes $PKG"
fi
}

View file

@@ -2,6 +2,7 @@
from os import listdir
from os.path import isdir, join
from tabulate import tabulate
from sys import argv
from rougail import RougailConfig
from rougail.convert import RougailConvert
@@ -16,7 +17,7 @@ rougailconfig['variable_namespace_description'] = ROUGAIL_NAMESPACE_DESCRIPTION
DEFAULT_TYPE = 'string'
ROUGAIL_VARIABLE_TYPE = 'https://cloud.silique.fr/gitea/risotto/rougail/src/branch/main/doc/variable/README.md#le-type-de-la-variable'
ROUGAIL_VARIABLE_TYPE = 'https://forge.cloud.silique.fr/risotto/rougail/src/branch/main/doc/variable/README.md#le-type-de-la-variable'
def add_title_family(elts, dico):
@@ -39,7 +40,7 @@ def add_title_family(elts, dico):
def parse(applicationservice, elts, dico, providers_suppliers, hidden):
def parse(applicationservice, elts, dico, providers_suppliers, hidden, objectspace):
elt = elts[-1]
first_variable = True
if not hidden:
@@ -105,19 +106,57 @@ def parse(applicationservice, elts, dico, providers_suppliers, hidden):
if ':' not in supplier:
providers_suppliers['suppliers'].setdefault(supplier, []).append(applicationservice)
else:
parse(applicationservice, elts + [child], dico, providers_suppliers, hidden)
parse(applicationservice, elts + [child], dico, providers_suppliers, hidden, objectspace)
applicationservices = listdir('seed')
#applicationservices = ['speedtest-rs']
#applicationservices = ['redis']
applicationservices_data = {}
def build_dependencies_tree(applicationservice, applicationservice_data, applicationservices_data, applicationservices_data_ext, space):
depends = []
if applicationservice_data['depends']:
if applicationservice in applicationservices_data:
app_data = applicationservices_data[applicationservice]
else:
for url, apps_data in applicationservices_data_ext.items():
if applicationservice in apps_data:
app_data = apps_data[applicationservice]
break
else:
raise Exception(f'cannot find applicationservice "{applicationservice}"')
for idx, depend in enumerate(app_data['depends']):
if depend in applicationservices_data:
url = '..'
ext = False
else:
for url, apps_data in applicationservices_data_ext.items():
if depend in apps_data:
break
else:
raise Exception(f'cannot find applicationservice "{applicationservice}"')
ext = True
subdepends = build_dependencies_tree(depend, applicationservice_data, applicationservices_data, applicationservices_data_ext, space + 2)
if not idx or subdepends:
title = '\n'
else:
title = ''
depend_desc = depend
if ext:
depend_desc += ' (in external dataset)'
title = ' ' * space + f'- [{depend_desc}]({url}/{depend}/README.md)'
depends.append(title)
depends.extend(subdepends)
return depends
def load_data(url, directory, applicationservices_data, global_data={}):
root_path = join(directory, 'seed')
applicationservices = listdir(root_path)
tmps = {}
for applicationservice in applicationservices:
as_dir = join('seed', applicationservice)
as_dir = join(root_path, applicationservice)
if not isdir(as_dir):
continue
applicationservice_data = load_application_service(as_dir)
if not applicationservice_data.get('documentation', True):
continue
applicationservices_data[applicationservice] = {'description': applicationservice_data['description'],
'website': applicationservice_data.get('website'),
'as_dir': as_dir,
@@ -125,15 +164,25 @@ for applicationservice in applicationservices:
'used_by': [],
}
if applicationservice in tmps:
applicationservices_data[applicationservice]['used_by'] = tmps.pop(applicationservice)
for app in tmps.pop(applicationservice):
used_by = f'[{app}](../{app}/README.md)'
applicationservices_data[applicationservice]['used_by'].append(used_by)
if 'depends' in applicationservice_data:
for depend in applicationservice_data['depends']:
applicationservices_data[applicationservice]['depends'].append(depend)
if depend in applicationservices_data:
applicationservices_data[depend]['used_by'].append(applicationservice)
used_by = f'[{applicationservice}](../{applicationservice}/README.md)'
applicationservices_data[depend]['used_by'].append(used_by)
else:
tmps.setdefault(depend, []).append(applicationservice)
if tmps and global_data:
for depend, applications in tmps.items():
for app in applications:
used_by = f'[{app} (in external dataset)]({url}/{app}/README.md)'
global_data[depend]['used_by'].append(used_by)
def write_data(applicationservices_data, applicationservices_data_ext):
dico = {}
providers_suppliers = {'providers': {}, 'suppliers': {}}
for applicationservice, applicationservice_data in applicationservices_data.items():
@@ -160,30 +209,12 @@ for applicationservice, applicationservice_data in applicationservices_data.item
if hasattr(objectspace.space, 'variables'):
dico[applicationservice] = {}
for name, elt in objectspace.space.variables.items():
parse(applicationservice, [elt], dico[applicationservice], providers_suppliers, False)
def build_dependencies_tree(applicationservice, space):
depends = []
if applicationservice_data['depends']:
for idx, depend in enumerate(applicationservices_data[applicationservice]['depends']):
subdepends = build_dependencies_tree(depend, space + 2)
if not idx or subdepends:
title = '\n'
else:
title = ''
title = ' ' * space + f'- [{depend}](../{depend}/README.md)'
depends.append(title)
depends.extend(subdepends)
return depends
parse(applicationservice, [elt], dico[applicationservice], providers_suppliers, False, objectspace)
for applicationservice, applicationservice_data in applicationservices_data.items():
as_dir = applicationservice_data['as_dir']
with open(join(as_dir, 'README.md'), 'w') as as_fh:
as_fh.write(f'---\ngitea: none\ninclude_toc: true\n---\n\n')
as_fh.write(f'# {applicationservice}\n\n')
as_fh.write(f'[All applications services for this dataset.](../README.md)\n\n')
as_fh.write(f'## Description\n\n')
description = applicationservice_data['description'] + '.\n'
if applicationservice_data['website']:
@@ -191,7 +222,7 @@ for applicationservice, applicationservice_data in applicationservices_data.item
as_fh.write(description)
if applicationservice_data['depends']:
as_fh.write(f'\n## Dependances\n\n')
for depend in build_dependencies_tree(applicationservice, 0):
for depend in build_dependencies_tree(applicationservice, applicationservice_data, applicationservices_data, applicationservices_data_ext, 0):
as_fh.write(f'{depend}\n')
if applicationservice in dico and dico[applicationservice]:
as_fh.write('\n## Variables\n\n')
@@ -235,10 +266,10 @@ for applicationservice, applicationservice_data in applicationservices_data.item
as_fh.write('\n## Used by\n\n')
if len(applicationservice_data['used_by']) == 1:
link = applicationservice_data['used_by'][0]
as_fh.write(f'[{link}](../{link}/README.md)\n')
as_fh.write(f'{link}\n')
else:
for link in applicationservice_data['used_by']:
as_fh.write(f'- [{link}](../{link}/README.md)\n')
as_fh.write(f'- {link}\n')
linked = []
for provider, provider_as in providers_suppliers['providers'].items():
if not applicationservice in provider_as:
@@ -273,7 +304,7 @@ for applicationservice, applicationservice_data in applicationservices_data.item
as_fh.write('\n## Providers\n\n')
for provider in linked:
as_fh.write(f'- [{provider}](../{provider}/README.md)\n')
as_fh.write(f'\n[All applications services for this dataset.](../README.md)\n')
with open('seed/README.md', 'w') as as_fh:
as_fh.write('# Application services\n\n')
@@ -294,9 +325,10 @@ with open('seed/README.md', 'w') as as_fh:
for applicationservice in applicationservices_:
applicationservice_data = applicationservices_data[applicationservice]
as_fh.write(f' - [{applicationservice}]({applicationservice}/README.md): {applicationservice_data["description"]}\n')
as_fh.write('\n# Providers and suppliers\n\n')
providers = list(providers_suppliers['providers'].keys())
providers.sort()
if providers:
as_fh.write('\n# Providers and suppliers\n\n')
for provider in providers:
as_fh.write(f'- {provider}:\n')
if providers_suppliers['providers'][provider]:
@@ -315,3 +347,22 @@ with open('seed/README.md', 'w') as as_fh:
as_fh.write(f' - Suppliers:\n')
for applicationservice in providers_suppliers['suppliers'][provider]:
as_fh.write(f' - [{applicationservice}]({applicationservice}/README.md)\n')
def main():
applicationservices_data = {}
load_data('..', '', applicationservices_data)
applicationservices_data_ext = {}
for arg in argv[1:]:
if '=' not in arg:
raise Exception(f'cannot parse argument "{arg}", should be dataset_path=url')
path, url = arg.split('=', 1)
if url in applicationservices_data_ext:
raise Exception(f'duplicate url "{url}" in arguments')
applicationservices_data_ext[url] = {}
load_data(url, path, applicationservices_data_ext[url], applicationservices_data)
write_data(applicationservices_data, applicationservices_data_ext)
if __name__ == '__main__':
main()
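Usage sketch: the script now takes optional dataset_path=url arguments for external datasets whose application services can appear as dependencies. The path, URL and script name below are made up; the calls mirror main() with example values:

# python3 <this script> ../other-dataset=https://forge.example.org/risotto/other-dataset
applicationservices_data = {}
load_data('..', '', applicationservices_data)                            # local dataset, linked with relative URLs
applicationservices_data_ext = {'https://forge.example.org/risotto/other-dataset': {}}
load_data('https://forge.example.org/risotto/other-dataset',             # URL used to build external links
          '../other-dataset',                                            # local checkout of that dataset
          applicationservices_data_ext['https://forge.example.org/risotto/other-dataset'],
          applicationservices_data)
write_data(applicationservices_data, applicationservices_data_ext)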

View file

@@ -192,6 +192,7 @@ async def main():
parser.add_argument('server_name')
parser.add_argument('--read_only', action='store_true')
parser.add_argument('--nocache', action='store_true')
parser.add_argument('--debug', action='store_true')
args = parser.parse_args()
if args.nocache:
remove_cache()

View file

@@ -4,12 +4,12 @@ from asyncio import run
from argparse import ArgumentParser
from traceback import print_exc
from risotto.machine import templates, remove_cache, load, INSTALL_DIR
from risotto.machine import remove_cache, build_files, INSTALL_DIR
async def main():
parser = ArgumentParser()
parser.add_argument('server_name')
parser.add_argument('server_name', nargs='?')
parser.add_argument('--nocache', action='store_true')
parser.add_argument('--debug', action='store_true')
parser.add_argument('--copy_tests', action='store_true')
@@ -18,19 +18,18 @@ async def main():
if args.nocache:
remove_cache()
config = await load(copy_tests=args.copy_tests, clean_directories=True)
print('fin')
print(await config.option('host_example_net.general.copy_tests').value.get())
try:
await templates(args.server_name,
config,
template=args.template
await build_files(None,
args.server_name,
False,
args.copy_tests,
template=args.template,
)
except Exception as err:
if args.debug:
print_exc()
exit(err)
print(f'templates generated in {INSTALL_DIR} directory')
print(f'templates generated in "{INSTALL_DIR}" directory')
run(main())

View file

@@ -72,7 +72,7 @@ class Modules:
is_host=True,
)
for module_name in modules_name:
if modules_name == 'host':
if module_name == 'host':
raise Exception('forbidden module name: "host"')
self.module_infos[module_name] = self._load_module_informations(module_name,
[applicationservice_provider] + modules[module_name],

View file

@@ -92,6 +92,7 @@ async def templates(server_name,
just_copy=False,
copy_manuals=False,
template=None,
extra_variables=None,
):
subconfig = config.option(normalize_family(server_name))
try:
@@ -123,7 +124,9 @@
re_create(rougailconfig['destinations_dir'])
re_create(rougailconfig['tmp_dir'])
engine = RougailSystemdTemplate(subconfig, rougailconfig)
engine = RougailSystemdTemplate(subconfig,
rougailconfig,
)
if just_copy:
# for all engine to none
ori_engines = {}
@@ -134,14 +137,17 @@
engine.engines[eng] = engine.engines['none']
try:
if not template:
await engine.instance_files()
await engine.instance_files(extra_variables=extra_variables)
else:
await engine.instance_file(template)
await engine.instance_file(template, extra_variables=extra_variables)
except Exception as err:
print()
print(f'=== Configuration: {server_name} ===')
try:
values = await subconfig.value.dict()
await value_pprint(values, subconfig)
except:
pass
raise err from err
if just_copy:
for eng, old_engine in ori_engines.items():
@@ -191,6 +197,7 @@ class Loader:
"""
with open(self.config_file, 'r') as server_fh:
self.servers_json = yaml_load(server_fh, Loader=SafeLoader)
self.add_tls()
# set global rougail configuration
cfg = RougailConfig.copy()
cfg['variable_namespace'] = ROUGAIL_NAMESPACE
@@ -244,22 +251,27 @@
)
# load servers
modules_info = {}
#FIXME ADD TLS in servers !!!
for server_name, server_datas in datas['servers'].items():
module_info = modules.get(server_datas['module'])
zones_name = server_datas['informations']['zones_name']
values = [f'{server_name}.{zones[zone_name]["domain_name"]}' for zone_name in zones_name]
cfg['risotto_globals'][values[0]] = {'global:host_name': host_name,
'global:server_name': values[0],
if server_datas['module'] == 'tls':
true_host_name = f'{server_name}.{zones[list(zones)[0]]["domain_name"]}'
else:
true_host_name = values[0]
cfg['risotto_globals'][true_host_name] = {'global:host_name': host_name,
'global:server_name': true_host_name,
'global:server_names': values,
'global:zones_name': zones_name,
'global:zones_list': list(range(len(zones_name))),
'global:module_name': server_datas['module'],
}
server_datas['server_name'] = values[0]
server_datas['server_name'] = true_host_name
functions_files |= set(module_info.functions_file)
self.load_dictionaries(cfg,
module_info,
values[0],
true_host_name,
rougail,
)
modules_info[module_info.module_name] = module_info.depends
@@ -267,6 +279,45 @@
cfg['functions_file'] = list(functions_files)
self.tiram_obj = rougail.save(TIRAMISU_CACHE)
def add_tls(self):
zones = set()
rp_module_name = None
dns_module_name = None
for module_name, applicationservices in self.servers_json['modules'].items():
if 'nginx-reverse-proxy' in applicationservices:
rp_module_name = module_name
if dns_module_name:
break
if 'unbound' in applicationservices:
dns_module_name = module_name
if rp_module_name:
break
if not rp_module_name or not dns_module_name:
rp_module_name = dns_module_name = None
for host_name, host_datas in self.servers_json['hosts'].items():
zones = [None, None]
for server_name, datas in host_datas['servers'].items():
if datas['module'] == 'tls':
raise Exception(f'forbidden module name "tls" for server {server_name}')
if datas['module'] == rp_module_name and len(datas['informations']['zones_name']) > 0:
# always add tls machine in second zone of reverse proxy
zones[1] = datas['informations']['zones_name'][0]
if datas['module'] == dns_module_name:
# always add tls machine in first zone of dns
zones[0] = datas['informations']['zones_name'][0]
if None in zones:
zones = []
else:
if zones[0] == zones[1]:
zones = [zones[0]]
host_datas['servers']['tls'] = {'module': 'tls',
'informations': {'zones_name': list(zones)},
}
for module_name in self.servers_json['modules']:
if module_name == 'tls':
raise Exception('forbidden module name: "tls"')
self.servers_json['modules']['tls'] = ['tls']
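The net effect of add_tls() on the parsed servers configuration is one extra 'tls' machine per host plus a 'tls' module; roughly as below (the module, host and zone names are made up for illustration):

# Illustrative shape after add_tls(); 'infra', 'dns', 'internal' and 'external' are invented names.
servers_json = {
    'modules': {'infra': ['nginx-reverse-proxy'],
                'dns': ['unbound'],
                'tls': ['tls'],                      # appended by add_tls()
                },
    'hosts': {'host.example.net': {
        'servers': {
            # ... existing servers ...
            'tls': {'module': 'tls',
                    'informations': {'zones_name': ['internal', 'external']},  # DNS zone first, reverse-proxy zone second
                    },
        },
    }},
}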
def load_dictionaries(self, cfg, module_info, server_name, rougail):
cfg['dictionaries_dir'] = module_info.dictionaries_dir
cfg['extra_dictionaries'] = module_info.extra_dictionaries
@@ -402,3 +453,118 @@ async def load(clean_directories=False,
await config.information.set('copy_tests', copy_tests)
await config.cache.reset()
return config
async def build_files(hostname: str,
only_machine: str,
just_copy: bool,
copy_tests: bool,
template: str=None,
) -> None:
with open(CONFIG_FILE, 'r') as server_fh:
servers_json = yaml_load(server_fh, Loader=SafeLoader)
config = await load(copy_tests=copy_tests)
machines = [await subconfig.option.description() for subconfig in await config.option.list(type='optiondescription')]
certificates = {'certificates': {},
'configuration': servers_json['certificates'],
}
# get certificate information
tls_machine = None
for machine in machines:
if machine.startswith('tls.'):
tls_machine = machine
continue
if hostname is None:
# FIXME multi host!
hostname = await config.option(normalize_family(machine)).option('general.host_name').value.get()
if just_copy:
continue
is_host = machine == hostname
if is_host:
continue
machine_config = config.option(normalize_family(machine))
certificate_names = []
private_names = []
for service in await machine_config.option('services').option.list('optiondescription'):
if not await service.option('activate').value.get():
continue
# if await service.option('manage').value.get():
# certificate_type = 'server'
# else:
# certificate_type = 'client'
tls_ca_directory = await machine_config.option('general.tls_ca_directory').value.get()
tls_cert_directory = await machine_config.option('general.tls_cert_directory').value.get()
tls_key_directory = await machine_config.option('general.tls_key_directory').value.get()
try:
for certificate in await service.option('certificates').option.list('all'):
if not await certificate.option('activate').value.get():
continue
certificate_data = await certificate.value.dict()
certificate_data['type'] = await certificate.information.get('type')
certificate_data['authority'] = join(tls_ca_directory, await certificate.information.get('authority') + '.crt')
certificate_data['format'] = await certificate.information.get('format')
is_list_name = isinstance(certificate_data['name'], list)
is_list_domain = isinstance(certificate_data['domain'], list)
if is_list_name != is_list_domain:
raise Exception('certificate name and domain name must be a list together')
if 'provider' not in certificate_data:
certificate_data['provider'] = 'autosigne'
if is_list_name:
if len(certificate_data['name']) != len(certificate_data['domain']):
raise Exception('certificate name and domain name must have same length')
for idx, certificate_name in enumerate(certificate_data['name']):
cert_data = certificate_data.copy()
if certificate_data['format'] == 'cert_key':
cert_data['name'] = join(tls_cert_directory, certificate_name + '.crt')
private = join(tls_key_directory, certificate_name + '.key')
if private in private_names:
raise Exception(f'duplicate private key {private} for {machine}')
cert_data['private'] = private
private_names.append(private)
else:
cert_data['name'] = join(tls_key_directory, certificate_name + '.pem')
cert_data['domain'] = certificate_data['domain'][idx]
if cert_data['name'] in certificate_names:
raise Exception(f'duplicate certificate {cert_data["name"]} for {machine}')
certificates['certificates'].setdefault(machine, []).append(cert_data)
certificate_names.append(cert_data['name'])
else:
name = certificate_data['name']
if certificate_data['format'] == 'cert_key':
certificate_data['name'] = join(tls_cert_directory, name + '.crt')
private = join(tls_key_directory, name + '.key')
if private in private_names:
raise Exception(f'duplicate private key {private} for {machine}')
certificate_data['private'] = private
else:
certificate_data['name'] = join(tls_key_directory, name + '.pem')
if certificate_data['name'] in certificate_names:
raise Exception(f'duplicate certificate {certificate_data["name"]} for {machine}')
certificate_names.append(certificate_data['name'])
certificates['certificates'].setdefault(machine, []).append(certificate_data)
except AttributeError:
pass
directories = {}
for machine in machines:
if just_copy and hostname == machine:
continue
if only_machine and only_machine != machine:
continue
await templates(machine,
config,
just_copy=just_copy,
copy_manuals=True,
template=template,
extra_variables=certificates,
)
is_host = machine == hostname
if is_host:
directories[machine] = '/usr/local/lib'
elif not just_copy:
machine_config = config.option(normalize_family(machine))
directories[machine] = await machine_config.option('general.config_dir').value.get()
if only_machine:
return directories
if only_machine:
raise Exception(f'cannot find machine {only_machine}: {machines}')
return directories, certificates
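For reference, one entry in certificates['certificates'][machine] for the 'cert_key' format looks roughly like the dict below (every value is an example, not taken from a real configuration; the actual paths depend on tls_cert_directory, tls_key_directory and tls_ca_directory). is_diff() in the Ansible action plugin strips the leading '/' from the 'name', 'private' and 'authority' paths to exclude those files from the remote comparison:

# Illustrative entry only; all values are made up.
certificate_entry = {
    'name': '/etc/pki/tls/certs/revprox.example.net.crt',
    'private': '/etc/pki/tls/private/revprox.example.net.key',
    'authority': '/etc/pki/ca/myca.crt',
    'domain': 'revprox.example.net',
    'type': 'server',
    'format': 'cert_key',
    'provider': 'autosigne',       # default when the certificate defines no provider
}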

View file

@@ -1,8 +1,9 @@
from os import environ
from os.path import isfile
from os import environ, makedirs
from os.path import isfile, join, isdir
from typing import List
from ipaddress import ip_address
from toml import load as toml_load
from json import load, dump
from pprint import pprint
@@ -10,6 +11,8 @@ MULTI_FUNCTIONS = []
EXTRA_ANNOTATORS = ['risotto.rougail']
ROUGAIL_NAMESPACE = 'general'
ROUGAIL_NAMESPACE_DESCRIPTION = 'Général'
HERE = environ['PWD']
IP_DIR = join(HERE, 'ip')
config_file = environ.get('CONFIG_FILE', 'risotto.conf')
@@ -42,19 +45,33 @@ async def value_pprint(dico, config):
def load_zones(servers_json):
if not isdir(IP_DIR):
makedirs(IP_DIR)
zones = servers_json['zones']
json_file = join(IP_DIR, 'zones.json')
if isfile(json_file):
with open(json_file, 'r') as fh:
zones_ip = load(fh)
else:
zones_ip = {}
for host_name, hosts in servers_json['hosts'].items():
for server_name, server in hosts['servers'].items():
server_zones = server['informations']['zones_name']
for idx, zone_name in enumerate(server_zones):
zone = zones[zone_name]
zone.setdefault('hosts', {})
zone['hosts'][server_name] = _get_ip(server_name, zone)
def _get_ip(server_name: str,
zone: dict,
) -> str:
# FIXME make a cache, machine should not change IP
server_index = len(zone['hosts'])
return str(ip_address(zone['start_ip']) + server_index)
if zone_name not in zones_ip:
zones_ip[zone_name] = {}
if server_name in zones_ip[zone_name]:
server_index = zones_ip[zone_name][server_name]
elif not zones_ip[zone_name]:
server_index = 0
else:
# it's the last ip + 1
server_index = zones_ip[zone_name][list(zones_ip[zone_name].keys())[-1]] + 1
ip = str(ip_address(zone['start_ip']) + server_index)
zone['hosts'][server_name] = ip
zones_ip[zone_name][server_name] = server_index
with open(json_file, 'w') as fh:
dump(zones_ip, fh)
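The allocation above is now persisted in ip/zones.json so a machine keeps the same address across runs. A minimal standalone sketch of the same index scheme, with made-up zone and machine names:

from ipaddress import ip_address

# Sketch of the persistent allocation: an index per machine, stored per zone.
zone = {'start_ip': '192.168.30.1'}
zones_ip = {'internal': {'web.example.net': 0}}          # what ip/zones.json would already contain

def allocate(zone_name, server_name):
    indexes = zones_ip.setdefault(zone_name, {})
    if server_name in indexes:
        index = indexes[server_name]                     # machine keeps its previous index
    elif not indexes:
        index = 0
    else:
        index = indexes[list(indexes)[-1]] + 1           # last allocated index + 1
    indexes[server_name] = index
    return str(ip_address(zone['start_ip']) + index)

print(allocate('internal', 'mail.example.net'))          # -> 192.168.30.2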