for creole's zephir2 branch
commit 841643e76e
700 changed files with 68183 additions and 0 deletions
.gitignore (vendored, Normal file, 4 lines added)
@@ -0,0 +1,4 @@
# Backup and swap files
*~
*#
*.swp
Makefile (Normal file, 24 lines added)
@@ -0,0 +1,24 @@
################################
# Makefile pour creole
################################

SOURCE=creole
EOLE_VERSION=2.7
EOLE_RELEASE=2.7.0

################################
# Début de zone à ne pas éditer
################################

include eole.mk
include apps.mk

################################
# Fin de zone à ne pas éditer
################################

# Makefile rules dedicated to application
# if exists
ifneq (, $(strip $(wildcard $(SOURCE).mk)))
include $(SOURCE).mk
endif
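The shared eole.mk/apps.mk include files are not part of this commit, so the exact build entry points are an assumption; as a rough sketch, building and staging a package laid out like this could look as follows (install-files:: is the hook extended in creole.mk later in this diff):

    # hypothetical build/stage run, assuming eole.mk drives a DESTDIR-based install
    make                                   # reads Makefile, eole.mk, apps.mk and, since SOURCE=creole, creole.mk
    make install-files DESTDIR=$PWD/tmp    # staged install into the *_DIR destinations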
apps.mk (Normal file, 64 lines added)
@@ -0,0 +1,64 @@
#
# NE PAS EDITER CE FICHIER
#
# Voir Makefile


##########################
# Application web envole #
##########################
ifneq (, $(filter oui web, $(PKGAPPS)))
#
# Sanity check
#
ifeq (, $(filter-out X.X, $(strip $(VERSION))))
$(error $$(VERSION) variable has incorrect value '$(VERSION)')
endif

# Where to store web application files
WEB_PATH := $(DESTDIR)/var/www/html

# Envole
sharenvole_PROG_DIR := $(DESTDIR)/usr/share/envole/$(SOURCE)

src_$(SOURCE)-$(VERSION)_REC_DIR := $(WEB_PATH)/$(SOURCE)
src_plugins-$(VERSION)_REC_DIR := $(WEB_PATH)/$(SOURCE)/plugin
src_lang-$(VERSION)_REC_DIR := $(WEB_PATH)/$(SOURCE)/lang

endif

##########################
# Application EOLE flask #
##########################
ifneq (, $(filter flask, $(PKGAPPS)))
#
# Sanity check
#
ifeq (, $(filter-out XXX, $(strip $(FLASK_MODULE))))
$(error $$(FLASK_MODULE) variable has incorrect value '$(FLASK_MODULE)')
endif

ifeq (, $(strip $(wildcard src/$(FLASK_MODULE).conf)))
$(error missing eoleflask configuration file 'src/$(FLASK_MODULE).conf')
endif

# Everything is related to mount point
APPS_MOUNT_POINT := $(shell sed -ne 's|^"MOUNT_POINT"[[:space:]]*:[[:space:]]*"/\([^"]*\)",|\1|p' \
                      src/$(FLASK_MODULE).conf)

ifeq (, $(strip $(APPS_MOUNT_POINT)))
$(error no "MOUNT_POINT" in eoleflask configuration file 'src/$(FLASK_MODULE).conf')
endif

# eole-flask configuration
src_DATA_DIR := $(DESTDIR)/etc/eole/flask/available

# Where to store flask application files
FLASK_PATH := $(eole_DIR)/flask/$(APPS_MOUNT_POINT)

# static files
src_$(FLASK_MODULE)_static_REC_DIR := $(FLASK_PATH)/static
src_$(FLASK_MODULE)_templates_REC_DIR := $(FLASK_PATH)/templates
src_$(FLASK_MODULE)_instance_REC_DIR := $(FLASK_PATH)/resources

endif
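The APPS_MOUNT_POINT extraction above can be checked by hand; the configuration fragment below is only an assumed example of an eoleflask .conf entry, while the sed expression is the one used in apps.mk:

    # hypothetical src/myflaskapp.conf containing a line:  "MOUNT_POINT" : "/myflaskapp",
    sed -ne 's|^"MOUNT_POINT"[[:space:]]*:[[:space:]]*"/\([^"]*\)",|\1|p' src/myflaskapp.conf
    # prints: myflaskapp   (the leading slash is stripped, as FLASK_PATH expects)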
bin/CreoleCat (Executable file, 153 lines added)
@@ -0,0 +1,153 @@
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""Run templatisation on a template name or file
|
||||
|
||||
`CreoleCat` supports two modes:
|
||||
|
||||
- run on a template name with option -t: the name is looked up in
|
||||
``/usr/share/eole/creole/distrib/``. The output files are
|
||||
calculated unless you explicitly specify ``-o``.
|
||||
|
||||
- run on a file with option -s: this mode requires the use of
|
||||
``-o`` option.
|
||||
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
|
||||
from os.path import basename, join, split
|
||||
|
||||
from pyeole import scriptargs
|
||||
from pyeole.log import init_logging
|
||||
|
||||
from creole.template import CreoleTemplateEngine
|
||||
import creole.config as cfg
|
||||
from creole.client import CreoleClient, CreoleClientError
|
||||
from pyeole.ihm import only_root
|
||||
|
||||
only_root()
|
||||
|
||||
client = CreoleClient()
|
||||
|
||||
def parse_cmdline():
|
||||
"""Parse commande line.
|
||||
"""
|
||||
parser = argparse.ArgumentParser(description="Instancie un template creole",
|
||||
parents=[scriptargs.container(),
|
||||
scriptargs.logging()])
|
||||
parser.add_argument("-t", "--template", metavar="NAME",
|
||||
help=u"nom du fichier template creole présent "
|
||||
"dans /usr/share/eole/creole/distrib")
|
||||
parser.add_argument("-s", "--source", metavar="PATH",
|
||||
help=u"chemin d’un fichier template")
|
||||
parser.add_argument("-o", "--output", metavar="OUTPUTFILE",
|
||||
help=u"chemin du fichier généré")
|
||||
|
||||
opts = parser.parse_args()
|
||||
|
||||
if (opts.template is None and opts.source is None) \
|
||||
or (opts.template and opts.source):
|
||||
parser.error("Vous devez spécifier une des options"
|
||||
"'--template' ou '--source'.")
|
||||
|
||||
if opts.source is not None and not os.access(opts.source, os.F_OK):
|
||||
parser.error("Fichier source inexistant"
|
||||
" ou illisible: {0}".format(opts.source))
|
||||
|
||||
if opts.output is None:
|
||||
if opts.source is not None:
|
||||
opts.output = ""
|
||||
else:
|
||||
if opts.template is not None \
|
||||
and opts.output == join(cfg.distrib_dir, opts.template):
|
||||
parser.error("Le fichier de sortie ne peut écraser"
|
||||
" le fichier template: {0}".format(opts.output) )
|
||||
if opts.source is not None and opts.output == opts.source:
|
||||
parser.error("Le fichier de sortie ne peut écraser"
|
||||
" le fichier source: {0}".format(opts.output) )
|
||||
|
||||
if opts.verbose:
|
||||
opts.log_level = 'info'
|
||||
if opts.debug:
|
||||
opts.log_level = 'debug'
|
||||
|
||||
return opts
|
||||
|
||||
|
||||
def _find_file(name, ctx):
|
||||
candidates = client.to_grouped_lists(ctx['files'], keyname='source')
|
||||
for source, filevar in candidates.items():
|
||||
if name != basename(source):
|
||||
continue
|
||||
elif filevar[0].get('activate', False):
|
||||
return filevar[0]
|
||||
|
||||
|
||||
def main():
|
||||
"""Setup environnment and run templatisation.
|
||||
"""
|
||||
|
||||
options = parse_cmdline()
|
||||
try:
|
||||
log = init_logging(level=options.log_level)
|
||||
|
||||
engine = CreoleTemplateEngine()
|
||||
|
||||
filevar = { 'source': options.source,
|
||||
'name': options.output,
|
||||
'full_name': options.output,
|
||||
'activate' : True,
|
||||
'del_comment': u'',
|
||||
'mkdir' : False,
|
||||
'rm' : False,
|
||||
}
|
||||
|
||||
if options.container is not None:
|
||||
# force container context
|
||||
groups = [client.get_container_infos(options.container)]
|
||||
elif options.output is not None:
|
||||
# Source without container, for root context
|
||||
groups = [client.get_container_infos('root')]
|
||||
else:
|
||||
groups = []
|
||||
for group in client.get_groups():
|
||||
groups.append(client.get_group_infos(group))
|
||||
|
||||
instanciated_files = []
|
||||
for group in groups:
|
||||
if filevar['source'] is not None:
|
||||
instanciated_files.append(filevar)
|
||||
engine.process(filevar, group)
|
||||
elif options.template is not None:
|
||||
found_file = _find_file(options.template, group)
|
||||
if found_file:
|
||||
instanciated_files.append(found_file)
|
||||
if options.output is None:
|
||||
engine._instance_file(found_file, group)
|
||||
else:
|
||||
# Override output
|
||||
found_file['name'] = options.output
|
||||
found_file['full_name'] = options.output
|
||||
# Do not get through verify and
|
||||
# change_properties
|
||||
engine._copy_to_template_dir(found_file)
|
||||
engine.process(found_file, group)
|
||||
|
||||
if not instanciated_files:
|
||||
# No file was instantiated
|
||||
raise CreoleClientError("Fichier template inexistant:"
|
||||
" {0}".format(options.template))
|
||||
|
||||
except Exception, err:
|
||||
if options.debug:
|
||||
log.debug(err, exc_info=True)
|
||||
else:
|
||||
log.error(err)
|
||||
sys.exit(1)
|
||||
sys.exit(0)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
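A usage sketch based on the options defined above (the template and file names are hypothetical):

    CreoleCat -t sshd_config                    # instantiate a template from /usr/share/eole/creole/distrib
    CreoleCat -s /tmp/my.tmpl -o /tmp/my.out    # instantiate an arbitrary file; -o is required with -s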
|
bin/CreoleGet (Executable file, 130 lines added)
@@ -0,0 +1,130 @@
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""Get a creole variable value.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import argparse
|
||||
|
||||
from creole.client import CreoleClient
|
||||
|
||||
from pyeole import scriptargs
|
||||
from pyeole.log import init_logging
|
||||
from pyeole.encode import normalize
|
||||
|
||||
_RETURN_VALUES = u"""Multiple values are separated with NEWLINE character '\\n',
|
||||
or SPACE character if several variables are displayed."""
|
||||
|
||||
parser = argparse.ArgumentParser(description=u"Get creole variable",
|
||||
epilog=_RETURN_VALUES,
|
||||
parents=[scriptargs.logging()])
|
||||
|
||||
parser.add_argument('variable', nargs='?',
|
||||
help=u"Nom de variable creole")
|
||||
parser.add_argument('default', nargs='?',
|
||||
help=u"Valeur par défaut si la variable n’existe pas")
|
||||
|
||||
incompatible_options = parser.add_mutually_exclusive_group()
|
||||
|
||||
incompatible_options.add_argument('--groups', action="store_true", default=False,
|
||||
help=u"Liste les groupes de conteneurs")
|
||||
|
||||
incompatible_options.add_argument('--list', action="store_true", default=False,
|
||||
help=u"Liste l'ensemble des variables creole")
|
||||
|
||||
incompatible_options.add_argument('--reload', action="store_true", default=False,
|
||||
help=u"Recharge toute la configuration creole")
|
||||
|
||||
incompatible_options.add_argument('--reload-eol', action="store_true", default=False,
|
||||
help=u"Recharge les valeurs de configuration creole")
|
||||
|
||||
options = parser.parse_args()
|
||||
|
||||
if options.verbose:
|
||||
# 'info' is output to stdout
|
||||
options.log_level = u'warning'
|
||||
if options.debug:
|
||||
options.log_level = u'debug'
|
||||
|
||||
def output(value, strip_master=False):
|
||||
"""
|
||||
formatage de l'affichage
|
||||
"""
|
||||
if isinstance(value, list):
|
||||
#FIXME: ['val1', None, 'val2']
|
||||
for val in value:
|
||||
if isinstance(val, dict):
|
||||
sys.stderr.write(u'{}\n'.format(val['err']))
|
||||
else:
|
||||
sys.stdout.write(u'{}\n'.format(val))
|
||||
elif isinstance(value, dict):
|
||||
# in case several keys/values are returned
|
||||
list_keys = value.keys()
|
||||
list_keys.sort()
|
||||
for var in list_keys:
|
||||
values = value[var]
|
||||
if isinstance(values, list):
|
||||
values_ = u''
|
||||
for val in values:
|
||||
if val and not isinstance(val, dict):
|
||||
values_ += u" {}".format(val)
|
||||
values = values_
|
||||
elif values is None:
|
||||
values = u''
|
||||
else:
|
||||
values = u'{}'.format(values)
|
||||
if strip_master:
|
||||
varname = var.split('.')[-1]
|
||||
else:
|
||||
varname = var
|
||||
sys.stdout.write(u'{}="{}"\n'.format(varname, values.strip()))
|
||||
elif value is None or value == u'':
|
||||
sys.stdout.write(u'\n')
|
||||
else:
|
||||
sys.stdout.write(u'{0}\n'.format(value))
|
||||
#return ret.rstrip('\n')
|
||||
|
||||
def main():
|
||||
"""Setup environnment and run templatisation.
|
||||
"""
|
||||
|
||||
try:
|
||||
log = init_logging(level=options.log_level)
|
||||
client = CreoleClient()
|
||||
var = options.variable
|
||||
if options.groups:
|
||||
output(client.get_groups())
|
||||
elif options.list:
|
||||
output(client.get_creole(), True)
|
||||
elif options.reload:
|
||||
client.reload_config()
|
||||
elif options.reload_eol:
|
||||
client.reload_eol()
|
||||
elif not var:
|
||||
raise Exception(u"Veuillez spécifier un nom de variable Creole")
|
||||
else:
|
||||
if options.default is not None:
|
||||
kwargs = {'default':options.default}
|
||||
else:
|
||||
kwargs = {}
|
||||
if '.' in var:
|
||||
output(client.get(var))
|
||||
else:
|
||||
output(client.get_creole(var, **kwargs))
|
||||
|
||||
except Exception, err:
|
||||
if options.debug:
|
||||
log.debug(normalize(err), exc_info=True)
|
||||
else:
|
||||
log.error(normalize(err))
|
||||
sys.exit(1)
|
||||
|
||||
sys.exit(0)
|
||||
|
||||
if __name__ == '__main__':
|
||||
#Fix #18701
|
||||
reload(sys)
|
||||
sys.setdefaultencoding('UTF8')
|
||||
main()
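A usage sketch for the options above (nom_machine appears in other scripts of this commit; ma_variable and the fallback value are hypothetical):

    CreoleGet nom_machine              # print one creole variable
    CreoleGet ma_variable 'défaut'     # variable with a default value if it does not exist
    CreoleGet --list                   # every creole variable, as name="value" lines
    CreoleGet --groups                 # list container groups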
|
||||
|
bin/CreoleLint (Executable file, 68 lines added)
@@ -0,0 +1,68 @@
#! /usr/bin/python
|
||||
|
||||
import sys
|
||||
from optparse import OptionParser
|
||||
|
||||
from creole.lint.creolelint import validate
|
||||
from creole.lint.ansiwriter import AnsiWriter
|
||||
|
||||
def parse_cmdline():
|
||||
parser = OptionParser()
|
||||
|
||||
parser.add_option("-t", "--template", dest="tmpl",
|
||||
default=None, help="nom du template Creole")
|
||||
parser.add_option("-l", "--level", dest="writelevel", default='warning',
|
||||
help="level d'affichage des messages")
|
||||
parser.add_option("-n", "--name", dest="name",
|
||||
default=None, help="nom du lint a tester")
|
||||
parser.add_option("-d", "--dico-only", action="store_true",
|
||||
dest="only_on_dico",
|
||||
default=False, help="lint uniquement sur les dicos")
|
||||
return parser.parse_args()
|
||||
|
||||
def main():
|
||||
options, args = parse_cmdline()
|
||||
tmpl = options.tmpl
|
||||
writelevel = options.writelevel
|
||||
|
||||
#if options.writelevel not in errorlevel.values():
|
||||
# raise Exception('Niveau %s inconnu'% options.writelevel)
|
||||
only_on_template = False
|
||||
only_on_dico = options.only_on_dico
|
||||
if tmpl is not None:
|
||||
only_on_template = True
|
||||
if options.name:
|
||||
keywords = [options.name]
|
||||
writelevel = 'info'
|
||||
else:
|
||||
keywords = []
|
||||
if not only_on_template:
|
||||
# keywords.extend(['orphans_def',
|
||||
# 'orphans_set', 'orphans_for', 'orphans_tmpl_files',
|
||||
# 'define', 'syntax_for', 'syntax_var', 'syntax_var2',
|
||||
# 'syntax_function', 'valid_client_option'])
|
||||
keywords.extend(['valid_dtd', 'wrong_dicos_name',
|
||||
'tabs_in_dicos', 'hidden_if_in_dicos',
|
||||
'condition_without_target',
|
||||
'obligatoire_in_dicos',
|
||||
'valid_slave_value',
|
||||
'valid_var_label', 'valid_separator_label',
|
||||
'valid_help_label',
|
||||
'activation_var_without_help',
|
||||
'family_without_help',
|
||||
'family_without_icon',
|
||||
'old_fw_file'])
|
||||
if not only_on_dico:
|
||||
keywords.extend(['valid_parse_tmpl'])
|
||||
keywords.append('builtins')
|
||||
ansi = AnsiWriter(writelevel)
|
||||
try:
|
||||
for keyword in keywords:
|
||||
validate(keyword, ansi, tmpl)
|
||||
except Exception, err:
|
||||
from traceback import print_exc
|
||||
print_exc()
|
||||
#print u"Erreur : {0}".format(err)
|
||||
sys.exit(1)
|
||||
|
||||
main()
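A usage sketch (the template name is hypothetical):

    CreoleLint                    # run the dictionary and template lints listed above
    CreoleLint -d                 # lint dictionaries only
    CreoleLint -t my_template     # restrict checks to one creole template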
|
bin/CreoleLock (Executable file, 16 lines added)
@@ -0,0 +1,16 @@
#!/usr/bin/env python

from sys import argv
from os import getppid
from importlib import import_module
from pyeole.command_line import ArgumentParser
from pyeole.ihm import only_root

only_root()

allowed_functions = ('acquire', 'release', 'is_locked')
module = import_module('pyeole.lock')
module.PID = getppid()
arguments = ArgumentParser(module, allowed_functions, argv[0])
arguments.parse_args(argv[1:])
arguments.trigger_callback()
bin/CreoleRun (Executable file, 54 lines added)
@@ -0,0 +1,54 @@
#!/bin/bash

# exécute une commande dans un conteneur

SSHCMD="ssh -q -o LogLevel=ERROR -o StrictHostKeyChecking=no"

commande=$1
container=$2
# ne lancer la commande que si dans un conteneur (ssh)
onlyifcontainer=$3
silent=$4
CMD='eval'

ExecContainer()
{
    ip="$1"
    cmd="$2"
    tcpcheck 2 $ip:22 &>/dev/null || return 1
    $SSHCMD root@$ip "$cmd"
}

if [[ ${container} == "all" ]]
then
    if [[ $(CreoleGet mode_conteneur_actif) == "oui" ]]
    then
        for grp in $(CreoleGet --groups)
        do
            if [[ ${grp} != 'root' ]] && [[ ${grp} != 'all' ]]
            then
                container_ip=$(CreoleGet "container_ip_${grp}")
                if [ ! "$silent" = "yes" ]; then
                    echo "Exécution de la commande [${commande}] dans le conteneur ${grp}"
                    echo
                fi
                ExecContainer "$container_ip" "$commande"
                if [ ! "$silent" = "yes" ]; then
                    echo
                fi
            fi
        done
    fi
else
    if [ -n "$container" ]
    then
        container_ip=$(CreoleGet "container_ip_$container")
    fi
    if [ -n "$container_ip" ] && [ ! "$container_ip" = "127.0.0.1" ]
    then
        ExecContainer "$container_ip" "$commande"
    elif [ "$onlyifcontainer" != "yes" ]
    then
        eval "$commande"
    fi
fi
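A usage sketch based on the positional arguments above (the 'web' container group name is hypothetical):

    CreoleRun "apt-get clean" all           # run in every container group except root and all
    CreoleRun "ls /var/www" web             # run in the 'web' group, or locally if its IP is 127.0.0.1
    CreoleRun "ls /var/www" web yes         # third argument: only run if the group really is a container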
bin/CreoleService (Executable file, 71 lines added)
@@ -0,0 +1,71 @@
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import sys
|
||||
import argparse
|
||||
|
||||
from pyeole import scriptargs
|
||||
from pyeole.log import init_logging
|
||||
from pyeole.service import manage_services
|
||||
from creole.reconfigure import services
|
||||
from pyeole.ihm import only_root
|
||||
|
||||
only_root()
|
||||
|
||||
def parse_cmdline():
|
||||
|
||||
service_actions=['apply', 'configure', 'enable', 'disable', 'status',
|
||||
'start', 'stop', 'restart', 'reload']
|
||||
|
||||
parser = argparse.ArgumentParser(description="Action sur les services",
|
||||
parents=[scriptargs.container(),
|
||||
scriptargs.logging('info')])
|
||||
parser.add_argument('service', help="Nom du service")
|
||||
parser.add_argument('action', choices=service_actions,
|
||||
help="Action à effectuer")
|
||||
parser.add_argument("-f", "--force", action="store_true", default=False,
|
||||
help="Ne pas valider l'état de service")
|
||||
parser.add_argument("-s", "--silent", action="store_true", default=False,
|
||||
help="Ne pas affichier sur la console")
|
||||
|
||||
opts = parser.parse_args()
|
||||
|
||||
if opts.verbose:
|
||||
opts.log_level = 'info'
|
||||
if opts.debug:
|
||||
opts.log_level = 'debug'
|
||||
if opts.silent:
|
||||
opts.log_level = 'error'
|
||||
|
||||
|
||||
return opts
|
||||
|
||||
def main():
|
||||
options = parse_cmdline()
|
||||
log = init_logging(level=options.log_level)
|
||||
try:
|
||||
display = 'console'
|
||||
if options.silent:
|
||||
display = 'log'
|
||||
if options.service == 'all':
|
||||
if options.action == 'restart':
|
||||
services('stop', display_title=False, try_restart_lxc=False)
|
||||
services('start', display_title=False, try_restart_lxc=False)
|
||||
else:
|
||||
services(options.action, display_title=False, try_restart_lxc=False)
|
||||
ret = True
|
||||
else:
|
||||
ret = manage_services(options.action, options.service,
|
||||
container=options.container, force=options.force,
|
||||
display=display)
|
||||
except Exception, err:
|
||||
if options.debug:
|
||||
log.debug(err, exc_info=True)
|
||||
else:
|
||||
log.error(err)
|
||||
sys.exit(1)
|
||||
sys.exit(ret)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
92
bin/CreoleSet
Executable file
92
bin/CreoleSet
Executable file
|
@ -0,0 +1,92 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import argparse
|
||||
from sys import exit
|
||||
|
||||
from pyeole import scriptargs
|
||||
from pyeole.ansiprint import print_red
|
||||
from pyeole.log import init_logging
|
||||
from creole.var_loader import convert_value
|
||||
from creole.loader import creole_loader, config_save_values
|
||||
from tiramisu.error import PropertiesOptionError
|
||||
from pyeole.ihm import only_root
|
||||
|
||||
only_root()
|
||||
|
||||
parser = argparse.ArgumentParser(description=u"Set Creole variable",
|
||||
parents=[scriptargs.logging()])
|
||||
parser.add_argument("--default", action="store_true", default=False,
|
||||
help=u"remettre à la valeur par défaut")
|
||||
parser.add_argument('variable', nargs=1,
|
||||
help=u"Nom de variable Creole")
|
||||
parser.add_argument('value', nargs='?',
|
||||
help=u"Valeur de la variable Creole")
|
||||
|
||||
options = parser.parse_args()
|
||||
|
||||
if options.verbose:
|
||||
# 'info' is output to stdout
|
||||
options.log_level = u'warning'
|
||||
if options.debug:
|
||||
options.log_level = u'debug'
|
||||
|
||||
if options.default and options.value:
|
||||
print_red("En cas de remise à la valeur par défaut, il ne faut pas spécifier de valeur")
|
||||
exit(1)
|
||||
|
||||
if not options.default and options.value is None:
|
||||
print_red("Veuiller spécifier la valeur")
|
||||
exit(1)
|
||||
|
||||
def main():
|
||||
log = init_logging(level=options.log_level)
|
||||
try:
|
||||
config = creole_loader(rw=True, owner='creoleset', load_extra=True)
|
||||
var = options.variable[0]
|
||||
if '.' in var:
|
||||
if var.startswith('.'):
|
||||
var = var[1:]
|
||||
namespace = var.split('.')[0]
|
||||
else:
|
||||
namespace = 'creole'
|
||||
var = config.find_first(byname=var, type_='path',
|
||||
force_permissive=True)
|
||||
if options.default:
|
||||
homeconfig, name = config.cfgimpl_get_home_by_path(var)
|
||||
homeconfig.__delattr__(name)
|
||||
else:
|
||||
option = config.unwrap_from_path(var)
|
||||
value = options.value
|
||||
if option.impl_is_multi():
|
||||
values = []
|
||||
for val in value.split('\n'):
|
||||
values.append(convert_value(option, val))
|
||||
value = values
|
||||
else:
|
||||
value = convert_value(option, value)
|
||||
setattr(config, var, value)
|
||||
config_save_values(config, namespace)
|
||||
except PropertiesOptionError, err:
|
||||
if options.debug:
|
||||
log.debug(err, exc_info=True)
|
||||
print_red(u"Erreur de propriété : {0}".format(err))
|
||||
exit(1)
|
||||
except ValueError, err:
|
||||
if options.debug:
|
||||
log.debug(err, exc_info=True)
|
||||
print_red("Valeur invalide : {0}".format(err))
|
||||
exit(1)
|
||||
except AttributeError:
|
||||
if options.debug:
|
||||
log.debug("AttributeError", exc_info=True)
|
||||
print_red("Nom de variable inconnue : {0}".format(options.variable[0]))
|
||||
exit(1)
|
||||
except Exception, err:
|
||||
if options.debug:
|
||||
log.debug(err, exc_info=True)
|
||||
print_red("Erreur inconnue : {0}".format(err))
|
||||
exit(1)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
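A usage sketch (nom_machine is a variable name seen elsewhere in this commit; the value is an example):

    CreoleSet nom_machine monserveur     # set a creole variable
    CreoleSet --default nom_machine      # reset it to its default value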
bin/Maj-Auto (Executable file, 454 lines added)
@@ -0,0 +1,454 @@
#! /usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
##########################################################################
|
||||
# Maj-Auto - Manage automatic update of EOLE server
|
||||
# Copyright © 2013 Pôle de compétences EOLE <eole@ac-dijon.fr>
|
||||
#
|
||||
# License CeCILL:
|
||||
# * in french: http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# * in english http://www.cecill.info/licences/Licence_CeCILL_V2-en.html
|
||||
##########################################################################
|
||||
|
||||
import sys
|
||||
import argparse
|
||||
import atexit
|
||||
import time
|
||||
import locale
|
||||
|
||||
from os import unlink, environ, system
|
||||
from subprocess import Popen, PIPE
|
||||
from os.path import basename, isfile
|
||||
|
||||
from creole import reconfigure, fonctionseole
|
||||
from creole.client import CreoleClient, TimeoutCreoleClientError, NotFoundError, CreoleClientError
|
||||
from creole.error import UserExit, UserExitError
|
||||
|
||||
from creole.eoleversion import EOLE_RELEASE, LAST_RELEASE, EOLE_VERSION
|
||||
|
||||
from pyeole.lock import acquire, release, is_locked
|
||||
from pyeole.log import init_logging, set_formatter
|
||||
from pyeole.ihm import question_ouinon, only_root, catch_signal
|
||||
from pyeole.encode import normalize
|
||||
|
||||
from pyeole.pkg import EolePkg, _configure_sources_mirror, report
|
||||
|
||||
from pyeole.diagnose import test_tcp
|
||||
from pyeole import scriptargs
|
||||
|
||||
from pyeole.i18n import i18n
|
||||
|
||||
_ = i18n('creole')
|
||||
|
||||
#import logging
|
||||
|
||||
log = None
|
||||
|
||||
only_root()
|
||||
|
||||
try:
|
||||
# FIXME : refactorer le système de lock de zephir-client (ref #6660)
|
||||
from zephir.lib_zephir import lock, unlock
|
||||
zephir_libs = True
|
||||
except Exception:
|
||||
zephir_libs = False
|
||||
|
||||
def release_lock():
|
||||
if zephir_libs:
|
||||
unlock('maj')
|
||||
if is_locked('majauto', level='system'):
|
||||
release('majauto', level='system')
|
||||
|
||||
def user_exit(*args, **kwargs):
|
||||
"""
|
||||
sortie utilisateur "propre"
|
||||
"""
|
||||
log.warn(_(u'! Abandoning configuration !'))
|
||||
log.warn(_(u'System may be in an incoherent state.\n\n'))
|
||||
raise UserExitError()
|
||||
|
||||
|
||||
def parse_cmdline():
|
||||
"""Parse commande line.
|
||||
"""
|
||||
parser = argparse.ArgumentParser(prog='Maj-Auto|Query-Auto',
|
||||
description=_(u"Manage EOLE server automatic update"),
|
||||
parents=[scriptargs.logging('info')],
|
||||
add_help=False)
|
||||
|
||||
parser.add_argument('-h', '--help',
|
||||
action='help',
|
||||
help=_(u"show this help message and exit"))
|
||||
parser.add_argument('-n', '--dry-run',
|
||||
action='store_true',
|
||||
help=_(u"run in dry-run mode (force to True when using Query-Auto)."))
|
||||
parser.add_argument('-f', '--force',
|
||||
action='store_true',
|
||||
help=_(u"bypass Zephir authorizations."))
|
||||
parser.add_argument('-F', '--force-update',
|
||||
action='store_true',
|
||||
help=_(u"update your server without any confirmation."))
|
||||
|
||||
parser.add_argument('-s', '--simulate',
|
||||
action='store_true',
|
||||
help=_(u"ask apt-get to simulate packages installation"))
|
||||
|
||||
# Level of upgrade
|
||||
maj_level = parser.add_mutually_exclusive_group()
|
||||
maj_level.add_argument('-C', '--candidat', default=False,
|
||||
action='store', nargs='*',
|
||||
choices=['eole', 'envole'],
|
||||
help=_(u"use testing packages."))
|
||||
maj_level.add_argument('-D', '--devel', default=False,
|
||||
action='store', nargs='*',
|
||||
choices=['eole', 'envole'],
|
||||
help=_(u"use development packages."))
|
||||
|
||||
parser.add_argument('--release',
|
||||
help=argparse.SUPPRESS)
|
||||
|
||||
# Action when upgrade is OK
|
||||
parser.add_argument('-r', '--reconfigure',
|
||||
action='store_true',
|
||||
help=_(u"run reconfigure on successful upgrade."))
|
||||
|
||||
parser.add_argument('-R', '--reboot',
|
||||
action='store_true',
|
||||
help=_(u"run reconfigure on successful upgrade and reboot if necessary (implies -r)."))
|
||||
parser.add_argument('--download', action='store_true',
|
||||
help=_(u'only download packages in cache.'))
|
||||
# Mirror selection
|
||||
parser.add_argument('-S', '--eole-mirror',
|
||||
help=_(u"EOLE repository server."))
|
||||
parser.add_argument('-U', '--ubuntu-mirror',
|
||||
help=_(u"Ubuntu repository server."))
|
||||
parser.add_argument('-V', '--envole-mirror',
|
||||
help=_(u"Envole repository server."))
|
||||
parser.add_argument('-c', '--cdrom', action="store_true",
|
||||
help=_(u"use CDROM as source."))
|
||||
|
||||
# sortie EAD
|
||||
parser.add_argument('-W', action='store_true',
|
||||
help=_(u"specific output for EAD."))
|
||||
# mode sans creoled
|
||||
parser.add_argument('-i', '--ignore', action='store_true',
|
||||
help=_(u"ignore local configuration if creoled not responding."))
|
||||
|
||||
|
||||
opts = parser.parse_args()
|
||||
|
||||
if getattr(opts, 'level', None) is None:
|
||||
opts.level = u'updates'
|
||||
if opts.verbose:
|
||||
opts.log_level = 'info'
|
||||
if opts.debug:
|
||||
opts.log_level = 'debug'
|
||||
|
||||
if opts.reboot:
|
||||
opts.reconfigure = True
|
||||
|
||||
return opts
|
||||
|
||||
|
||||
def main():
|
||||
global log
|
||||
opts = parse_cmdline()
|
||||
if opts.W:
|
||||
# variable set for pyeole.ansiprint
|
||||
environ['ModeTxt'] = 'yes'
|
||||
reporting = not (opts.dry_run or opts.simulate or opts.download)
|
||||
if not reporting:
|
||||
z_proc = 'QUERY-MAJ'
|
||||
log = init_logging(name=basename(sys.argv[0]), level=opts.log_level)
|
||||
pkg_log = init_logging(name='pyeole.pkg', level=opts.log_level)
|
||||
diag_log = init_logging(name='pyeole.diagnose', level=opts.log_level)
|
||||
else:
|
||||
z_proc = 'MAJ'
|
||||
report_file = '/var/lib/eole/reports/rapport-maj.log'
|
||||
if isfile(report_file):
|
||||
unlink(report_file)
|
||||
log = init_logging(name=basename(sys.argv[0]), level=opts.log_level,
|
||||
filename=report_file)
|
||||
pkg_log = init_logging(name='pyeole.pkg', level=opts.log_level,
|
||||
filename=report_file)
|
||||
diag_log = init_logging(name='pyeole.diagnose', level=opts.log_level,
|
||||
filename=report_file)
|
||||
set_formatter(log, u'file', u'brief')
|
||||
set_formatter(log, u'file', u'with-levelname-date')
|
||||
set_formatter(pkg_log, u'file', u'with-levelname-date')
|
||||
set_formatter(diag_log, u'file', u'with-levelname-date')
|
||||
report(2)
|
||||
locale.setlocale(locale.LC_TIME, "fr_FR.utf8")
|
||||
log.info(_(u'Update at {0}').format(time.strftime("%A %d %B %Y %H:%M:%S")))
|
||||
raised_err = None
|
||||
error_msg = None
|
||||
try:
|
||||
# gestion du ctrl+c
|
||||
catch_signal(user_exit)
|
||||
acquire('majauto', level='system')
|
||||
atexit.register(release_lock)
|
||||
client = CreoleClient()
|
||||
eole_level = 'stable'
|
||||
envole_level = 'stable'
|
||||
try:
|
||||
version = client.get_creole('eole_release')
|
||||
except (TimeoutCreoleClientError, NotFoundError, CreoleClientError) as err:
|
||||
if opts.ignore:
|
||||
version = EOLE_RELEASE
|
||||
else:
|
||||
raise err
|
||||
if opts.candidat is not False:
|
||||
z_level = " en candidate"
|
||||
# Gestion du niveau par dépôt (16110)
|
||||
if len(opts.candidat) == 0:
|
||||
# Si on ne précise aucun dépôt tout le monde va en candidat
|
||||
eole_level = 'proposed'
|
||||
envole_level = 'proposed'
|
||||
else:
|
||||
# Sinon on vérifie dépôt par dépôt, les dépôts non précisés restent en stable
|
||||
if 'eole' in opts.candidat:
|
||||
eole_level = 'proposed'
|
||||
if 'envole' in opts.candidat:
|
||||
envole_level = 'proposed'
|
||||
elif opts.devel is not False:
|
||||
z_level = " en devel"
|
||||
# Gestion du niveau par dépôt (16110)
|
||||
if len(opts.devel) == 0:
|
||||
# Si on ne précise aucun dépôt, tout le monde va en devel
|
||||
eole_level = 'unstable'
|
||||
envole_level = 'unstable'
|
||||
else:
|
||||
# Sinon on vérifie dépôt par dépôt, les dépôts non précisés restent en stable
|
||||
if 'eole' in opts.devel:
|
||||
eole_level = 'unstable'
|
||||
if 'envole' in opts.devel:
|
||||
envole_level = 'unstable'
|
||||
else:
|
||||
z_level = ""
|
||||
if opts.release:
|
||||
current_release = int(EOLE_RELEASE.split('.')[-1])
|
||||
new_release = opts.release.split('.')
|
||||
if len(new_release) != 3 or \
|
||||
u'.'.join(new_release[0:2]) != EOLE_VERSION or \
|
||||
int(new_release[2]) not in range(current_release+1, int(LAST_RELEASE) + 1):
|
||||
raise Exception(_('Unknown release number'))
|
||||
z_level += " en {0}".format(opts.release)
|
||||
version = opts.release
|
||||
if opts.cdrom:
|
||||
z_level += " via le CDROM"
|
||||
#distro = 'stable'
|
||||
fonctionseole.zephir("INIT", "Début{0}".format(z_level), z_proc)
|
||||
if zephir_libs and not fonctionseole.init_proc('MAJ'):
|
||||
if opts.force:
|
||||
fonctionseole.zephir("MSG",
|
||||
"Mise à jour forcée par l'utilisateur",
|
||||
z_proc)
|
||||
else:
|
||||
log.warn(_(u"Update is locked, please contact Zéphir administrator"))
|
||||
log.warn(_(u"Use -f option if you want to force execution"))
|
||||
raise UserExitError()
|
||||
lock('maj')
|
||||
PKGMGR = EolePkg('apt', ignore=opts.ignore)
|
||||
if opts.dry_run:
|
||||
PKGMGR.set_option('APT::Get::Simulate', 'true')
|
||||
|
||||
try:
|
||||
module = client.get_creole('eole_module')
|
||||
except (TimeoutCreoleClientError, NotFoundError, CreoleClientError) as err:
|
||||
if opts.ignore:
|
||||
module = 'module'
|
||||
else:
|
||||
raise err
|
||||
try:
|
||||
uai = client.get_creole('numero_etab')
|
||||
except (TimeoutCreoleClientError, NotFoundError, CreoleClientError) as err:
|
||||
if opts.ignore:
|
||||
uai = None
|
||||
else:
|
||||
raise err
|
||||
|
||||
head = "*** {0} {1}"
|
||||
if uai:
|
||||
head += " ({2})"
|
||||
head += " ***\n"
|
||||
|
||||
log.info(head.format(module, version, uai))
|
||||
|
||||
if not opts.force_update:
|
||||
raising_level = u''
|
||||
if opts.release:
|
||||
raising_level = _(u"(CHANGE RELEASE LEVEL)")
|
||||
elif u'unstable' in [eole_level, envole_level]:
|
||||
raising_level = _(u"(UNSTABLE VERSION)")
|
||||
elif u'proposed' in [eole_level, envole_level]:
|
||||
raising_level = _(u"(TESTING VERSION)")
|
||||
|
||||
if raising_level != u'':
|
||||
log.warn(_(u"{0} - Raising update level may prevent "
|
||||
u"lowering back to stable version.").format(raising_level))
|
||||
try:
|
||||
assert question_ouinon(_(u"Do you wish to proceed?")) == 'oui'
|
||||
fonctionseole.zephir("MSG",
|
||||
"Mise à jour{0} forcée par l'utilisateur".format(z_level),
|
||||
z_proc)
|
||||
except (AssertionError, EOFError) as err:
|
||||
log.warn(_(u"Cancelling!"))
|
||||
raise UserExit()
|
||||
|
||||
PKGMGR.check()
|
||||
|
||||
#serveurs à utiliser pour les dépôts Ubuntu et EOLE
|
||||
_configure_sources_mirror(PKGMGR.pkgmgr, ubuntu=opts.ubuntu_mirror,
|
||||
eole=opts.eole_mirror, envole=opts.envole_mirror,
|
||||
ignore=opts.ignore, cdrom=opts.cdrom,
|
||||
release=version, eole_level=eole_level,
|
||||
envole_level=envole_level)
|
||||
|
||||
|
||||
PKGMGR.update(silent=True)
|
||||
upgrades = PKGMGR.get_upgradable_list()
|
||||
|
||||
install = 0
|
||||
upgrade = 0
|
||||
delete = 0
|
||||
for container, packages in upgrades.items():
|
||||
if not packages:
|
||||
continue
|
||||
for name, isInstalled, candidateVersion in packages:
|
||||
if isInstalled:
|
||||
if candidateVersion is None:
|
||||
delete += 1
|
||||
else:
|
||||
upgrade += 1
|
||||
else:
|
||||
install += 1
|
||||
|
||||
total_pkg = install+upgrade
|
||||
|
||||
headers = []
|
||||
if total_pkg == 0:
|
||||
log.info(_(u"Update successful."))
|
||||
log.info(_(u"Nothing to install."))
|
||||
fonctionseole.zephir("FIN",
|
||||
"Aucun paquet à installer{0}".format(z_level),
|
||||
z_proc)
|
||||
if reporting:
|
||||
report(3)
|
||||
sys.exit(0)
|
||||
|
||||
headers.append(_(u"{0} new,", u"{0} news,", install).format(install))
|
||||
headers.append(_(u"{0} upgrade,", u"{0} upgrades,", upgrade).format(upgrade))
|
||||
headers.append(_(u"{0} delete", u"{0} deletes", delete).format(delete))
|
||||
log.info(' '.join(headers))
|
||||
|
||||
for line in PKGMGR.list_upgrade(upgrades=upgrades):
|
||||
log.info(line)
|
||||
|
||||
if opts.dry_run:
|
||||
fonctionseole.zephir("FIN",
|
||||
"{0} paquets à mettre à jour{1}".format(total_pkg, z_level),
|
||||
z_proc)
|
||||
sys.exit(0)
|
||||
|
||||
if opts.download:
|
||||
for container, packages in upgrades.items():
|
||||
if not packages:
|
||||
continue
|
||||
pkgs = []
|
||||
for name, isInstalled, candidateVersion in packages:
|
||||
pkgs.append(name)
|
||||
PKGMGR.fetch_archives(container=container, packages=pkgs)
|
||||
fonctionseole.zephir("FIN",
|
||||
"{0} paquets téléchargés{1}".format(total_pkg, z_level),
|
||||
z_proc)
|
||||
|
||||
elif opts.simulate:
|
||||
PKGMGR.dist_upgrade(simulate=opts.simulate)
|
||||
fonctionseole.zephir("FIN",
|
||||
"{0} paquets mis à jour (simulation){1}".format(total_pkg, z_level),
|
||||
z_proc)
|
||||
|
||||
else:
|
||||
PKGMGR.download_upgrade()
|
||||
PKGMGR.dist_upgrade(simulate=opts.simulate)
|
||||
log.info(_(u"Update successful."))
|
||||
fonctionseole.zephir("FIN",
|
||||
"{0} paquets mis à jour{1}".format(total_pkg, z_level),
|
||||
z_proc)
|
||||
if opts.release:
|
||||
ret_code = system('/usr/share/zephir/scripts/upgrade_distrib.py --auto')
|
||||
if ret_code != 0:
|
||||
error_msg = str('erreur à la mise à jour vers la release {0}'.format(opts.release))
|
||||
else:
|
||||
log.info(_('Upgrade post Maj-Release, please wait'))
|
||||
release('majauto', level='system')
|
||||
cmd = ['/usr/bin/Maj-Auto', '-F']
|
||||
process = Popen(cmd, stdin=PIPE, stderr=PIPE, stdout=PIPE, shell=False)
|
||||
ret_code = process.wait()
|
||||
if ret_code != 0:
|
||||
error_msg = str(_('error in post maj release'))
|
||||
if opts.reconfigure:
|
||||
# rechargement des modules python (#7832)
|
||||
# cf. http://code.activestate.com/recipes/81731-reloading-all-modules/
|
||||
if globals().has_key('init_modules'):
|
||||
for m in [x for x in sys.modules.keys() if x not in init_modules]:
|
||||
del(sys.modules[m])
|
||||
else:
|
||||
init_modules = sys.modules.keys()
|
||||
fonctionseole.zephir("MSG",
|
||||
"Reconfiguration automatique",
|
||||
z_proc)
|
||||
elif not opts.release:
|
||||
log.warn(_(u"At least one packages has been updated,"
|
||||
u" use command [reconfigure] to apply modifications."))
|
||||
fonctionseole.zephir("MSG",
|
||||
"Reconfiguration du serveur à planifier",
|
||||
z_proc)
|
||||
|
||||
except (UserExit, UserExitError) as err:
|
||||
if reporting:
|
||||
report(1, 'Stopped by user')
|
||||
fonctionseole.zephir("FIN", "Abandon par l'utilisateur", z_proc)
|
||||
sys.exit(1)
|
||||
|
||||
except (TimeoutCreoleClientError, NotFoundError, CreoleClientError) as err:
|
||||
clue = _(". If restarting creoled service does not help, try {} command with '-i' option.")
|
||||
error_msg = str(err) + clue.format('Query-Auto' if opts.dry_run else 'Maj-Auto')
|
||||
raised_err = err
|
||||
|
||||
except Exception as err:
|
||||
error_msg = str(err)
|
||||
raised_err = err
|
||||
else:
|
||||
if reporting:
|
||||
report(0, reconf=opts.reconfigure)
|
||||
|
||||
if error_msg is not None:
|
||||
fonctionseole.zephir("ERR", error_msg, z_proc, console=False)
|
||||
if reporting:
|
||||
if raised_err is not None:
|
||||
report(1, normalize(err))
|
||||
else:
|
||||
report(1, error_msg)
|
||||
if log is None:
|
||||
# No logger defined, error in argument parsing
|
||||
raise
|
||||
if opts.log_level == 'debug' and raised_err is not None:
|
||||
log.error(err, exc_info=True)
|
||||
else:
|
||||
log.error(error_msg)
|
||||
sys.exit(1)
|
||||
|
||||
if opts.reconfigure:
|
||||
try:
|
||||
reconfigure.main(force_options={'auto': opts.reboot, 'log_level': opts.log_level},
|
||||
force_args=[], need_lock=False)
|
||||
except Exception as err:
|
||||
fonctionseole.zephir("ERR", str(err), z_proc, console=False)
|
||||
if reporting:
|
||||
report(1, normalize(err))
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
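A usage sketch for the main options above (the mirror host name is hypothetical):

    Query-Auto                          # dry-run: list available updates (the wrapper below forces --dry-run)
    Maj-Auto -r                         # update, then reconfigure automatically on success
    Maj-Auto -C eole                    # take the 'eole' repository from the candidate (proposed) level
    Maj-Auto -i -S mirror.example.org   # ignore creoled and use an explicit EOLE mirror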
bin/Maj-Cd (Executable file, 3 lines added)
@@ -0,0 +1,3 @@
#!/bin/bash

Maj-Auto --cdrom $@
bin/Maj-Release (Executable file, 116 lines added)
@@ -0,0 +1,116 @@
#! /usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
##########################################################################
|
||||
# Maj-Release - Upgrade the EOLE server to a new release
|
||||
# Copyright © 2015 Pôle de compétences EOLE <eole@ac-dijon.fr>
|
||||
#
|
||||
# License CeCILL:
|
||||
# * in french: http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# * in english http://www.cecill.info/licences/Licence_CeCILL_V2-en.html
|
||||
##########################################################################
|
||||
|
||||
from os import system
|
||||
from sys import exit
|
||||
import re
|
||||
from creole.eoleversion import EOLE_RELEASE, LAST_RELEASE, EOLE_VERSION
|
||||
from pyeole.i18n import i18n
|
||||
from pyeole.ihm import print_red
|
||||
|
||||
import argparse
|
||||
from pyeole import scriptargs
|
||||
|
||||
_ = i18n('creole')
|
||||
|
||||
def parse_cmdline():
|
||||
"""Parse commande line.
|
||||
"""
|
||||
description = _(u"This script will upgrade to a new release of this distribution")
|
||||
parser = argparse.ArgumentParser(prog='Maj-Release',
|
||||
description=description,
|
||||
add_help=False)
|
||||
|
||||
parser.add_argument('-h', '--help',
|
||||
action='help',
|
||||
help=_(u"show this help message and exit"))
|
||||
|
||||
parser.add_argument('--release', help=_(u"Target release number"))
|
||||
|
||||
parser.add_argument('-f', '--force', action='store_true',
|
||||
help=_(u"Do not ask confirmation"))
|
||||
|
||||
opts = parser.parse_args()
|
||||
|
||||
return opts
|
||||
|
||||
|
||||
def main():
|
||||
opts = parse_cmdline()
|
||||
|
||||
print(_(u"This script will upgrade to a new release of this distribution"))
|
||||
all_releases = []
|
||||
current_release = int(EOLE_RELEASE.split('.')[-1])
|
||||
choices = range(current_release+1, int(LAST_RELEASE)+1)
|
||||
# Last is first displayed
|
||||
if choices == []:
|
||||
print_red(_(u"No stable new release available"))
|
||||
exit(1)
|
||||
choices.reverse()
|
||||
for release_suffix in choices:
|
||||
all_releases.append(EOLE_VERSION + '.' + str(release_suffix))
|
||||
|
||||
while True:
|
||||
if opts.release is not None:
|
||||
choice = opts.release
|
||||
else:
|
||||
for idx, release in enumerate(all_releases):
|
||||
print("{0}: {1}".format(idx+1, release))
|
||||
print(_(u"q|quit: abort"))
|
||||
|
||||
try:
|
||||
choice = raw_input("[1] : ")
|
||||
except (KeyboardInterrupt, EOFError):
|
||||
print_red(_("\nUpgrade aborted by user"))
|
||||
exit(0)
|
||||
|
||||
if choice == '':
|
||||
# User hit enter
|
||||
choice = 1
|
||||
elif choice in all_releases:
|
||||
# User entered a release number
|
||||
choice = all_releases.index(choice) + 1
|
||||
else:
|
||||
try:
|
||||
choice = int(choice)
|
||||
except ValueError:
|
||||
if re.match(r'^q(uit)?', choice):
|
||||
print_red(_(u"Voluntary stay of proceedings"))
|
||||
exit(0)
|
||||
else:
|
||||
print_red(_(u"Invalid response: {0}").format(choice))
|
||||
if opts.release is not None:
|
||||
exit(1)
|
||||
else:
|
||||
continue
|
||||
|
||||
if not 1 <= choice <= len(choices):
|
||||
print_red(_(u"Invalid response: {0}").format(choice))
|
||||
if opts.release is not None:
|
||||
exit(1)
|
||||
else:
|
||||
continue
|
||||
else:
|
||||
break
|
||||
|
||||
release = all_releases[choice - 1]
|
||||
if opts.force:
|
||||
force = '--force-update'
|
||||
else:
|
||||
force = ''
|
||||
|
||||
majrel = system('/usr/bin/Maj-Auto --release {0} {1}'.format(release, force))
|
||||
|
||||
exit(majrel)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
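A usage sketch (the target release number is hypothetical):

    Maj-Release                        # interactive: pick the target release from the displayed list
    Maj-Release --release 2.7.1 -f     # non-interactive upgrade, without confirmation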
bin/Query-Auto (Executable file, 3 lines added)
@@ -0,0 +1,3 @@
#!/bin/bash

Maj-Auto --dry-run $@
bin/Query-Cd (Executable file, 3 lines added)
@@ -0,0 +1,3 @@
#!/bin/bash

Maj-Cd --dry-run $@
bin/StartAll (Executable file, 7 lines added)
@@ -0,0 +1,7 @@
#!/bin/sh

# Stop all services
CreoleService all stop

# Start only enabled ones
CreoleService all start
bin/Upgrade-Auto (Executable file, 5 lines added)
@@ -0,0 +1,5 @@
#!/bin/bash

echo "La commande Upgrade-Auto ne permet plus de changer de sous-version du serveur EOLE."
echo "Merci d'utiliser la commande Maj-Release à la place."
exit 1
bin/diagnose (Executable file, 65 lines added)
@@ -0,0 +1,65 @@
#!/bin/bash
###########################################################################
# Eole NG - 2007
# Copyright Pole de Competence Eole (Ministere Education - Academie Dijon)
# Licence CeCill cf /root/LicenceEole.txt
# eole@ac-dijon.fr
#
# diagnose
#
# Verifie l'instanciation d'un serveur
#
###########################################################################
. /usr/lib/eole/ihm.sh
. /usr/lib/eole/utils.sh

only_root

CREOLE_FILE="/etc/eole/config.eol"
RELEASE_FILE="/etc/eole/release"
DIAG_DIR="/usr/share/eole/diagnose"
err_prefix="Diagnose impossible"

TestFile $CREOLE_FILE
if [ ${?} -eq 1 ]
then
    EchoRouge "${err_prefix} : le serveur n'est pas instancié"
    exit 1
fi
TestFile $RELEASE_FILE
if [ ${?} -eq 1 ]
then
    EchoRouge "${err_prefix} : le serveur n'est pas instancié"
    exit 1
fi
TestDir $DIAG_DIR
if [ ${?} -eq 1 ]
then
    EchoRouge "${err_prefix} : pas de script diagnose disponible"
    exit 1
fi
TestCreoled
if [ ${?} -eq 1 ]
then
    EchoRouge "${err_prefix} : creoled est arrêté"
    exit 1
fi

Opt=""
while getopts "LWT" Option
do
    case $Option in
        L ) export Verbose="yes";;
        W ) export ModeEad="yes";;
        T ) export ModeTxt="yes";;
        * ) exit 1;;
    esac
done

. $RELEASE_FILE
DETAILS="$(CreoleGet nom_machine) $(CreoleGet numero_etab)"
[ -z "$EOLE_RELEASE" ] && EOLE_RELEASE=$EOLE_VERSION
EchoGras "*** Test du module $EOLE_MODULE version $EOLE_RELEASE ($DETAILS) ***"
echo
run-parts $DIAG_DIR
EchoGras "*** FIN DU DIAGNOSTIC ***"
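A usage sketch for the getopts flags above:

    diagnose        # standard report
    diagnose -L     # verbose report (Verbose=yes)
    diagnose -LT    # verbose plain-text output (ModeTxt=yes), as used by gen_rpt below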
bin/gen_patch (Executable file, 27 lines added)
@@ -0,0 +1,27 @@
#!/usr/bin/env python
# -*- coding: UTF-8 -*-

from os import listdir, system, chdir
from os.path import isfile, join, basename
from creole import config

modif_dir = basename(config.modif_dir)
distrib_dir = basename(config.distrib_dir)
patch_dir = basename(config.patch_dir)

system('clear')

# on travaille dans le répertoire eole
chdir(config.eoleroot)

print "** Génération des patches à partir de %s **\n" % modif_dir
for modfile in listdir(modif_dir):
    if modfile.endswith('~'):
        continue
    if not isfile(join(distrib_dir, modfile)):
        print "ATTENTION : le fichier original %s n'existe pas !" % join(distrib_dir, modfile)
        continue
    print "Génération du patch %s.patch" % modfile
    system("diff -uNr %s %s > %s.patch" % (join(distrib_dir,modfile), join(modif_dir,modfile), join(patch_dir,modfile)))

print "\n** Fin de la génération des patch **\n"
bin/gen_rpt (Executable file, 137 lines added)
@@ -0,0 +1,137 @@
#!/bin/bash
|
||||
###########################################################################
|
||||
# EOLE - 2010
|
||||
# Copyright Pole de Competence Eole (Ministere Education - Academie Dijon)
|
||||
# Licence CeCill cf /root/LicenceEole.txt
|
||||
# eole@ac-dijon.fr
|
||||
#
|
||||
# gen_rpt
|
||||
#
|
||||
# Génère un rapport d'anomalie
|
||||
#
|
||||
###########################################################################
|
||||
|
||||
TestConf()
|
||||
{
|
||||
[ -e "$1" ] && return 0
|
||||
tput setaf 3
|
||||
echo "* Erreur $0 : le fichier de configuration $1 absent"
|
||||
echo "* Instanciez votre serveur"
|
||||
tput sgr0
|
||||
exit 1
|
||||
}
|
||||
|
||||
clear
|
||||
|
||||
. /usr/lib/eole/ihm.sh
|
||||
. /usr/lib/eole/utils.sh
|
||||
|
||||
only_root
|
||||
|
||||
numero_etab=$(CreoleGet numero_etab)
|
||||
CONFIGEOL='/etc/eole/config.eol'
|
||||
EOLEDIRS="/usr/share/eole/creole/dicos"
|
||||
PATCHDIR="/usr/share/eole/creole/patch"
|
||||
TestConf $CONFIGEOL
|
||||
EOLERELEASE="/etc/eole/release"
|
||||
if [ ! -e $EOLERELEASE ]; then
|
||||
EchoRouge "Fichier $EOLERELEASE est introuvable"
|
||||
exit 1
|
||||
fi
|
||||
. $EOLERELEASE
|
||||
Module="${EOLE_MODULE}-${EOLE_VERSION}"
|
||||
echo "Récupération des informations ..."
|
||||
RepRpt="/tmp/GenRpt"
|
||||
rm -fr $RepRpt 2> /dev/null
|
||||
mkdir $RepRpt
|
||||
mkdir $RepRpt/log
|
||||
mkdir $RepRpt/eole
|
||||
mkdir $RepRpt/system
|
||||
Rpt=$RepRpt"/Rpt-"$Module"-"$numero_etab
|
||||
Mel="eole@ac-dijon.fr"
|
||||
|
||||
# les fichiers texte
|
||||
echo "Config.eol"
|
||||
/bin/cp -f $CONFIGEOL $RepRpt/eole
|
||||
echo "Diagnose"
|
||||
/usr/bin/diagnose -LT >> $RepRpt/diagnose.txt 2>&1
|
||||
echo Pstree
|
||||
pstree >> $RepRpt/system/pstree.txt 2>&1
|
||||
echo Lshw
|
||||
lshw >> $RepRpt/system/lshw.txt 2>&1
|
||||
echo Lsusb
|
||||
lsusb >> $RepRpt/system/lsusb.txt 2>&1
|
||||
echo Lspci
|
||||
lspci >> $RepRpt/system/lspci.txt 2>&1
|
||||
echo Iptables
|
||||
iptables -nvL > $RepRpt/system/iptables.txt 2>&1
|
||||
iptables -nvL -t nat >> $RepRpt/system/iptables.txt 2>&1
|
||||
echo History
|
||||
grep -v "^#" /root/.bash_history > $RepRpt/system/history.txt
|
||||
echo Paquets
|
||||
dpkg-query -W > $RepRpt/system/packages.txt 2>&1
|
||||
# les gz
|
||||
echo Syslog
|
||||
for log in rsyslog su sudo kernel cron auth chpasswd exim ; do
|
||||
[ -d /var/log/rsyslog/local/$log ] && gzip -rc /var/log/rsyslog/local/$log > $RepRpt/log/$log.gz
|
||||
done
|
||||
echo Dmesg
|
||||
dmesg > $RepRpt/log/dmesg.log 2>&1
|
||||
gzip $RepRpt/log/dmesg.log
|
||||
echo Creole.log
|
||||
gzip -c /var/log/reconfigure.log > $RepRpt/log/reconfigure.log.gz
|
||||
echo Dicos
|
||||
gzip -rc $EOLEDIRS > $RepRpt/eole/dicos.gz
|
||||
echo Patch
|
||||
gzip -rc $PATCHDIR > $RepRpt/eole/patch.gz
|
||||
echo Stats
|
||||
gzip -rc /usr/share/zephir/monitor/stats > $RepRpt/stats.gz
|
||||
|
||||
# spécifique Scribe
|
||||
if [ -f /var/www/ead/extraction/tmp/rapport.txt ];then
|
||||
echo "Rapport d'extraction"
|
||||
gzip -rc /var/www/ead/extraction/tmp/rapport.txt > $RepRpt/log/extraction.log.gz
|
||||
fi
|
||||
if [ -f /var/log/controle-vnc/main.log ];then
|
||||
echo 'Log client scribe'
|
||||
gzip -rc /var/log/controle-vnc/main.log > $RepRpt/log/controle-vnc.log.gz
|
||||
fi
|
||||
|
||||
# spécifique Scribe/Horus/Eclair
|
||||
if [ -d /var/lib/eole/reports ];then
|
||||
echo "Rapport (sauvegarde/maj/...)"
|
||||
gzip -rc /var/lib/eole/reports > $RepRpt/log/rapport.log.gz
|
||||
fi
|
||||
|
||||
# spécifique Amon
|
||||
if [ -f '/usr/share/eole/test-rvp' ];then
|
||||
echo 'Rvp'
|
||||
/usr/sbin/ipsec status &> $RepRpt/ipsec.status 2>&1
|
||||
fi
|
||||
|
||||
# Rapport debsums
|
||||
if [ -x '/usr/share/eole/debsums/show-reports.py' ]; then
|
||||
echo "Rapport debsums"
|
||||
/usr/share/eole/debsums/show-reports.py > ${RepRpt}/log/rapport-debsums.log 2>&1
|
||||
fi
|
||||
|
||||
echo
|
||||
Archive=$Module-$numero_etab".tar.gz"
|
||||
echo "Création de l'archive locale $Archive"
|
||||
tar -C /tmp -czf $Archive GenRpt
|
||||
echo
|
||||
|
||||
Question_ouinon "Envoyer l'archive par email ?"
|
||||
if [ $? -eq 1 ];then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "Destinataire du message : "
|
||||
echo -n "[$Mel] : "
|
||||
read mail
|
||||
if [ "$mail" == "" ];then
|
||||
mail=$Mel
|
||||
fi
|
||||
echo -n "Commentaire : "
|
||||
read comment
|
||||
echo "$comment"|mutt -a $Archive -s "Rapport $Module de $numero_etab" -c $mail -e "set copy=no"
|
bin/instance (Executable file, 24 lines added)
@@ -0,0 +1,24 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-

"""Application de la configuration EOLE
"""


import sys
from creole.reconfigure import main
from creole.error import UserExitError, LockError, UnlockError, UserExit
from pyeole.ihm import only_root

only_root()

try:
    # Force interactive mode
    main(force_options={'interactive': True})
except (UserExitError, LockError, UnlockError):
    sys.exit(1)
except UserExit:
    sys.exit(0)
except:
    #FIXME: log & affichage géré au raise ?
    sys.exit(1)
bin/manage-eole (Executable file, 173 lines added)
@@ -0,0 +1,173 @@
#!/bin/bash
|
||||
##########################################################
|
||||
#
|
||||
# Eole NG - 2010
|
||||
# Copyright Pole de Competence Eole (Ministere Education - Academie Dijon)
|
||||
# Licence CeCill http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# eole@ac-dijon.fr
|
||||
#
|
||||
# Gestion des modules en mode dialogue
|
||||
# avec les comptes système eole, eole2
|
||||
#
|
||||
##########################################################
|
||||
|
||||
# un seul manage ?
|
||||
pmanage=`pgrep manage-eole`
|
||||
nbmanage=`echo $pmanage | wc -w`
|
||||
|
||||
# fichiers temporaires
|
||||
temp="/tmp/InBox-Eol-"
|
||||
tempfile="$temp$$"
|
||||
|
||||
TitreGen="Eole - Gestion du Serveur"
|
||||
|
||||
##########################################################
|
||||
# Fonctions reprises de FonctionsEole
|
||||
##########################################################
|
||||
|
||||
MenuBox()
|
||||
{
|
||||
#${1="Votre Saisie"}
|
||||
#${2="Saisie"}
|
||||
NBlignes=${NBlignes=5}
|
||||
Menu="$3"
|
||||
dialog $NOMOUSE1 --backtitle "$TitreGen" \
|
||||
--aspect 45 --clear \
|
||||
--menu "$1" 16 50 $NBlignes \
|
||||
$Menu 2> $tempfile
|
||||
retval=$?
|
||||
case $retval in
|
||||
0)
|
||||
eval $2="`cat $tempfile`";;
|
||||
1) # Cancel
|
||||
eval $2="CANCEL";;
|
||||
255) # ESC
|
||||
if test -s $tempfile ;
|
||||
then
|
||||
eval $2=`cat $tempfile`
|
||||
else
|
||||
eval $2="ESC"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
OkBox()
|
||||
{
|
||||
dialog $NOMOUSE1 --backtitle "$TitreGen" \
|
||||
--aspect 45 --cancel-label Abandon\
|
||||
--msgbox "$1" 0 0
|
||||
}
|
||||
|
||||
QuestionBox()
|
||||
{
|
||||
#${1=Votre Saisie"}
|
||||
#${2="Saisie"}
|
||||
dialog $NOMOUSE1 --backtitle "$TitreGen" \
|
||||
--aspect 45 --clear \
|
||||
--yesno "$1" 16 50
|
||||
retval=$?
|
||||
case $retval in
|
||||
0)
|
||||
eval $2="OUI";;
|
||||
1) # Cancel
|
||||
eval $2="NON";;
|
||||
255) # ESC
|
||||
eval $2="ESC" ;;
|
||||
esac
|
||||
}
|
||||
|
||||
Entree(){
|
||||
echo
|
||||
echo "Tapez <Entrée>"
|
||||
read Bidon
|
||||
}
|
||||
|
||||
CleanExit(){
|
||||
echo "Au revoir !"
|
||||
rm -f $tempfile
|
||||
exit $1
|
||||
}
|
||||
|
||||
##########################################################
|
||||
# Programme principal
|
||||
##########################################################
|
||||
|
||||
if [ $nbmanage -gt 1 ]
|
||||
then
|
||||
MenuBox "D'autres instances de manage-eole ont été détectées" Rep "1 Quitter_sans_tuer 2 Quitter_et_tuer"
|
||||
rm -f "$temp*"
|
||||
if [ "$Rep" == "2" ]
|
||||
then
|
||||
for pid in $pmanage
|
||||
do
|
||||
kill -9 $pid
|
||||
done
|
||||
fi
|
||||
CleanExit 0
|
||||
fi
|
||||
|
||||
OkBox "Administration EOLE\n\nPour Vous Deplacer sur l'Ecran\nUtiliser votre Souris\nOu la touche tabulation.\n\n"
|
||||
|
||||
Rep=""
|
||||
while [ 1 ]
|
||||
do
|
||||
# FIXME/TODO : ajouter des entrées de menu !
|
||||
MenuBox "Votre Choix" Rep "1 Diagnostic 2 Reconfiguration 3 Paquets_en_Maj 4 Mise_A_Jour 8 Redemarrer_Serveur 9 Arret_Serveur ! Shell_Linux Q Quitter"
|
||||
|
||||
if [ "$Rep" == "CANCEL" ]
|
||||
then
|
||||
CleanExit 1
|
||||
fi
|
||||
|
||||
case $Rep in
|
||||
1)
|
||||
echo "En cours ..."
|
||||
sudo /usr/bin/diagnose
|
||||
Entree
|
||||
;;
|
||||
2)
|
||||
sudo /usr/bin/reconfigure
|
||||
Entree
|
||||
;;
|
||||
3)
|
||||
sudo /usr/bin/Query-Auto
|
||||
Entree
|
||||
;;
|
||||
4)
|
||||
sudo /usr/bin/Maj-Auto
|
||||
Entree
|
||||
;;
|
||||
# TODO : pouvoir inclure des entrées venant d'ailleurs ;)
|
||||
#5)
|
||||
#sudo /usr/share/eole/Maj-blacklist.sh
|
||||
#Entree
|
||||
#;;
|
||||
8)
|
||||
QuestionBox "Vous avez demandé le redémarrage du serveur\nEtes vous sur ?" Rep
|
||||
if [ "$Rep" == "OUI" ]
|
||||
then
|
||||
sudo /sbin/reboot
|
||||
sleep 1
|
||||
CleanExit 0
|
||||
fi
|
||||
;;
|
||||
9)
|
||||
QuestionBox "Vous avez demandé un arret total du serveur\nEtes vous sur ?" Rep
|
||||
if [ "$Rep" == "OUI" ]
|
||||
then
|
||||
sudo /sbin/halt -p
|
||||
sleep 1
|
||||
CleanExit 0
|
||||
fi
|
||||
;;
|
||||
!)
|
||||
echo "\"exit\" ou \"Ctrl + d\" pour revenir au Menu"
|
||||
/bin/bash
|
||||
;;
|
||||
Q)
|
||||
CleanExit 0
|
||||
;;
|
||||
|
||||
esac
|
||||
done
|
bin/reconfigure (Executable file, 22 lines added)
@@ -0,0 +1,22 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-

"""Application de la configuration EOLE
"""

import sys
from creole.reconfigure import main
from creole.error import UserExitError, LockError, UnlockError, UserExit
from pyeole.ihm import only_root

only_root()

try:
    main()
except (UserExitError, LockError, UnlockError):
    sys.exit(1)
except UserExit:
    sys.exit(0)
except:
    #FIXME: log & affichage géré au raise ?
    sys.exit(1)
creole.mk (Normal file, 15 lines added)
@@ -0,0 +1,15 @@
# creole specific rules

schedule_PROG_DIR := $(eole_DIR)/schedule
upgrade_REC_DIR := $(eole_DIR)/upgrade
bin_PROG_DIR := $(DESTDIR)/usr/bin
sbin_PROG_DIR := $(DESTDIR)/usr/sbin
data_REC_DIR := $(DESTDIR)/usr/share/creole
fr.man8_DATA_DIR := $(DESTDIR)/usr/share/man/fr.UTF-8/man8
en.man8_DATA_DIR := $(DESTDIR)/usr/share/man/man8
motd_PROG_DIR := $(DESTDIR)/etc/update-motd.d
local_DATA_DIR := $(DESTDIR)/usr/share/eole/creole/dicos/local

install-files::
	# To inform user about coding changes
	$(INSTALL_DATA) deprecated/FonctionsEoleNg $(eole_DIR)
0
creole/__init__.py
Normal file
1807
creole/annotator.py
Normal file
File diff suppressed because it is too large
637
creole/cert.py
Normal file
|
@ -0,0 +1,637 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
###########################################################################
|
||||
#
|
||||
# Eole NG - 2007
|
||||
# Copyright Pole de Competence Eole (Ministere Education - Academie Dijon)
|
||||
# Licence CeCill cf /root/LicenceEole.txt
|
||||
# eole@ac-dijon.fr
|
||||
#
|
||||
# libsecure.py
|
||||
#
|
||||
# classes utilitaires pour lancement des services en https
|
||||
#
|
||||
###########################################################################
|
||||
"""
|
||||
points d'entrée de l'api
|
||||
|
||||
- gen_certif -> génère **un** certif
|
||||
- gen_certs -> génère tous les certifs
|
||||
|
||||
cf creole/doc/certifs.txt
|
||||
|
||||
"""
|
||||
# certains imports sont utilisés dans les fragments de code installés
|
||||
# dans /usr/share/eole/certs
|
||||
from os.path import join, splitext, basename, dirname, isdir, isfile, islink, exists, realpath
|
||||
from os import unlink, symlink, stat
|
||||
import os, glob, time
|
||||
from shutil import copy
|
||||
from subprocess import Popen, PIPE
|
||||
from OpenSSL import SSL
|
||||
import re
|
||||
|
||||
from .i18n import _
|
||||
|
||||
# chemin du certificat eole par défaut
|
||||
from .config import cert_file, key_file, SSL_LAST_FILE
|
||||
from .client import CreoleClient
|
||||
from pyeole.process import system_out, system_code
|
||||
|
||||
client = CreoleClient()
|
||||
|
||||
global regexp_get_subject
|
||||
regexp_get_subject = None
|
||||
|
||||
def prep_dir() :
|
||||
"""
|
||||
Création de l'arborescence pour openssl
|
||||
"""
|
||||
#on génère le random
|
||||
load_default_conf_if_needed()
|
||||
rand_file = os.path.join(ssl_dir, ".rand")
|
||||
if not os.path.isfile(rand_file) :
|
||||
cmd_random = "/bin/dd if=/dev/urandom of=%s bs=1k count=16 >/dev/null 2>&1" % (rand_file)
|
||||
cmd = Popen(cmd_random, shell=True)
|
||||
res = cmd.wait()
|
||||
if res != 0:
|
||||
raise Exception(_(u"! Error while generating entropy file !"))
|
||||
#on crée les fichiers pour gerer la pki
|
||||
file_serial = os.path.join(ssl_dir, "serial")
|
||||
if not os.path.isfile(file_serial) :
|
||||
f = file(file_serial, "w")
|
||||
f.write(str(start_index))
|
||||
f.close()
|
||||
file_index = os.path.join(ssl_dir, "index.txt")
|
||||
if not os.path.isfile(file_index) :
|
||||
f = file(file_index, "w")
|
||||
f.close()
|
||||
newcerts = os.path.join(ssl_dir, "newcerts")
|
||||
if not os.path.isdir(newcerts):
|
||||
os.makedirs(newcerts)
|
||||
if not os.path.isdir(key_dir):
|
||||
os.makedirs(key_dir)
|
||||
if not os.path.isdir(cert_dir):
|
||||
os.makedirs(cert_dir)
|
||||
if not os.path.isdir(req_dir):
|
||||
os.makedirs(req_dir)
|
||||
if not os.path.isdir(local_ca_dir):
|
||||
os.makedirs(local_ca_dir)
|
||||
##cmd = Popen("chmod 611 %s" % (key_dir), shell=True)
|
||||
dhfile = os.path.join(ssl_dir, "dh")
|
||||
if not os.path.isfile(dhfile):
|
||||
gen_dh = '/usr/bin/openssl dhparam -out "%s" 1024 >/dev/null 2>&1' % (dhfile)
|
||||
Popen(gen_dh, shell=True)
|
||||
|
||||
def sup_passwd(tmp_keyfile, keyfile) :
|
||||
"""
|
||||
Supression de la passphrase sur la clef privée
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
key_cmd = '/usr/bin/openssl rsa -in "%s" -passin pass:secret -out "%s" >/dev/null 2>&1' % (tmp_keyfile, keyfile)
|
||||
cmd = Popen(key_cmd, shell=True)
|
||||
res = cmd.wait()
|
||||
if res != 0:
|
||||
raise Exception(_(u'! Error while generating ssl key in {0} !').format(keyfile))
|
||||
|
||||
def finalise_cert (certfile, keyfile, key_user='', key_grp='', key_chmod='',
|
||||
cert_user='', cert_grp='', cert_chmod=''):
|
||||
"""
|
||||
Finalisation du certif
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
if key_user != '':
|
||||
try:
|
||||
res = Popen("chown %s %s" % (key_user, keyfile), shell=True).wait()
|
||||
assert res == 0
|
||||
except:
|
||||
print _(u"\n! Rights on {0} can't be modified").format(keyfile)
|
||||
return False
|
||||
if key_grp != '':
|
||||
try:
|
||||
res=Popen("/bin/chgrp %s %s" % (key_grp, keyfile), shell=True).wait()
|
||||
assert res == 0
|
||||
except:
|
||||
print _(u"\n! Rights on {0} can't be modified").format(keyfile)
|
||||
return False
|
||||
if key_chmod != '':
|
||||
try:
|
||||
res = Popen("/bin/chmod %s %s" % (key_chmod, keyfile), shell=True).wait()
|
||||
assert res == 0
|
||||
except:
|
||||
print _(u"\n! Rights on {0} can't be modified").format(keyfile)
|
||||
return False
|
||||
if cert_user != '':
|
||||
try:
|
||||
res = Popen("/bin/chown %s %s" % (cert_user, certfile), shell=True).wait()
|
||||
assert res == 0
|
||||
except:
|
||||
print _(u"\n! Rights on {0} can't be modified").format(certfile)
|
||||
return False
|
||||
if cert_grp != '':
|
||||
try:
|
||||
res = Popen("/bin/chgrp %s %s" % (cert_grp, certfile), shell=True).wait()
|
||||
assert res == 0
|
||||
except:
|
||||
print _(u"\n! Rights on {0} can't be modified").format(certfile)
|
||||
return False
|
||||
if cert_chmod != '':
|
||||
try:
|
||||
res = Popen("/bin/chmod %s %s" % (cert_chmod, certfile), shell=True).wait()
|
||||
assert res == 0
|
||||
except:
|
||||
print _(u"\n! Rights on {0} can't be modified").format(certfile)
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def is_simple_cert(cert_file):
|
||||
"""
|
||||
Teste si le fichier contient un simple certificat ou une chaîne.
|
||||
:param cert_file: chemin du fichier à tester
|
||||
:type cert_file: str
|
||||
"""
|
||||
with open(cert_file, 'r') as pem:
|
||||
cert_num = len(re.findall(r'-+BEGIN CERTIFICATE-+', pem.read()))
|
||||
return cert_num == 1
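# Illustrative note (hypothetical files): a PEM file holding a single
# "-----BEGIN CERTIFICATE-----" block, such as a server certificate alone,
# yields True; a bundle concatenating the server certificate with its CA
# chain contains several blocks and yields False.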
|
||||
|
||||
def get_certs_catalog(simple=True):
|
||||
"""
|
||||
Créer un dictionnaire des certificats présents
|
||||
pour accélérer la reconstitution de la chaîne
|
||||
de certificats intermédiaires.
|
||||
:param simple: filtre sur les certificats à référencer
|
||||
:type simple: booléen
|
||||
"""
|
||||
global certs_catalog
|
||||
certs_catalog = {}
|
||||
for cert_file in glob.glob(os.path.join(ssl_dir, 'certs/*')):
|
||||
try:
|
||||
if simple and is_simple_cert(cert_file):
|
||||
certs_catalog[get_subject(certfile=cert_file)] = cert_file
|
||||
elif not simple:
|
||||
certs_catalog[get_subject(certfile=cert_file)] = cert_file
|
||||
except:
|
||||
continue
|
||||
return certs_catalog
|
||||
|
||||
|
||||
def get_certs_chain(certs):
|
||||
"""
|
||||
Récupération de la chaîne de certificats
|
||||
:param certs: liste des certificats dans l'ordre de la chaîne.
|
||||
:type certs: liste de chemins
|
||||
"""
|
||||
global certs_catalog, ca_issuer
|
||||
load_default_conf_if_needed()
|
||||
subject = get_subject(certfile=certs[-1])
|
||||
issuer = get_issuer_subject(certfile=certs[-1])
|
||||
if ca_issuer is None:
|
||||
ca_issuer = get_issuer_subject(certfile=ca_file)
|
||||
if subject == issuer:
|
||||
pass
|
||||
elif issuer == ca_issuer:
|
||||
certs.append(ca_file)
|
||||
else:
|
||||
try:
|
||||
if certs_catalog is None:
|
||||
certs_catalog = get_certs_catalog()
|
||||
certs.append(certs_catalog[issuer])
|
||||
get_certs_chain(certs)
|
||||
except KeyError as e:
|
||||
print _(u"Certificate chain incomplete.")
|
||||
return certs
|
||||
|
||||
|
||||
def get_intermediate_certs(cert):
|
||||
"""
|
||||
Récupération de la liste des certificats intermédiaires.
|
||||
:param cert: chemin du certificat pour lequel on reconstitue la chaîne
|
||||
:type cert:
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
try:
|
||||
chain = get_certs_chain([cert,])[1:-1]
|
||||
except:
|
||||
chain = []
|
||||
return chain
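# Illustrative note (hypothetical file names): for a chain resolved by
# get_certs_chain() as [server.crt, intermediate.crt, root_ca.crt],
# get_intermediate_certs() drops the first and last entries and returns
# only [intermediate.crt].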
|
||||
|
||||
|
||||
def concat_fic(dst_fic, in_fics, overwrite=False, need_link=True):
|
||||
"""
|
||||
Concaténation d'une liste de fichiers dans un fichier de destination
|
||||
(le contenu d'origine est conservé)
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
if need_link:
|
||||
remove_link(dst_fic)
|
||||
if type(in_fics) != list:
|
||||
in_fics = [in_fics]
|
||||
for fic in in_fics:
|
||||
if not os.path.isfile(fic):
|
||||
print _(u"Error: file {0} does not exist").format(fic)
|
||||
data = ""
|
||||
for fic_src in in_fics:
|
||||
f_src = file(fic_src)
|
||||
data += f_src.read().rstrip() + '\n'
|
||||
f_src.close()
|
||||
if overwrite:
|
||||
f_dst = file(dst_fic, "w")
|
||||
else:
|
||||
f_dst = file(dst_fic, "a+")
|
||||
f_dst.write(data)
|
||||
f_dst.close()
|
||||
if need_link:
|
||||
build_link(dst_fic, in_fics)
|
||||
|
||||
def gen_certs(regen=False, merge=True):
|
||||
"""
|
||||
Génère la ca puis les certificats
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
verif_ca()
|
||||
ca_generated = gen_ca(regen)
|
||||
if merge:
|
||||
merge_ca()
|
||||
if ca_generated:
|
||||
regen = True
|
||||
certif_loader(regen=regen)
|
||||
|
||||
def verif_ca():
|
||||
"""
|
||||
vérifie que la ca est générée correctement (serial > 0xstart_index) et cn valide
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
# gestion des anciennes version de ca.crt
|
||||
if os.path.isfile(ca_dest_file) and not os.path.isfile(ca_file):
|
||||
# on reprend le premier certificat présent dans ca.crt dans ca_local.crt
|
||||
ca_certs = open(ca_dest_file).read().strip()
|
||||
tag_begin = '-----BEGIN CERTIFICATE-----'
|
||||
try:
|
||||
ca_data = tag_begin + ca_certs.split(tag_begin)[1]
|
||||
local_ca = open(ca_file, 'w')
|
||||
local_ca.write(ca_data)
|
||||
local_ca.close()
|
||||
except IndexError:
|
||||
# impossible de reprendre la ca actuelle, elle sera regénérée
|
||||
pass
|
||||
serial = int(str(start_index), 16)
|
||||
# vérification de la valeur actuelle du ca
|
||||
# vérification du cn de la ca
|
||||
if os.path.isfile(ca_file):
|
||||
cmd = Popen(['/usr/bin/openssl', 'x509', '-in', ca_file, '-subject', '-noout'], stdout=PIPE)
|
||||
if cmd.wait() != 0:
|
||||
unlink(ca_file)
|
||||
prep_dir()
|
||||
if os.path.isfile(file_serial):
|
||||
serial = open(file_serial).read().strip()
|
||||
# conversion en hexa
|
||||
serial = int(serial, 16)
|
||||
if serial < min_serial:
|
||||
if os.path.isfile(ca_file):
|
||||
unlink(ca_file)
|
||||
unlink(file_serial)
|
||||
for f_index in glob.glob(os.path.join(ssl_dir, 'index*')):
|
||||
unlink(f_index)
|
||||
for f_cert in glob.glob(os.path.join(newcerts_dir, '*.pem')):
|
||||
unlink(f_cert)
|
||||
prep_dir()
|
||||
|
||||
def gen_ca(regen=False, del_passwd=True, extensions="SERVEUR"):
|
||||
"""
|
||||
Generation ca
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
generated = False
|
||||
prep_dir()
|
||||
if not os.path.isfile(ca_conf_file):
|
||||
raise Exception(_(u"Certificate configuration template can not be found:\n\t{0}\n").format(ca_conf_file))
|
||||
if regen or (not os.path.isfile(ca_keyfile)) or (not os.path.isfile(ca_file)):
|
||||
print("* " + _(u"Generating CA certificate"))
|
||||
remove_link(ca_file)
|
||||
## On genère le certif de l'ac
|
||||
ca_gen = '/usr/bin/openssl req -x509 -config %s -newkey rsa:%s -days %s -keyout "%s" -out "%s" -extensions %s >/dev/null 2>&1' % (ca_conf_file, ssl_default_key_bits, ssl_default_cert_time, tmp_keyfile, ca_file, extensions)
|
||||
cmd = Popen(ca_gen, shell=True)
|
||||
if cmd.wait() != 0:
|
||||
raise Exception(_(u"Error while generating CA"))
|
||||
if del_passwd:
|
||||
sup_passwd(tmp_keyfile, ca_keyfile)
|
||||
if os.path.isfile(tmp_keyfile):
|
||||
unlink(tmp_keyfile)
|
||||
generated = True
|
||||
## application des droits
|
||||
finalise_cert(ca_file, ca_keyfile, key_chmod='600')
|
||||
build_link(ca_file)
|
||||
## génération d'une crl
|
||||
if not os.path.isfile(os.path.join(ssl_dir, 'eole.crl')):
|
||||
print(_(u"Generating certificate revocation list (CRL)"))
|
||||
crl_gen = '/usr/bin/openssl ca -gencrl -config %s -crldays %s -out %s/eole.crl >/dev/null 2>&1' % (ca_conf_file, ssl_default_cert_time, ssl_dir)
|
||||
cmd = Popen(crl_gen, shell=True)
|
||||
if cmd.wait() != 0:
|
||||
raise Exception(_(u"Error while generating CRL ({0}/eole.crl)").format(ssl_dir))
|
||||
return generated
|
||||
|
||||
def merge_ca():
|
||||
"""
|
||||
concatène toutes les ca utiles dans ca.crt
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
## concaténation des certificats education
|
||||
ca_list = [ca_file, os.path.join(cert_dir, 'ACInfraEducation.pem')]
|
||||
## concaténation de certificats supplémentaires si définis
|
||||
for ca_perso in glob.glob(os.path.join(local_ca_dir,'*.*')):
|
||||
if os.path.isfile(ca_perso):
|
||||
ca_list.append(ca_perso)
|
||||
concat_fic(ca_dest_file, ca_list, True, False)
|
||||
|
||||
def gen_certif(certfile, keyfile=None, key_user='', key_grp='', key_chmod='',
|
||||
cert_user='', cert_grp='', cert_chmod='', regen=False, copy_key=False,
|
||||
del_passwd=True, signe_req=True, container=None, client_cert=False,
|
||||
cert_conf_file=None):
|
||||
"""
|
||||
Génération des requêtes de certificats et signature par la CA
|
||||
"""
|
||||
if not cert_conf_file:
|
||||
if client_cert:
|
||||
cert_conf_file = client_conf_file
|
||||
else:
|
||||
cert_conf_file = conf_file
|
||||
load_default_conf_if_needed()
|
||||
if not os.path.isfile(cert_conf_file):
|
||||
raise Exception(_(u"Certificate configuration template can not be found:\n\t{0}\n").format(cert_conf_file))
|
||||
|
||||
basefile = os.path.splitext(certfile)[0]
|
||||
if keyfile is None:
|
||||
keyfile = "%s.key" % (basefile)
|
||||
|
||||
if container != None:
|
||||
cpath = client.get_container(name=container)['path']
|
||||
certfile = cpath + certfile
|
||||
keyfile = cpath + keyfile
|
||||
|
||||
if regen or not os.path.isfile(certfile) or not os.path.isfile(keyfile):
|
||||
|
||||
remove_link(certfile)
|
||||
if not isdir(dirname(certfile)):
|
||||
raise Exception(_(u"Folder {0} does not exist.").format(dirname(certfile)))
|
||||
if not isdir(dirname(keyfile)):
|
||||
raise Exception(_(u"Folder {0} does not exist.").format(dirname(keyfile)))
|
||||
|
||||
# certificat absent ou regénération demandée
|
||||
fic_p10 = os.path.join(req_dir, "%s.p10" % (os.path.basename(basefile)))
|
||||
# génération de la requête de certificat x509 et d'un simili certificat auto-signé
|
||||
if exists(keyfile):
|
||||
gen_req = '/usr/bin/openssl req -new -key "%s" -days %s -config %s -out "%s" >/dev/null 2>&1' % (
|
||||
keyfile, ssl_default_cert_time, cert_conf_file, fic_p10)
|
||||
new_key = False
|
||||
else:
|
||||
gen_req = '/usr/bin/openssl req -new -newkey rsa:%s -days %s -config %s -keyout "%s" -out "%s" >/dev/null 2>&1' % (
|
||||
ssl_default_key_bits, ssl_default_cert_time, cert_conf_file, tmp_keyfile, fic_p10)
|
||||
new_key = True
|
||||
cmd = Popen(gen_req, shell=True)
|
||||
if cmd.wait() != 0:
|
||||
raise Exception(_(u'! Error while generating certificate request {0} !').format(fic_p10))
|
||||
if new_key:
|
||||
if del_passwd:
|
||||
sup_passwd(tmp_keyfile, keyfile)
|
||||
else:
|
||||
copy(tmp_keyfile, keyfile)
|
||||
if os.path.isfile(tmp_keyfile):
|
||||
unlink(tmp_keyfile)
|
||||
if signe_req:
|
||||
# on signe la requête
|
||||
ca_signe = '/usr/bin/openssl ca -in "%s" -config %s -out "%s" -batch -notext >/dev/null 2>&1' % (fic_p10, cert_conf_file, certfile)
|
||||
cmd = Popen(ca_signe, shell=True)
|
||||
if cmd.wait() != 0:
|
||||
raise Exception(_(u'! Error while signing certificate request {0} !').format(fic_p10))
|
||||
print(_(u"* Certificate {0} successfully generated").format(certfile))
|
||||
if copy_key:
|
||||
concat_fic(certfile, [keyfile], need_link=False)
|
||||
finalise_cert(certfile, keyfile, key_user=key_user,
|
||||
key_grp=key_grp, key_chmod=key_chmod,
|
||||
cert_user=cert_user, cert_grp=cert_grp,
|
||||
cert_chmod=cert_chmod)
|
||||
build_link(certfile)
|
||||
|
||||
|
||||
def remove_link(name, remove_broken_link=True):
|
||||
load_default_conf_if_needed()
|
||||
if not name.startswith(join(ssl_dir, 'certs')):
|
||||
return
|
||||
for cert_link in glob.glob(os.path.join(ssl_dir, 'certs/*')):
|
||||
if islink(cert_link):
|
||||
if remove_broken_link and not exists(cert_link):
|
||||
#print 'ok lien cassé pour {} donc supprimé'.format(cert_link)
|
||||
unlink(cert_link)
|
||||
elif str(name) == realpath(cert_link):
|
||||
#print 'ok suppression lien {} comme demandé ({})'.format(cert_link, name)
|
||||
unlink(cert_link)
|
||||
|
||||
|
||||
def build_link(name, concats=[]):
|
||||
load_default_conf_if_needed()
|
||||
if not name.startswith(join(ssl_dir, 'certs')):
|
||||
return
|
||||
def _check_contats_link(link):
|
||||
# supprimer tous les liens vers les fichiers utilises pour la concatenation
|
||||
if islink(link):
|
||||
if realpath(link) in concats:
|
||||
#print 'ok suppression du link {} ({} est dans {})'.format(link, realpath(link), concats)
|
||||
unlink(link)
|
||||
|
||||
def _check_link(fp, suffix):
|
||||
# calcul du bon suffix utilise dans le nom
|
||||
# si le fichier existe avec le suffix courant, ajoute 1 au numero de suffix
|
||||
new_name = join(dir_name, fp) + '.' + str(suffix)
|
||||
if islink(new_name):
|
||||
#print 'pas de suppression du link {} ({} n\'est pas dans {})'.format(new_name, realpath(new_name), concats)
|
||||
return _check_link(fp, suffix + 1)
|
||||
#else:
|
||||
# print "ok ce n'est pas un link {}".format(new_name)
|
||||
return new_name
|
||||
|
||||
def _build_link(ret):
|
||||
# creer un lien a partir du hash du subject
|
||||
if ret != '':
|
||||
fp = ret.split('\n')[0]
|
||||
if fp.isalnum():
|
||||
if concats != []:
|
||||
for link in glob.glob(join(dir_name, fp) + '.*'):
|
||||
_check_contats_link(link)
|
||||
|
||||
new_name = _check_link(fp, 0)
|
||||
#print 'ok creation du link {} vers {}'.format(new_name, name)
|
||||
symlink(name, new_name)
|
||||
return stat(new_name).st_mtime
|
||||
return 0
|
||||
|
||||
dir_name = dirname(name)
|
||||
subject_fp = ["/usr/bin/openssl", "x509", "-subject_hash", "-fingerprint", "-noout", "-in", name]
|
||||
subject_fp_old = ["/usr/bin/openssl", "x509", "-subject_hash_old", "-fingerprint", "-noout", "-in", name]
|
||||
new_timestamp = _build_link(system_out(subject_fp)[1])
|
||||
new_timestamp = max(_build_link(system_out(subject_fp_old)[1]), new_timestamp)
|
||||
if isfile(SSL_LAST_FILE):
|
||||
try:
|
||||
fh = open(SSL_LAST_FILE, 'r')
|
||||
timestamp = float(fh.read().strip())
|
||||
except ValueError:
|
||||
timestamp = 0
|
||||
if new_timestamp > timestamp:
|
||||
fh = open(SSL_LAST_FILE, 'w')
|
||||
fh.write(str(new_timestamp))
|
||||
fh.close()
|
||||
|
||||
|
||||
def rehash_if_needed():
|
||||
load_default_conf_if_needed()
|
||||
need_rehash = False
|
||||
if isfile(SSL_LAST_FILE):
|
||||
try:
|
||||
fh = open(SSL_LAST_FILE, 'r')
|
||||
timestamp = int(float(fh.read().strip()))
|
||||
for cert_link in glob.glob(os.path.join(ssl_dir, 'certs/*')):
|
||||
try:
|
||||
if timestamp < int(stat(cert_link).st_mtime):
|
||||
need_rehash = True
|
||||
break
|
||||
except:
|
||||
pass
|
||||
except ValueError:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
need_rehash = True
|
||||
else:
|
||||
need_rehash = True
|
||||
|
||||
if need_rehash:
|
||||
system_code(['/usr/bin/c_rehash'])
|
||||
new_timestamp = 0
|
||||
for cert_link in glob.glob(os.path.join(ssl_dir, 'certs/*')):
|
||||
if isfile(cert_link):
|
||||
timestamp = stat(cert_link).st_mtime
|
||||
if timestamp > new_timestamp:
|
||||
new_timestamp = timestamp
|
||||
fh = open(SSL_LAST_FILE, 'w')
|
||||
fh.write(str(new_timestamp))
|
||||
fh.close()
|
||||
|
||||
|
||||
# gen_certif utils reader
|
||||
|
||||
def certif_loader(regen=None):
|
||||
"""charge les fichiers permettant de générer les certificats
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
# XXX FIXME : changer le path de data vers les paquets container,
|
||||
# XXX FIXME et déplacer les .gen_cert
|
||||
files = glob.glob(join('/usr/share/eole/certs', '*_*.gen_cert'))
|
||||
files.sort()
|
||||
for fname in files:
|
||||
# puts name in global namespace because we need it in execfile's
|
||||
# namespace in rules_loader
|
||||
name = splitext(basename(fname))[0].split('_')[1]
|
||||
# exec gen_certs
|
||||
execfile(fname, globals(),locals())
|
||||
|
||||
def get_subject(cert=None, certfile=None):
|
||||
"""
|
||||
récupère le subject d'un certificat.
|
||||
spécifier obligatoirement un des deux paramètres :
|
||||
- cert : contenu du certificat
|
||||
- certfile : nom du fichier du certificat
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
global regexp_get_subject
|
||||
if None not in (cert, certfile):
|
||||
raise Exception(_(u'cert or certfile must be None'))
|
||||
if cert == certfile:
|
||||
raise Exception(_(u'cert or certfile must be set'))
|
||||
if certfile != None:
|
||||
cmd = ['openssl', 'x509', '-in', certfile, '-subject', '-noout']
|
||||
stdin = None
|
||||
else:
|
||||
cmd = ['openssl', 'x509', '-subject', '-noout']
|
||||
stdin = cert
|
||||
ret = system_out(cmd=cmd, stdin=stdin)
|
||||
if ret[0] != 0:
|
||||
raise Exception(_(u'error in {0}: {1}').format(' '.join(cmd), str(ret[2])))
|
||||
ret = ret[1].rstrip()
|
||||
if not ret.startswith("subject= "):
|
||||
raise Exception(_(u'Invalid certificate subject: {0} ').format(ret))
|
||||
if regexp_get_subject is None:
|
||||
regexp_get_subject = re.compile('^subject= (.*)/CN=(.*)')
|
||||
return regexp_get_subject.findall(ret)[0]
|
||||
|
||||
def get_issuer_subject(cert=None, certfile=None):
|
||||
"""
|
||||
récupère le subject de la CA d'un certificat.
|
||||
spécifier obligatoirement un des deux paramètres :
|
||||
- cert : contenu du certificat
|
||||
- certfile : nom du fichier du certificat
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
if None not in (cert, certfile):
|
||||
raise Exception(_(u'cert or certfile must be None'))
|
||||
if cert == certfile:
|
||||
raise Exception(_(u'cert or certfile must be set'))
|
||||
if certfile != None:
|
||||
cmd = ['openssl', 'x509', '-in', certfile, '-issuer', '-noout']
|
||||
stdin = None
|
||||
else:
|
||||
cmd = ['openssl', 'x509', '-issuer', '-noout']
|
||||
stdin = cert
|
||||
ret = system_out(cmd=cmd, stdin=stdin)
|
||||
if ret[0] != 0:
|
||||
raise Exception(_(u'error in {0}: {1}').format(' '.join(cmd), str(ret[2])))
|
||||
ret = ret[1].rstrip()
|
||||
if not ret.startswith("issuer= "):
|
||||
raise Exception(_(u'Invalid certificate issuer: {0} ').format(ret))
|
||||
regexp = '^issuer= (.*)/CN=(.*)'
|
||||
return re.findall(regexp, ret)[0]
|
||||
|
||||
def load_conf(ssl_dico):
|
||||
global ssl_dir, cert_dir, key_dir, tmp_keyfile, file_serial, req_dir
|
||||
global local_ca_dir, newcerts_dir, ca_conf_file, conf_file, client_conf_file
|
||||
global ca_file, ca_dest_file, ca_keyfile, start_index, min_serial
|
||||
global ssl_default_key_bits, ssl_default_cert_time
|
||||
global certs_catalog
|
||||
|
||||
ssl_dir = ssl_dico.get('ssl_dir', ssl_dir)
|
||||
cert_dir = ssl_dico.get('cert_dir', os.path.join(ssl_dir, "certs"))
|
||||
key_dir = ssl_dico.get('key_dir', os.path.join(ssl_dir, "private"))
|
||||
tmp_keyfile = ssl_dico.get('tmp_keyfile', os.path.join(key_dir, "tmpkey.key"))
|
||||
file_serial = ssl_dico.get('file_serial', os.path.join(ssl_dir, "serial"))
|
||||
req_dir = ssl_dico.get('req_dir', os.path.join(ssl_dir, "req"))
|
||||
local_ca_dir = ssl_dico.get('local_ca_dir', os.path.join(ssl_dir, "local_ca"))
|
||||
newcerts_dir = ssl_dico.get('newcerts_dir', os.path.join(ssl_dir, "newcerts"))
|
||||
ca_conf_file = ssl_dico.get('ca_conf_file', ca_conf_file)
|
||||
conf_file = ssl_dico.get('conf_file', conf_file)
|
||||
client_conf_file = ssl_dico.get('client_conf_file', conf_file)
|
||||
# chemin de la CA
|
||||
ca_file = ssl_dico.get('ca_file', os.path.join(cert_dir, "ca_local.crt"))
|
||||
ca_dest_file = ssl_dico.get('ca_dest_file', os.path.join(cert_dir, "ca.crt"))
|
||||
ca_keyfile = ssl_dico.get('ca_keyfile', os.path.join(key_dir, "ca.key"))
|
||||
# index
|
||||
start_index = ssl_dico.get('start_index', hex(int(time.time()))[2:])
|
||||
min_serial = 0x30
|
||||
ssl_default_key_bits = ssl_dico.get('ssl_default_key_bits', client.get_creole('ssl_default_key_bits', 2048))
|
||||
ssl_default_cert_time = ssl_dico.get('ssl_default_cert_time', client.get_creole('ssl_default_cert_time', 1096))
|
||||
|
||||
def load_default_conf_if_needed():
|
||||
"""creoled n'est pas forcement démarré à ce moment là
|
||||
ne charger la configuration par défaut qu'à l'utilisation de la lib
|
||||
et non à l'importation
|
||||
#8448
|
||||
"""
|
||||
global ssl_dir
|
||||
if ssl_dir == None:
|
||||
load_conf({'ssl_dir': '/etc/ssl',
|
||||
'ca_conf_file': '/etc/eole/ssl/ca-eole.conf',
|
||||
'conf_file': '/etc/eole/ssl/certif-eole.conf',
|
||||
'client_conf_file': '/etc/eole/ssl/client-eole.conf'})
|
||||
|
||||
ssl_dir=None
|
||||
ca_conf_file=None
|
||||
client_conf_file=None
|
||||
conf_file=None
|
||||
certs_catalog = None
|
||||
ca_issuer = None
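# Minimal usage sketch, assuming an EOLE host where /etc/eole/ssl/*.conf and
# the /etc/ssl tree exist; the certificate path below is only an illustration,
# not part of the original module.
if __name__ == '__main__':
    # Generate the CA and the declared certificates, keeping existing files.
    gen_certs(regen=False)
    # Generate (or keep) one standalone certificate with a protected key.
    gen_certif('/etc/ssl/certs/exemple.crt', key_chmod='600')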
|
838
creole/client.py
Normal file
|
@ -0,0 +1,838 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
##########################################################################
|
||||
# creole.client - client to request creole.server through REST API
|
||||
# Copyright © 2012,2013 Pôle de compétences EOLE <eole@ac-dijon.fr>
|
||||
#
|
||||
# License CeCILL:
|
||||
# * in french: http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# * in english http://www.cecill.info/licences/Licence_CeCILL_V2-en.html
|
||||
##########################################################################
|
||||
|
||||
"""Request informations from :class:`creole.CreoleServer`
|
||||
|
||||
Simple http :mod:`restkit.request` client to request and manipulate
|
||||
informations from :class:`creole.CreoleServer`.
|
||||
|
||||
"""
|
||||
|
||||
from http_parser.http import NoMoreData
|
||||
import restkit
|
||||
import eventlet
|
||||
from restkit.errors import ResourceError, RequestError, ParseException, RequestTimeout
|
||||
from eventlet.timeout import Timeout as EventletTimeout
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
import json
|
||||
import logging
|
||||
from time import sleep
|
||||
|
||||
from .dtd_parser import parse_dtd
|
||||
from .config import dtdfilename
|
||||
|
||||
from .i18n import _
|
||||
from pyeole.encode import normalize
|
||||
|
||||
import re
|
||||
|
||||
# Stat filesystem
|
||||
import os
|
||||
|
||||
# Create instance method on the fly
|
||||
import types
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
_CONTAINER_COMPONENTS = ['container'] + parse_dtd(dtdfilename)['container']['options']
|
||||
"""List of components used to define an LXC container.
|
||||
|
||||
They are extracted from the ``creole.dtd``.
|
||||
|
||||
Each of them are use to fabric two accessor methods bound to
|
||||
:class:`CreoleClient`.
|
||||
|
||||
"""
|
||||
LOCAL_URL = 'http://127.0.0.1:8000'
|
||||
#Si on veut garder les threads, on peut désactiver les reap_connections pour éviter les tracebacks
|
||||
#restkit.session.get_session('thread', reap_connections=False)
|
||||
|
||||
|
||||
def _merge_entries(old, new):
|
||||
"""Merge component informations
|
||||
|
||||
This merge keep information from :data:`old` when the :data:`new`
|
||||
is ``None``.
|
||||
|
||||
The boolean information are ored between :data:`old` and
|
||||
:data:`new`.
|
||||
|
||||
:param old: previous component informations
|
||||
:type old: `dict`
|
||||
:param new: new component informations
|
||||
:type new: `dict`
|
||||
:return: merged informations
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
for key, val in new.items():
|
||||
if val is None:
|
||||
# Do not override previous value
|
||||
continue
|
||||
elif isinstance(val, bool):
|
||||
# Switch on first True
|
||||
# old[key] may not exist
|
||||
old[key] = val | old.get(key, False)
|
||||
else:
|
||||
old[key] = val
|
||||
|
||||
return old
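# Illustrative example (values are hypothetical): merging
# old = {'ip': '10.1.1.1', 'activate': False} with
# new = {'ip': None, 'activate': True} keeps ip == '10.1.1.1'
# (None never overrides) and sets activate to True (booleans are or-ed).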
|
||||
|
||||
|
||||
def _merge_duplicates_in_components(container_info, keys_to_strip=None):
|
||||
"""Merge duplicates entries
|
||||
|
||||
:param container_info: information on a container or group of
|
||||
containers
|
||||
:type container_info: `dict`
|
||||
:param keys_to_strip: keys for which to remove duplicated entries
|
||||
:type keys_to_strip: `list`
|
||||
|
||||
"""
|
||||
# Do not work in-place
|
||||
info = container_info.copy()
|
||||
|
||||
if keys_to_strip is None:
|
||||
# Run on all keys
|
||||
keys_to_strip = info.keys()
|
||||
|
||||
for key in keys_to_strip:
|
||||
if not isinstance(info[key], list):
|
||||
# Do not work on single values
|
||||
continue
|
||||
|
||||
result = OrderedDict()
|
||||
for entry in info[key]:
|
||||
if 'name' in entry:
|
||||
name = repr(entry['name'])
|
||||
if name in result and not entry.get(u'activate', False):
|
||||
# Duplicate found but inactive
|
||||
continue
|
||||
elif name in result:
|
||||
# Merge old and new informations
|
||||
old_entry = result[name]
|
||||
# Make sure entry appears at right place
|
||||
del(result[name])
|
||||
result[name] = _merge_entries(old=old_entry,
|
||||
new=entry)
|
||||
else:
|
||||
# New entry
|
||||
result[name] = entry
|
||||
|
||||
if result:
|
||||
# Store stripped information
|
||||
info[key] = [ item for item in result.values() ]
|
||||
|
||||
return info
|
||||
|
||||
|
||||
def _build_component_accessors(component):
|
||||
"""Fabric of accessors for container components
|
||||
|
||||
It build two accessors:
|
||||
|
||||
- one to get all components for all containers named
|
||||
``get_<component>s``
|
||||
|
||||
- one to get one component item defined for all containers
|
||||
named ``get_<component>``
|
||||
|
||||
:param component: name of the container component
|
||||
:type component: `str`
|
||||
:return: component accessors
|
||||
:rtype: `tuple` of `function`
|
||||
|
||||
"""
|
||||
def all_components(self, container=None):
|
||||
"""Return all components
|
||||
"""
|
||||
return self.get_components('{0}s'.format(component),
|
||||
container=container)
|
||||
|
||||
all_components.__name__ = 'get_{0}s'.format(component)
|
||||
all_components.__doc__ = """Get {0}s for all containers
|
||||
|
||||
:param container: limit search to a container
|
||||
:type container: `str`
|
||||
:returns: {0}s informations
|
||||
:rtype: `list`
|
||||
|
||||
""".format(component)
|
||||
|
||||
def single_component(self, name, container=None):
|
||||
"""Return single component
|
||||
"""
|
||||
components = []
|
||||
ret = self.get_components('{0}s'.format(component),
|
||||
container=container)
|
||||
for item in ret:
|
||||
if item['name'] == name:
|
||||
components.append(item)
|
||||
return components
|
||||
single_component.__doc__ = """Get one {0} for all containers
|
||||
|
||||
:param name: name of {0} to return
|
||||
:type name: `str`
|
||||
:param container: limit search to a container
|
||||
:type container: `str`
|
||||
:returns: {0} informations for all containers
|
||||
:rtype: `list`
|
||||
|
||||
""".format(component)
|
||||
|
||||
single_component.__name__ = 'get_{0}'.format(component)
|
||||
|
||||
return all_components, single_component
|
||||
|
||||
|
||||
class CreoleClient(object):
|
||||
"""Request informations from :class:`creole.CreoleServer`.
|
||||
|
||||
In addition, this class provides some utilities to manipulate
|
||||
returned data.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, url=None):
|
||||
"""Initialize client.
|
||||
|
||||
:param url: HTTP URL to the :class:`creole.CreoleServer`
|
||||
:type url: `str`
|
||||
|
||||
"""
|
||||
if url is None:
|
||||
if self.is_in_lxc():
|
||||
url = 'http://192.0.2.1:8000'
|
||||
else:
|
||||
url = LOCAL_URL
|
||||
|
||||
self.url = url
|
||||
comp_list = _CONTAINER_COMPONENTS[:]
|
||||
comp_list.remove('container')
|
||||
# Disable logging of restkit
|
||||
restkit.set_logging('critical', logging.NullHandler())
|
||||
self._is_container_actif = None
|
||||
self._restkit_request = None
|
||||
for component in comp_list:
|
||||
get_all, get_single = _build_component_accessors(component)
|
||||
setattr(self, get_all.__name__,
|
||||
types.MethodType(get_all, self, CreoleClient))
|
||||
setattr(self, get_single.__name__,
|
||||
types.MethodType(get_single, self, CreoleClient))
|
||||
|
||||
@staticmethod
|
||||
def is_in_lxc():
|
||||
"""Check if we are in LXC.
|
||||
|
||||
The check relies on the absence of ``/proc/self`` (chroot case) or on access to ``/dev/lxc/console``.
|
||||
|
||||
:return: if we are under LXC.
|
||||
:rtype: `bool`
|
||||
|
||||
"""
|
||||
if not os.path.isdir('/proc/self'):
|
||||
# when launch in chroot
|
||||
return True
|
||||
else:
|
||||
return os.access('/dev/lxc/console', os.F_OK)
|
||||
|
||||
|
||||
def close(self):
|
||||
if self._restkit_request is not None:
|
||||
self._restkit_request.close()
|
||||
|
||||
|
||||
def _request(self, path, **kwargs):
|
||||
"""Send HTTP request to Creole server.
|
||||
|
||||
If ConnectionError, try three time before leave.
|
||||
|
||||
:param path: path to the creole resource
|
||||
:type path: `str`
|
||||
:return: response of the request
|
||||
:rtype: :class:`restkit.wrappers.Response`
|
||||
:raise CreoleClientError: on HTTP errors
|
||||
|
||||
"""
|
||||
timeout = 5
|
||||
max_try = 3
|
||||
tried = 0
|
||||
|
||||
method = 'GET'
|
||||
if 'method' in kwargs:
|
||||
method = kwargs['method']
|
||||
del(kwargs['method'])
|
||||
|
||||
uri = restkit.util.make_uri(path, **kwargs)
|
||||
|
||||
while tried < max_try:
|
||||
tried += 1
|
||||
try:
|
||||
# use eventlet backend (#13194, #21388)
|
||||
with eventlet.Timeout(timeout):
|
||||
self._restkit_request = restkit.request(uri, method=method, backend='eventlet')
|
||||
return self._restkit_request
|
||||
except (ResourceError, RequestError, ParseException, NoMoreData, RequestTimeout, EventletTimeout) as err:
|
||||
log.debug(_(u"Connexion error '{0}',"
|
||||
u" retry {1}/{2}").format(err, tried, max_try))
|
||||
sleep(1)
|
||||
|
||||
if isinstance(err, RequestError):
|
||||
msg = _(u"HTTP error: {0}\nPlease check creoled's log (/var/log/rsyslog/local/creoled/creoled.info.log)\nand restart service with command 'service creoled start'")
|
||||
else:
|
||||
msg = _(u"HTTP error: {0}")
|
||||
if isinstance(err, RequestTimeout) or isinstance(err, EventletTimeout):
|
||||
err = _(u"creoled service didn't respond in time")
|
||||
|
||||
raise TimeoutCreoleClientError(msg.format(err))
|
||||
|
||||
def is_container_actif(self):
|
||||
if self._is_container_actif is None:
|
||||
self._is_container_actif = self.get_creole('mode_conteneur_actif', 'non') == 'oui'
|
||||
return self._is_container_actif
|
||||
|
||||
def request(self, command, path=None, **kwargs):
|
||||
"""Send HTTP request to creole server.
|
||||
|
||||
:param command: action to perform for the creole resource
|
||||
:type command: `str`
|
||||
:param path: path to the creole resource
|
||||
:type path: `str`
|
||||
:return: dictionary of variable:value
|
||||
:rtype: `dict`
|
||||
:raise CreoleClientError: on bad response status or HTTP error
|
||||
|
||||
"""
|
||||
if path is not None:
|
||||
path = self.validate_path(path)
|
||||
ret = self._request(self.url + command + path, **kwargs)
|
||||
else:
|
||||
ret = self._request(self.url + command, **kwargs)
|
||||
if ret.status_int != 200:
|
||||
log.debug(_(u'HTML content: {0}').format(ret.body_string()))
|
||||
raise CreoleClientError(_(u"HTML error {0}, please consult creoled events log (/var/log/rsyslog/local/creoled/creoled.info.log) to have more informations").format(ret.status_int))
|
||||
reply = json.loads(ret.body_string())
|
||||
|
||||
# Previous fix for NoMoreData exception #7218 :
|
||||
#ret.connection.close()
|
||||
|
||||
if reply['status'] != 0:
|
||||
if reply['status'] == 4:
|
||||
raise NotFoundError(u"{0}".format(reply['response']))
|
||||
else:
|
||||
raise CreoleClientError(normalize(_("Creole error {0}: {1}")).format(
|
||||
reply['status'], reply['response']))
|
||||
|
||||
return reply['response']
|
||||
|
||||
@staticmethod
|
||||
def validate_path(path):
|
||||
"""Validate the path for http request.
|
||||
|
||||
:data:`path` must use ``/`` as separator with a leading one or
|
||||
use ``.`` as separator.
|
||||
|
||||
:param path: path to the creole resource
|
||||
:type path: `str`
|
||||
:return: slash separated path to the resource
|
||||
:rtype: `str`
|
||||
:raise CreoleClientError: when path does not validate
|
||||
|
||||
"""
|
||||
ret = path
|
||||
if not ret.startswith('/'):
|
||||
if ret.find('.') != -1 and ret.find('/') != -1:
|
||||
raise CreoleClientError(_(u"Path must not mix dotted and" +
|
||||
u" slash notation: '{0}'").format(path))
|
||||
elif ret.find('.') != -1:
|
||||
ret = '/{0}'.format( ret.replace('.', '/') )
|
||||
else:
|
||||
raise CreoleClientError(_(u"Path must start" +
|
||||
u" with '/': '{0}'").format(path))
|
||||
return ret
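# Illustrative examples (hypothetical paths):
#   validate_path('/creole/general/nom_machine') -> returned unchanged
#   validate_path('creole.general.nom_machine')  -> '/creole/general/nom_machine'
#   validate_path('creole/general')              -> CreoleClientError (missing leading '/')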
|
||||
|
||||
def get(self, path='/creole', *args, **kwargs):
|
||||
"""Get the values from part of the tree.
|
||||
|
||||
If :data:`path` is a variable, it returns its value.
|
||||
|
||||
If :data:`path` is a tree node, it returns the whole tree
|
||||
of ``variable:value`` as flat dictionary.
|
||||
|
||||
:param path: path to the creole resource
|
||||
:type path: `str`
|
||||
:param default: default value if any error occurs
|
||||
:return: value of the variable, or a flat ``variable:value`` dictionary for a tree node
|
||||
:rtype: variable value or `dict`
|
||||
|
||||
"""
|
||||
# Use a dictionary to test existence
|
||||
default = {}
|
||||
if len(args) > 1:
|
||||
raise ValueError(_("Too many positional parameters {0}.").format(args))
|
||||
|
||||
if kwargs.has_key('default'):
|
||||
default['value'] = kwargs['default']
|
||||
del(kwargs['default'])
|
||||
elif len(args) == 1:
|
||||
default['value'] = args[0]
|
||||
|
||||
try:
|
||||
ret = self.request('/get', path, **kwargs)
|
||||
except (NotFoundError, CreoleClientError) as err:
|
||||
if default.has_key('value'):
|
||||
ret = default['value']
|
||||
else:
|
||||
raise err
|
||||
|
||||
return ret
|
||||
|
||||
def list(self, path='/creole'):
|
||||
"""List content of a path.
|
||||
|
||||
If :data:`path` is a variable, it returns its name.
|
||||
|
||||
If :data:`path` is a tree node, it returns the list of items
|
||||
under it.
|
||||
|
||||
:param path: path to the creole resource
|
||||
:type path: `str`
|
||||
:return: items present under a path
|
||||
:rtype: `list`
|
||||
|
||||
"""
|
||||
return self.request('/list', path)
|
||||
|
||||
def get_creole(self, name=None, *args, **kwargs):
|
||||
"""Get variables under ``/creole``.
|
||||
|
||||
The full path of variable names is stripped in key names.
|
||||
|
||||
:param path: path to the creole resource
|
||||
:type path: `str`
|
||||
:param default: default value to return if the variable named
|
||||
:data:`name` does not exist or any error occurs
|
||||
:return: variables and their value
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
if name is not None:
|
||||
# Tiramisu has no meaningful message
|
||||
try:
|
||||
ret = self.get('/creole', *args, variable=name, **kwargs)
|
||||
except NotFoundError:
|
||||
msg = _(u'Unknown variable {0}')
|
||||
raise NotFoundError(msg.format(name))
|
||||
else:
|
||||
ret = self.strip_full_path(self.get('/creole', *args, **kwargs))
|
||||
|
||||
return ret
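# Illustrative examples (the variable names are assumptions and may not
# exist on every module):
#   CreoleClient().get_creole('nom_machine')             -> value of the variable
#   CreoleClient().get_creole('variable_inconnue', 'na') -> 'na' when the variable is missing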
|
||||
|
||||
def reload_config(self):
|
||||
"""Reload Tiramisu's config
|
||||
"""
|
||||
return self.request('/reload_config')
|
||||
|
||||
def reload_eol(self):
|
||||
"""Reload Tiramisu's partial config
|
||||
"""
|
||||
return self.request('/reload_eol')
|
||||
|
||||
def valid_mandatory(self):
|
||||
return self.request('/valid_mandatory')
|
||||
|
||||
def get_containers(self, group=None):
|
||||
"""Get basic informations of all containers
|
||||
|
||||
:param group: limit search to a group of containers
|
||||
:type group: `str`
|
||||
:return: containers informations
|
||||
:rtype: `list`
|
||||
"""
|
||||
mode_container = self.is_container_actif()
|
||||
if group is None or (not mode_container and group == 'root'):
|
||||
args = {}
|
||||
else:
|
||||
args = {'withoption':'group',
|
||||
'withvalue':group}
|
||||
|
||||
try:
|
||||
ret = self.get('/containers/containers', **args)
|
||||
except NotFoundError:
|
||||
# Tiramisu has no meaningful message
|
||||
if group is not None:
|
||||
msg = _(u'No container found for group {0}')
|
||||
else:
|
||||
msg = _(u'No container found! Is that possible?')
|
||||
raise NotFoundError(msg.format(group))
|
||||
|
||||
ret = self.to_list_of_dict(ret, prefix='container')
|
||||
return ret
|
||||
|
||||
|
||||
def get_container(self, name):
|
||||
"""Get informations of one container
|
||||
|
||||
:param name: name of the container
|
||||
:type name: `str`
|
||||
:return: information about the container
|
||||
:rtype: `dict`
|
||||
"""
|
||||
try:
|
||||
ret = self.get('/containers/containers',
|
||||
withoption='name',
|
||||
withvalue=name)
|
||||
except NotFoundError:
|
||||
# Tiramisu has no meaningful message
|
||||
raise NotFoundError(_(u'Unknown container {0}').format(name))
|
||||
|
||||
ret = self.to_list_of_dict(ret, prefix='container')
|
||||
return ret[0]
|
||||
|
||||
|
||||
def get_groups(self):
|
||||
"""Get list of container groups
|
||||
|
||||
Every group is a container, but not every container is a
|
||||
group.
|
||||
|
||||
:return: container groups names
|
||||
:rtype: `list`
|
||||
|
||||
"""
|
||||
mode_container = self.is_container_actif()
|
||||
containers = self.get_containers()
|
||||
if not mode_container:
|
||||
groups = ['root']
|
||||
else:
|
||||
groups = []
|
||||
for container in containers:
|
||||
if container['name'] == container['group']:
|
||||
groups.append(container['name'])
|
||||
if 'all' in groups:
|
||||
groups.remove('all')
|
||||
|
||||
return groups
|
||||
|
||||
|
||||
def is_group(self, name):
|
||||
"""Verify is a container is a group of containers.
|
||||
|
||||
:param name: name of the container
|
||||
:type name: `str`
|
||||
:return: is the container a group of containers?
|
||||
:rtype: `bool`
|
||||
|
||||
"""
|
||||
mode_container = self.is_container_actif()
|
||||
if not mode_container:
|
||||
return name == 'root'
|
||||
|
||||
container = self.get_container(name)
|
||||
return name == container['group']
|
||||
|
||||
|
||||
def get_containers_components(self, containers, group=False, merge_duplicates=False):
|
||||
"""Get all components of a list of containers or group of containers.
|
||||
|
||||
:param containers: container names
|
||||
:type containers: `list` of `str`
|
||||
:param group: containers are names of groups of containers
|
||||
:type group: `bool`
|
||||
:param merge_duplicates: merge duplicate entries
|
||||
:type merge_duplicates: `bool`
|
||||
:return: components of the containers
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
comp_list = [ '{0}s'.format(name) for name in _CONTAINER_COMPONENTS[:] ]
|
||||
component = {}
|
||||
|
||||
if not group:
|
||||
if 'all' in containers:
|
||||
# make sure all is first
|
||||
containers.remove('all')
|
||||
|
||||
# Remove duplicates
|
||||
containers = list(set(containers))
|
||||
containers.insert(0, 'all')
|
||||
|
||||
for comp in comp_list:
|
||||
component[comp] = []
|
||||
for container in containers:
|
||||
by_cont = self.get_components(None, container=container, group=group)
|
||||
|
||||
for comp, items in by_cont.items():
|
||||
if comp + 's' in comp_list:
|
||||
component[comp + 's'].extend(items)
|
||||
|
||||
if merge_duplicates:
|
||||
component = _merge_duplicates_in_components(component, comp_list)
|
||||
|
||||
if 'interfaces' in component:
|
||||
for interface in component['interfaces']:
|
||||
if 'gateway' in interface and interface['gateway']:
|
||||
component['gateway'] = {u'interface': interface['name'],
|
||||
u'ip': interface['gateway']}
|
||||
|
||||
return component
|
||||
|
||||
|
||||
def get_container_infos(self, container):
|
||||
"""Get all components of a container or its group
|
||||
|
||||
:param container: container name
|
||||
:type container: `str`
|
||||
:return: components of the container or its group
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
container_info = self.get_container(container)
|
||||
group_name = container_info[u'real_container']
|
||||
container_info = self.get_group_infos(group_name)
|
||||
|
||||
return container_info
|
||||
|
||||
|
||||
def get_group_infos(self, group):
|
||||
"""Get all components of a group of container
|
||||
|
||||
:param group: container group name
|
||||
:type group: `str`
|
||||
:return: components of the container
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
group_info = self.get_containers_components(containers=[group],
|
||||
group=True,
|
||||
merge_duplicates=True)
|
||||
|
||||
# If we need to do things in the name of all containers in the group
|
||||
names = []
|
||||
found = False
|
||||
for container in group_info['containers']:
|
||||
name = container['name']
|
||||
names.append(name)
|
||||
if name == group:
|
||||
found = True
|
||||
group_info.update(container)
|
||||
if not found:
|
||||
group_info.update(self.get_container(group))
|
||||
group_info['containers'] = names
|
||||
|
||||
return group_info
|
||||
|
||||
|
||||
def get_components(self, name, container=None, group=False):
|
||||
"""Get component for containers
|
||||
|
||||
:param name: type of container variable
|
||||
:type name: `str`
|
||||
:param container: limit search to a container
|
||||
:type container: `str`
|
||||
:return: component for all containers
|
||||
:rtype: `list`
|
||||
"""
|
||||
if container is not None:
|
||||
if group:
|
||||
option_name = 'real_container'
|
||||
else:
|
||||
option_name = 'container'
|
||||
|
||||
args = {'withoption': option_name,
|
||||
'withvalue': container}
|
||||
else:
|
||||
args = {}
|
||||
|
||||
ret = None
|
||||
if name is None:
|
||||
path = '/containers'
|
||||
else:
|
||||
path = '/containers/{0}'.format(name)
|
||||
try:
|
||||
ret = self.get(path, **args)
|
||||
except NotFoundError:
|
||||
# Tiramisu has no meaningful message
|
||||
msg = _(u'Unknown container components {0} for container {1}')
|
||||
if container is None:
|
||||
msg = _(u'Unknown container components {0}')
|
||||
else:
|
||||
args = {'withoption':'container_group',
|
||||
'withvalue':container}
|
||||
try:
|
||||
ret = self.get(path, **args)
|
||||
except NotFoundError:
|
||||
msg = _(u'Unknown container components {0} for container {1}')
|
||||
# If not a container, maybe a container's group
|
||||
if ret is None:
|
||||
raise NotFoundError(msg.format(str(name), container))
|
||||
if name is None:
|
||||
comp_list = _CONTAINER_COMPONENTS[:]
|
||||
dico = {}
|
||||
ret_comp = {}
|
||||
for comp in comp_list:
|
||||
dico[comp] = {}
|
||||
for path, item in ret.items():
|
||||
spath = path.split('.')
|
||||
#without 's'
|
||||
comp = spath[0][:-1]
|
||||
dico[comp]['.'.join(spath[1:])] = item
|
||||
for comp in comp_list:
|
||||
ret_comp[comp] = self.to_list_of_dict(dico[comp], prefix=comp)
|
||||
|
||||
else:
|
||||
ret_comp = self.to_list_of_dict(ret, prefix=name)
|
||||
return ret_comp
|
||||
|
||||
@classmethod
|
||||
def to_list_of_dict(cls, flat, prefix=None):
|
||||
"""Convert a flat dictionary to a list of dictionaries.
|
||||
|
||||
Build a list of dictionaries ``<name>:<value>``, one for each
|
||||
prefix of the form ``<prefix><integer index>.<name>:<value>``
|
||||
|
||||
The list is numerically ordered by ``<integer index>``
|
||||
extracted from each key according to :data:`prefix`.
|
||||
|
||||
If the :data:`prefix` is not specified, a random element of
|
||||
:data:`flat` is extracted to compute it.
|
||||
|
||||
:param flat: absolute attribute variable names and their
|
||||
values
|
||||
:type flat: `dict`
|
||||
:param prefix: alphabetic prefix to extract integer index
|
||||
:type prefix: `str`
|
||||
:return: variables and their attributes values
|
||||
:rtype: `list` of `dict`
|
||||
|
||||
"""
|
||||
reply = {}
|
||||
sorted_items = []
|
||||
sort_key = None
|
||||
|
||||
if prefix is None:
|
||||
# Extract prefix name
|
||||
random_key = flat.iterkeys().next()
|
||||
indexed_prefix = random_key.split('.')[0]
|
||||
re_match = re.match(r'(\D+)\d+', indexed_prefix)
|
||||
prefix = re_match.group(1)
|
||||
|
||||
if prefix is not None:
|
||||
# check for None because the regexp match may have failed
|
||||
# Extract component index as integer for comparison
|
||||
sort_key = lambda string: int(string.split('.')[0].lstrip(prefix))
|
||||
|
||||
for key in sorted(flat.keys(), key=sort_key):
|
||||
sid, sattr = cls._split_path_leaf(key)
|
||||
if sid not in reply:
|
||||
sorted_items.append(sid)
|
||||
reply[sid] = {}
|
||||
reply[sid][sattr] = flat[key]
|
||||
return [ reply[item] for item in sorted_items ]
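# Illustrative example (hypothetical data): with prefix='interface',
# {'interface0.name': 'eth0', 'interface0.ip': '192.168.0.1',
#  'interface1.name': 'eth1', 'interface1.ip': '192.168.1.1'}
# becomes [{'name': 'eth0', 'ip': '192.168.0.1'},
#          {'name': 'eth1', 'ip': '192.168.1.1'}].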
|
||||
|
||||
@staticmethod
|
||||
def strip_full_path(flat):
|
||||
"""Strip full path of flat dictionary keys.
|
||||
|
||||
:param flat: absolute variable names and their value
|
||||
:type flat: `dict`
|
||||
:return: short variable names and their value
|
||||
:rtype: `dict`
|
||||
"""
|
||||
ret = {}
|
||||
for path in flat:
|
||||
parts = path.split('.')[1:]
|
||||
if len(parts) == 1:
|
||||
# Single variable
|
||||
ret[ parts[0] ] = flat[path]
|
||||
elif len(parts) == 2 and parts[0] == parts[1]:
|
||||
# Master variable
|
||||
ret[ parts[0] ] = flat[path]
|
||||
else:
|
||||
# slave variable
|
||||
ret[ '.'.join(parts) ] = flat[path]
|
||||
return ret
|
||||
|
||||
@staticmethod
|
||||
def to_grouped_lists(dict_list, keyname, keyvalue=None):
|
||||
"""Convert a `list` of `dict` to a `dict` :data:`keyvalue`:`list`.
|
||||
|
||||
Build dictionary of ``dictionary[:data:`keyvalue`]:<list of
|
||||
dict>`` to group all items with the same value of a key.
|
||||
|
||||
:param dict_list: dictionaries
|
||||
:type dict_list: `list`
|
||||
:param keyname: name of the key to test
|
||||
:type keyname: `str`
|
||||
:param keyvalue: value to match :data:`keyname`
|
||||
:return: dictionary grouped by a key value
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
reply = {}
|
||||
for key in dict_list:
|
||||
if keyname in key and keyvalue and keyvalue != key[keyname]:
|
||||
continue
|
||||
if keyname not in key:
|
||||
if None not in reply:
|
||||
reply[None] = []
|
||||
reply[None].append(key)
|
||||
else:
|
||||
if key[keyname] not in reply:
|
||||
reply[ key[keyname] ] = []
|
||||
reply[ key[keyname] ].append(key)
|
||||
return reply
|
||||
|
||||
@staticmethod
|
||||
def _split_path_leaf(path, separator='.'):
|
||||
"""Split path in two parts: dirname and basename.
|
||||
|
||||
If :data:`path` does not contain the :data:`separator`, it is
|
||||
considered a leaf and the dirname of :data:`path` is set to
|
||||
`None`.
|
||||
|
||||
:param path: path to the creole resource
|
||||
:type path: `str`
|
||||
:return: dirname and basename of :data:`path`
|
||||
:rtype: `list`
|
||||
|
||||
"""
|
||||
if path.find(separator) == -1:
|
||||
return (None, path)
|
||||
|
||||
splited = path.split(separator)
|
||||
return ( '.'.join(splited[:-1]), splited[-1] )
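# Illustrative examples:
#   _split_path_leaf('containers.container0.name') -> ('containers.container0', 'name')
#   _split_path_leaf('name')                       -> (None, 'name')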
|
||||
|
||||
|
||||
class TimeoutCreoleClientError(StandardError):
|
||||
pass
|
||||
|
||||
|
||||
class CreoleClientError(StandardError):
|
||||
"""Bad use of :class:`CreoleClient`
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class NotFoundError(CreoleClientError):
|
||||
"""Requested variable not found
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
print(CreoleClient().get('/'))
|
||||
except Exception as err:
|
||||
print(_(u"Error: {0}").format(err))
|
81
creole/config.py
Normal file
|
@ -0,0 +1,81 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
fichier de configuration pour créole
|
||||
|
||||
"""
|
||||
from os.path import join, isfile, isdir
|
||||
|
||||
eoledir = '/usr/share/eole'
|
||||
LOCALKERNEL_FILE = join(eoledir, 'noyau/local')
|
||||
REBOOT_FILE = '/var/run/reboot-required'
|
||||
|
||||
charset = 'UTF8'
|
||||
|
||||
# chemin par defaut des templates, fichier config.eol, etc
|
||||
configeoldir = '/etc/eole/'
|
||||
eoleroot = join(eoledir, 'creole')
|
||||
vareole = '/var/lib/eole'
|
||||
|
||||
bareos_restore_root = join(eoledir, 'bareos')
|
||||
bareos_restore = join(bareos_restore_root, 'restore')
|
||||
|
||||
configeol = join(configeoldir, 'config.eol')
|
||||
|
||||
# certificats
|
||||
cert_file = '/etc/ssl/certs/eole.crt'
|
||||
key_file = '/etc/ssl/certs/eole.key'
|
||||
# port du serveur creole_serv
|
||||
port_rpc = 4333
|
||||
|
||||
# chemin du répertoire source des fichiers templates
|
||||
templatedir = '/var/lib/creole'
|
||||
|
||||
dicos_dir = join(eoleroot, 'dicos')
|
||||
modif_dir = join(eoleroot, 'modif')
|
||||
distrib_dir = join(eoleroot, 'distrib')
|
||||
patch_dir = join(eoleroot, 'patch')
|
||||
|
||||
# chemin pour les fichiers de données
|
||||
datadir = '/usr/share/creole'
|
||||
# chemin pour les modules de fonctions supplémentaires
|
||||
func_dir = join(datadir,'funcs')
|
||||
# repertoire du ou des dictionnaires xml creole
|
||||
eoledirs = [dicos_dir, join(dicos_dir, 'variante'), join(dicos_dir, 'local')]
|
||||
|
||||
# extra
|
||||
eoleextradico = join(eoledir, 'creole/extra')
|
||||
eoleextraconfig = join(configeoldir, 'extra')
|
||||
forbiddenextra = ['containers', 'creole']
|
||||
|
||||
# repertoire de la dtd
|
||||
dtddir = datadir
|
||||
if isfile('data/creole.dtd'):
|
||||
dtdfilename = 'data/creole.dtd'
|
||||
elif isfile('../creole/data/creole.dtd'):
|
||||
dtdfilename = '../creole/data/creole.dtd'
|
||||
else:
|
||||
dtdfilename = join(dtddir, 'creole.dtd')
|
||||
|
||||
# repertoire avec le fichier lock
|
||||
LOCK_PATH = '/var/lock/eole'
|
||||
LOCK_SYSTEM_PATH = join(LOCK_PATH, 'eole-system')
|
||||
|
||||
# Nom du serveur maitre
|
||||
VIRTMASTER = 'root'
|
||||
VIRTROOT = '/var/lib/lxc'
|
||||
VIRTBASE = 'rootfs'
|
||||
|
||||
container_instance_lockfile = '/etc/eole/.container_instance.lock'
|
||||
containers_default_network = '192.0.2'
|
||||
gen_conteneurs_needed = '/etc/eole/.gen_conteneurs'
|
||||
|
||||
VIRTENABLED_LOCKFILE = '/etc/eole/.VirtEnabled.lock'
|
||||
VIRTDISABLED_LOCKFILE = '/etc/eole/.VirtDisabled.lock'
|
||||
INSTANCE_LOCKFILE = '/etc/eole/.instance'
|
||||
UPGRADE_LOCKFILE = '/etc/eole/.upgrade-auto'
|
||||
|
||||
SSL_LAST_FILE = '/etc/eole/ssl/lastfile.txt'
|
||||
|
||||
FLATTENED_CREOLE_DIR = join(vareole, 'config')
|
||||
if not isdir(FLATTENED_CREOLE_DIR):
|
||||
FLATTENED_CREOLE_DIR = join('/tmp')
|
224
creole/containers.py
Normal file
|
@ -0,0 +1,224 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
##########################################################################
|
||||
# creole.containers - management of LXC containers
|
||||
# Copyright © 2012,2013 Pôle de compétences EOLE <eole@ac-dijon.fr>
|
||||
#
|
||||
# License CeCILL:
|
||||
# * in french: http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# * in english http://www.cecill.info/licences/Licence_CeCILL_V2-en.html
|
||||
##########################################################################
|
||||
|
||||
"""Manage LXC containers
|
||||
|
||||
"""
|
||||
|
||||
from .client import CreoleClient, _CONTAINER_COMPONENTS
|
||||
from .config import VIRTENABLED_LOCKFILE, VIRTDISABLED_LOCKFILE
|
||||
from .error import VirtError
|
||||
from .config import templatedir, VIRTROOT
|
||||
from .template import CreoleTemplateEngine
|
||||
from pyeole.process import system_code, system_out, system_progress_out
|
||||
from pyeole.diagnose import test_tcp
|
||||
from .i18n import _
|
||||
|
||||
from distutils.spawn import find_executable
|
||||
from os.path import isdir
|
||||
from os.path import isfile, islink
|
||||
from os.path import ismount
|
||||
from os.path import join
|
||||
from os.path import dirname
|
||||
from os import access
|
||||
from os import F_OK
|
||||
from os import stat
|
||||
from os import symlink
|
||||
from os import makedirs
|
||||
from os import mknod
|
||||
from os import makedev
|
||||
from os import major
|
||||
from os import minor
|
||||
from os import unlink
|
||||
from stat import S_IFBLK
|
||||
from stat import S_ISBLK
|
||||
from hashlib import md5
|
||||
from glob import glob
|
||||
import cjson
|
||||
|
||||
import logging
|
||||
|
||||
client = CreoleClient()
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
_LXC_MD5 = '/etc/eole/lxc.md5'
|
||||
_LXC_LOG = '/var/log/isolation.log'
|
||||
|
||||
_NOT_REALLY_LXC_CONTAINERS = ['root', 'all']
|
||||
"""List of container names that are not to be generated.
|
||||
|
||||
"""
|
||||
|
||||
_LXC_TEMPLATE = {'config': "lxc.config",
|
||||
'fstab': "lxc.fstab",
|
||||
'rootfs/etc/network/interfaces' : "lxc.interfaces",
|
||||
}
|
||||
"""Creole templates for LXC containers.
|
||||
|
||||
"""
|
||||
|
||||
def is_lxc_locked():
|
||||
"""Check if the LXC virtualization is locked.
|
||||
|
||||
The virtualization is locked after first ``instance`` of the
|
||||
server to avoid switching between modes.
|
||||
|
||||
:return: ``enable`` if LXC is enabled, ``disable`` if LXC is
|
||||
disabled or ``None`` where there is no lockfile.
|
||||
|
||||
"""
|
||||
if isfile(VIRTENABLED_LOCKFILE) and isfile(VIRTDISABLED_LOCKFILE):
|
||||
raise VirtError(_(u"Invalid LXC lock files state: both are present."))
|
||||
elif isfile(VIRTENABLED_LOCKFILE):
|
||||
virtlocked = 'enable'
|
||||
elif isfile(VIRTDISABLED_LOCKFILE):
|
||||
virtlocked = 'disable'
|
||||
else:
|
||||
virtlocked = None
|
||||
return virtlocked
|
||||
|
||||
def is_lxc_enabled():
|
||||
"""Check if LXC controller is enabled
|
||||
|
||||
We do not accept to switch between enabled and disabled LXC, after
|
||||
first ``instance``, a lock file is set to check at each
|
||||
``reconfigure``.
|
||||
|
||||
:return: If the LXC container mode is enabled.
|
||||
:rtype: `bool`
|
||||
:raise VirtError: if state in inconsistent between configuration
|
||||
and lock files.
|
||||
|
||||
"""
|
||||
containers_enabled = client.get_creole('mode_conteneur_actif', 'non') == 'oui'
|
||||
if containers_enabled and not find_executable('lxc-info'):
|
||||
raise VirtError(_(u'LXC is enabled but LXC commands not found in PATH.'))
|
||||
|
||||
if containers_enabled and is_lxc_locked() == 'disable':
|
||||
raise VirtError(_(u"Server already instantiated in no containers mode, attempt to activate containers mode aborted."))
|
||||
elif not containers_enabled and is_lxc_locked() == 'enable':
|
||||
raise VirtError(_(u"Server already instantiated in containers mode, attempt to activate no containers mode aborted."))
|
||||
|
||||
return containers_enabled
|
||||
|
||||
def generate_lxc_container(name, logger=None):
|
||||
"""Run creation of a container.
|
||||
|
||||
Check if LXC is enabled and take care of ``root`` and ``all``
|
||||
containers.
|
||||
|
||||
:param name: name of the LXC container
|
||||
:type name: `str`
|
||||
|
||||
"""
|
||||
if name not in _NOT_REALLY_LXC_CONTAINERS:
|
||||
if not test_tcp('localhost', client.get_creole('apt_cacher_port')):
|
||||
raise Exception(_('cacher not available, please start check log in /var/log/apt-cacher-ng/ and restart it with "service apt-cacher-ng start" command'))
|
||||
if isfile(_LXC_LOG):
|
||||
unlink(_LXC_LOG)
|
||||
cmd = ['lxc-create', '-n', name, '-t', 'eole']
|
||||
log.debug('Run: {0}'.format(' '.join(cmd)))
|
||||
code, stdout, stderr = system_progress_out(cmd, _(u"Managing container {0}").format(name), logger)
|
||||
fh = open(_LXC_LOG, 'w')
|
||||
fh.write(stdout)
|
||||
fh.write(stderr)
|
||||
fh.close()
|
||||
if code != 0 and stdout.find(u"'{0}' already exists'".format(name)) >= 0:
|
||||
raise Exception(_('error during the process of container creation, more informations in {0}').format(_LXC_LOG))
|
||||
path_container = client.get_creole('container_path_{0}'.format(name))
|
||||
path_apt_eole_conf = join(path_container, 'etc', 'apt', 'apt-eole.conf')
|
||||
path_apt_eole = join(path_container, 'usr', 'sbin', 'apt-eole')
|
||||
if not isfile(path_apt_eole_conf) or not isfile(path_apt_eole):
|
||||
raise Exception(_('eole-common-pkg not installed in container, something goes wrong, more informations in {0}').format(_LXC_LOG))
|
||||
|
||||
|
||||
def is_lxc_running(container):
|
||||
"""Check if an LXC container is running.
|
||||
|
||||
This checks at the LXC level and checks TCP on the SSH port.
|
||||
|
||||
:param container: the container information
|
||||
:type container: `dict`
|
||||
:return: if the container is running and reachable
|
||||
:rtype: `bool`
|
||||
|
||||
"""
|
||||
|
||||
return is_lxc_started(container) and test_tcp(container[u'ip'], 22)
|
||||
|
||||
|
||||
def is_lxc_started(container):
|
||||
"""Check if an LXC container is started.
|
||||
|
||||
This checks at the LXC level only.
|
||||
|
||||
:param container: the container information
|
||||
:type container: `dict`
|
||||
:return: if the container is started
|
||||
:rtype: `bool`
|
||||
|
||||
"""
|
||||
|
||||
if not is_lxc_enabled() or container.get(u'path', None) == '':
|
||||
return True
|
||||
|
||||
if container.get(u'name', None) is None:
|
||||
raise ValueError(_(u"Container has no name"))
|
||||
|
||||
if container.get(u'ip', None) is None:
|
||||
raise ValueError(_(u"Container {0} has no IP").format(container[u'name']))
|
||||
|
||||
cmd = ['lxc-info', '--state', '--name', container[u'name']]
|
||||
code, stdout, stderr = system_out(cmd)
|
||||
|
||||
return stdout.strip().endswith('RUNNING')
|
||||
|
||||
|
||||
def create_mount_point(group):
|
||||
"""Create mount points in LXC.
|
||||
|
||||
This is required for LXC to start.
|
||||
|
||||
"""
|
||||
if 'fstabs' not in group:
|
||||
return
|
||||
for fstab in group['fstabs']:
|
||||
mount_point = fstab.get('mount_point', fstab['name'])
|
||||
full_path = join(group['path'], mount_point.lstrip('/'))
|
||||
if not isdir(full_path):
|
||||
makedirs(full_path)
|
||||
|
||||
|
||||
def lxc_need_restart():
|
||||
def md5sum(file):
|
||||
return md5(open(file).read()).hexdigest()
|
||||
files = ['/etc/lxc/default.conf', '/etc/default/lxc-net']
|
||||
files += glob('/opt/lxc/*/config')
|
||||
files += glob('/opt/lxc/*/fstab')
|
||||
md5s = []
|
||||
for f in files:
|
||||
md5s.append(md5sum(f))
|
||||
if not isfile(_LXC_MD5):
|
||||
ret = True
|
||||
else:
|
||||
try:
|
||||
old_md5s = cjson.decode(open(_LXC_MD5, 'r').read())
|
||||
except cjson.DecodeError:
|
||||
ret = True
|
||||
else:
|
||||
ret = not old_md5s == md5s
|
||||
|
||||
if ret:
|
||||
fh = open(_LXC_MD5, 'w')
|
||||
fh.write(cjson.encode(md5s))
|
||||
fh.close()
|
||||
return ret
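A hedged usage sketch (not part of this commit) of the helpers above: restart the containers only when lxc_need_restart() reports that the generated LXC configuration changed; restart_containers is a hypothetical callable supplied by the caller.

    from creole.containers import is_lxc_enabled, lxc_need_restart

    def maybe_restart(restart_containers):
        # restart only in container mode and only when the LXC config files changed
        if is_lxc_enabled() and lxc_need_restart():
            restart_containers()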

creole/dtd_parser.py (new file, 115 lines)
@@ -0,0 +1,115 @@
# -*- coding: utf-8 -*-
|
||||
|
||||
from .i18n import _
|
||||
|
||||
from tiramisu import option
|
||||
CONVERT_VALUE = {'True': True, 'False': False, 'None': None}
|
||||
forbidden_name = ('level',)
|
||||
|
||||
def parse_dtd(filename):
|
||||
"""Parse DTD file and return a dict.
|
||||
Dict structure:
|
||||
|
||||
- key: name of element
|
||||
- values:
|
||||
|
||||
- type: if text, option type
|
||||
- options: list of subelements
|
||||
- needs: list of mandatory attributes with None or list of possible
|
||||
value
|
||||
- optionals: tuple:
|
||||
- list of optional attributes with None or list of possible
|
||||
value
|
||||
- default value (None if no default value)
|
||||
|
||||
Example:
|
||||
{'container':
|
||||
{'type': False,
|
||||
'options': ['service', 'interface', 'package', 'file', 'disknod'],
|
||||
'needs': {'name': {'values': None, 'type': None},
|
||||
'optionals': {'group': {'values': None, 'default': None,
|
||||
'type': None},
|
||||
'id': {'values': None, 'default': None, 'type': None}}}
|
||||
}
|
||||
"""
|
||||
def parse_option(option):
|
||||
option = option.replace('(', '').replace('*', '').replace(')', '')
|
||||
option = option.replace('>', '').replace(' ', '').replace('+', '')
|
||||
option = option.split('|')
|
||||
options = []
|
||||
for opt in option:
|
||||
options.extend(opt.split(','))
|
||||
if options == ['EMPTY']:
|
||||
options = []
|
||||
return options
|
||||
|
||||
def parse_comment(comment, options=None):
|
||||
type_ = None
|
||||
if comment.startswith('<!--') and comment.endswith('-->'):
|
||||
comment = comment[4:-3]
|
||||
if comment.endswith('Option'):
|
||||
if comment == 'ChoiceOption':
|
||||
raise ValueError(_(u'Do not write "ChoiceOption" in comments'))
|
||||
try:
|
||||
type_ = getattr(option, comment)
|
||||
except AttributeError:
|
||||
raise ValueError(_(u"Unvalid comment content: must match a valid attribute name"))
|
||||
else:
|
||||
#comment is the attribute name, the option type it's value
|
||||
type_ = comment
|
||||
return type_
|
||||
|
||||
fh = open(filename)
|
||||
dtd_load = {}
|
||||
for line in fh.readlines():
|
||||
sline = line.split()
|
||||
if sline == []:
|
||||
continue
|
||||
#for element line
|
||||
if sline[0] == '<!ELEMENT':
|
||||
if sline[-1].startswith('<!--') and sline[-1].endswith('-->'):
|
||||
options = ' '.join(sline[2:-1])
|
||||
else:
|
||||
options = ' '.join(sline[2:])
|
||||
options = parse_option(options)
|
||||
type_ = None
|
||||
if '#PCDATA' in options:
|
||||
options.remove('#PCDATA')
|
||||
if sline[-1].startswith('<!--') and sline[-1].endswith('-->'):
|
||||
type_ = parse_comment(sline[-1], options)
|
||||
else:
|
||||
type_ = option.UnicodeOption
|
||||
dtd_load[sline[1]] = {'type': type_, 'options': options,
|
||||
'needs': {}, 'optionals': {}}
|
||||
#for attlist line
|
||||
elif sline[0] == '<!ATTLIST':
|
||||
if sline[1] in forbidden_name:
|
||||
raise ValueError(_(u'Using name {0} is forbidden in attributes').format(sline[1]))
|
||||
#possible value
|
||||
if sline[3] == 'CDATA':
|
||||
values = None
|
||||
else:
|
||||
if not sline[3].startswith('(') or not sline[3].endswith(')'):
|
||||
raise Exception(_(u'Not a valid list'))
|
||||
sline3 = sline[3][1:-1].split('|')
|
||||
values = []
|
||||
for val in sline3:
|
||||
values.append(CONVERT_VALUE.get(val, val))
|
||||
#comment
|
||||
type_ = parse_comment(sline[-1])
|
||||
#default value or state value (needs or optionals)
|
||||
if sline[4].startswith('#REQUIRED'):
|
||||
dtd_load[sline[1]]['needs'][sline[2]] = {'values': values,
|
||||
'type': type_}
|
||||
elif sline[4].startswith('#IMPLIED'):
|
||||
dtd_load[sline[1]]['optionals'][sline[2]] = {'values': values,
|
||||
'default': None,
|
||||
'type': type_}
|
||||
else:
|
||||
default = sline[4].replace('"', '').replace("'", '').replace(
|
||||
'>', '').strip()
|
||||
default = CONVERT_VALUE.get(default, default)
|
||||
dtd_load[sline[1]]['optionals'][sline[2]] = {'values': values,
|
||||
'default': default,
|
||||
'type': type_}
|
||||
return dtd_load
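A hedged usage sketch (not part of this commit) showing how the returned dict is typically read, using dtdfilename from creole.config:

    from creole.config import dtdfilename
    from creole.dtd_parser import parse_dtd

    dtd = parse_dtd(dtdfilename)
    container = dtd['container']
    print container['options']       # allowed sub-elements, e.g. ['service', 'interface', ...]
    print container['needs'].keys()  # required attributes, e.g. ['name']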

creole/eoleversion.py (new file, 36 lines)
@@ -0,0 +1,36 @@
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright © 2014 Pôle de compétences EOLE <eole@ac-dijon.fr>
|
||||
#
|
||||
# License CeCILL:
|
||||
# * in french: http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# * in english http://www.cecill.info/licences/Licence_CeCILL_V2-en.html
|
||||
##########################################################################
|
||||
|
||||
"""Version variable of EOLE distribution
|
||||
|
||||
"""
|
||||
|
||||
UBUNTU_VERSION = u'xenial'
|
||||
"""Ubuntu version used by EOLE.
|
||||
|
||||
"""
|
||||
EOLE_VERSION = u'2.6'
|
||||
"""Current stable EOLE distribution.
|
||||
|
||||
"""
|
||||
|
||||
EOLE_RELEASE = u'{0}.2'.format(EOLE_VERSION)
|
||||
"""Release version of the current stable EOLE distribution.
|
||||
|
||||
"""
|
||||
|
||||
ENVOLE_VERSION = u'6'
|
||||
"""Envole version to use.
|
||||
|
||||
"""
|
||||
|
||||
LAST_RELEASE = u'2'
|
||||
"""Last stable release for this version
|
||||
|
||||
"""

creole/error.py (new file, 90 lines)
@@ -0,0 +1,90 @@
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Erreurs Creole
|
||||
"""
|
||||
|
||||
class VirtError(Exception):
|
||||
"""incohérence concernant les conteneurs"""
|
||||
pass
|
||||
|
||||
#class ValueEoleError(Exception):
|
||||
# """Cette valeur n'existe pas"""
|
||||
# pass
|
||||
#
|
||||
class NoneError(Exception):
|
||||
"""Valeur vide"""
|
||||
pass
|
||||
|
||||
class OutOfRange(Exception):
|
||||
pass
|
||||
|
||||
class TypeEoleError(Exception):
|
||||
"""Erreur de type"""
|
||||
pass
|
||||
|
||||
class ConfigError(Exception):
|
||||
pass
|
||||
|
||||
class NetworkConfigError(Exception):
|
||||
""" Network configuration error
|
||||
"""
|
||||
pass
|
||||
|
||||
class FileNotFound(ConfigError):
|
||||
pass
|
||||
|
||||
class TemplateError(ConfigError):
|
||||
pass
|
||||
|
||||
class TemplateDisabled(TemplateError):
|
||||
"""Template is disabled.
|
||||
"""
|
||||
pass
|
||||
|
||||
class DependencyError(ConfigError):
|
||||
pass
|
||||
|
||||
#class ConstraintError(ConfigError):
|
||||
# pass
|
||||
#
|
||||
|
||||
|
||||
class LockError(Exception):
|
||||
""" Add lock error
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class UnlockError(Exception):
|
||||
""" Remove lock error
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class UserExit(Exception):
|
||||
""" User exit(0) signal
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class UserExitError(Exception):
|
||||
""" User exit(1) signal
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class CreoleOperationError(Exception):
|
||||
"""Type error or value Error for Creole variable's type or values
|
||||
"""
|
||||
|
||||
|
||||
class SpaceObjShallNotBeUpdated(Exception):
|
||||
"""Specific behavior in case of the presence or not
|
||||
of an object in the space object
|
||||
"""
|
||||
|
||||
|
||||
class CreoleDictConsistencyError(Exception):
|
||||
"""It's not only that the Creole XML is valid against the Creole DTD
|
||||
it's that it is not consistent.
|
||||
"""

creole/fonctionseole.py (new file, 280 lines)
@@ -0,0 +1,280 @@
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
fonctions communes Creole
|
||||
"""
|
||||
import os, time, re
|
||||
from os.path import join, isfile
|
||||
from pyeole.process import system_out, system_code
|
||||
from pyeole.ansiprint import print_orange
|
||||
from pyeole.log import init_logging
|
||||
from pyeole.pkg import EolePkg
|
||||
from pyeole.encode import normalize
|
||||
from .config import LOCALKERNEL_FILE, REBOOT_FILE
|
||||
|
||||
from .i18n import _
|
||||
|
||||
#si creole client n'est pas démarré
|
||||
global PkgManager
|
||||
PkgManager = None
|
||||
|
||||
######################
|
||||
# Gestion des noyaux #
|
||||
######################
|
||||
|
||||
def split_version(version):
|
||||
"""
|
||||
return version as list splitting subnumbers
|
||||
:param version: version number string
|
||||
:type version: string
|
||||
"""
|
||||
version_splitted = re.split('[-\.]', version)
|
||||
version_splitted = map(int, version_splitted)
|
||||
return version_splitted
|
||||
|
||||
def get_version_filtered_pkgs(prefix='linux-image'):
|
||||
"""
|
||||
return installed packages list ordered by version number
|
||||
"""
|
||||
vers_pkg_re = r"{0}-(?P<vers>[0-9]+(?P<upstr_vers>\.[0-9]+)*(-(?P<pkg_vers>[0-9]+))?)".format(prefix)
|
||||
vers_pkg_re = re.compile(vers_pkg_re)
|
||||
installed_pkgs = get_installed_kernel(prefix)
|
||||
vers_pkgs = [(pkg, split_version(vers_pkg_re.search(pkg).group('vers')))
|
||||
for pkg in installed_pkgs
|
||||
if vers_pkg_re.search(pkg)]
|
||||
vers_pkgs = [pkg[0] for pkg in sorted(vers_pkgs, key=lambda p: p[1])]
|
||||
return vers_pkgs
|
||||
|
||||
def get_custom_kernel():
|
||||
"""
|
||||
renvoie le nom du noyau personnalisé
|
||||
ou None
|
||||
"""
|
||||
if isfile(LOCALKERNEL_FILE):
|
||||
# noyau personnalisé détecté
|
||||
kernel_file = LOCALKERNEL_FILE
|
||||
return file(kernel_file).read().strip()
|
||||
|
||||
def get_wanted_kernel():
|
||||
"""
|
||||
renvoie le nom du noyau sur lequel on veut tourner
|
||||
"""
|
||||
custom_kernel = get_custom_kernel()
|
||||
if custom_kernel:
|
||||
ret = custom_kernel
|
||||
else:
|
||||
kernel_images = get_version_filtered_pkgs()
|
||||
# Get last kernel version
|
||||
last_ver = kernel_images[-1].split('-')
|
||||
if len(last_ver) >= 4:
|
||||
last_ver = "{0}-{1}-{2}".format(*last_ver[2:5])
|
||||
elif len(last_ver) == 3:
|
||||
last_ver = "{0}".format(last_ver[-1])
|
||||
ret = last_ver
|
||||
return ret
|
||||
|
||||
def get_current_kernel():
|
||||
"""
|
||||
renvoie le nom du noyau sur lequel on tourne
|
||||
"""
|
||||
version = system_out(['uname', '-r'])[1].strip()
|
||||
return version
|
||||
|
||||
def get_installed_kernel(kernel):
|
||||
"""
|
||||
renvoie la liste des noyaux installés
|
||||
correspondant à celui demandé
|
||||
"""
|
||||
cmd = """COLUMNS=180 dpkg -l 2>/dev/null | awk -F " " '/^(i|h)i.*%s/ {print $2}'""" % kernel
|
||||
return os.popen(cmd).read().splitlines()
|
||||
|
||||
def get_package_depends(pkg):
|
||||
"""
|
||||
Renvois les dépendances d'un paquet
|
||||
"""
|
||||
try:
|
||||
global PkgManager
|
||||
if PkgManager is None:
|
||||
PkgManager = EolePkg('apt')
|
||||
res = PkgManager.get_depends(pkg)
|
||||
return res
|
||||
except:
|
||||
return []
|
||||
|
||||
def controle_kernel(force_grub=True):
|
||||
"""
|
||||
Vérifie si on est sur le noyau désiré
|
||||
Renvoie True si un reboot est nécessaire
|
||||
"""
|
||||
need_boot = False
|
||||
if isfile(REBOOT_FILE):
|
||||
# i.e. /var/run/reboot-required
|
||||
need_boot = True
|
||||
|
||||
wanted_kernel = get_wanted_kernel()
|
||||
# on utilise le noyau spécifié
|
||||
if wanted_kernel != get_current_kernel():
|
||||
need_boot = True
|
||||
if force_grub:
|
||||
# Update grub does the job since eole-kernel-version 2.3-eole37~2
|
||||
print _(u"Updating Grub configuration")
|
||||
# ajout de LVM_SUPPRESS_FD_WARNINGS pour #10761
|
||||
system_code("/usr/sbin/update-grub2", env={'LVM_SUPPRESS_FD_WARNINGS': '1', "LC_ALL": 'fr_FR.UTF-8'})
|
||||
# reboot nécessaire ?
|
||||
return need_boot
|
||||
|
||||
def regen_initrd():
|
||||
"""
|
||||
vérifie la présence de l'initrd
|
||||
"""
|
||||
noyau = get_wanted_kernel()
|
||||
if not isfile("/boot/initrd.img-%s" % noyau):
|
||||
print _(u"Initramfs missing, generating :")
|
||||
cmd = ["/usr/sbin/update-initramfs", '-c', '-k', noyau]
|
||||
system_code(cmd)
|
||||
|
||||
def get_kernel_to_remove():
|
||||
"""
|
||||
Obtenir la liste des noyaux a supprimer. Tous les noyaux sauf :
|
||||
- le noyau courant
|
||||
- les deux noyaux les plus récents
|
||||
- l'éventuel noyau personnalisé
|
||||
"""
|
||||
# tous les noyaux installés
|
||||
installed_kernels = get_version_filtered_pkgs()
|
||||
# les deux noyaux les plus récents
|
||||
to_keep = installed_kernels[-2:]
|
||||
# tous les headers installés
|
||||
installed_kernels.extend(get_version_filtered_pkgs(prefix='linux-headers'))
|
||||
# le noyau courant
|
||||
to_keep.append('linux-image-{0}'.format(get_current_kernel()))
|
||||
# l'éventuel noyau personnalisé
|
||||
custom_kernel = get_custom_kernel()
|
||||
if custom_kernel:
|
||||
to_keep.append('linux-image-{0}'.format(custom_kernel))
|
||||
# les headers correspondants aux noyaux à conserver
|
||||
headers_to_keep = [k.replace('image', 'headers') for k in to_keep]
|
||||
headers_to_keep.extend([h.replace('-generic', '') for h in headers_to_keep])
|
||||
to_keep.extend(headers_to_keep)
|
||||
# on fait la différence
|
||||
to_remove = list(set(installed_kernels) - set(to_keep))
|
||||
return to_remove
|
||||
|
||||
def purge_rc():
|
||||
"""
|
||||
Purge des paquets "rc"
|
||||
"""
|
||||
cmd = """COLUMNS=180 dpkg -l|grep "^rc"|awk -F " " '{print $2}'"""
|
||||
rcs = os.popen(cmd).read().splitlines()
|
||||
for pak in rcs:
|
||||
os.system("dpkg -P %s >/dev/null" % pak)
|
||||
|
||||
def log(etat, msg, type_proc, console=True):
|
||||
"""
|
||||
effectue un log local et éventuellement sur zephir
|
||||
"""
|
||||
msg = normalize(msg)
|
||||
type_proc = normalize(type_proc)
|
||||
display = False
|
||||
log_func = 'info'
|
||||
if etat == "ERR":
|
||||
if console:
|
||||
# affichage sur la console
|
||||
display = True
|
||||
log_func = 'error'
|
||||
|
||||
try:
|
||||
z_logger = init_logging(name=u'zephir', syslog=True, level=u'info', console=display)
|
||||
except ValueError, err:
|
||||
z_logger = init_logging(name=u'zephir', level=u'info', console=True)
|
||||
z_logger.warn(_(u"Syslog logging is not working properly: {0}".format(err)))
|
||||
z_logger.warn(_(u"You may need to start/restart systemd-journald"))
|
||||
|
||||
getattr(z_logger, log_func)("%s => %s : %s " % (type_proc, etat, msg))
|
||||
|
||||
def zephir(etat, msg, type_proc, console=True):
|
||||
""" gestion des messages Zephir """
|
||||
etat_zeph = None
|
||||
if etat.upper().startswith("INIT"):
|
||||
etat_zeph = -1
|
||||
elif etat.upper().startswith("FIN"):
|
||||
etat_zeph = 0
|
||||
elif etat.upper().startswith('ERR'):
|
||||
etat_zeph = 1
|
||||
elif etat.upper().startswith('MSG'):
|
||||
etat_zeph = -2
|
||||
# log local si msg ou erreur
|
||||
if (len(msg) > 0) or (etat.upper() == "ERR"):
|
||||
log(etat, msg, type_proc, console)
|
||||
# log sur zephir si disponible
|
||||
if etat_zeph is not None:
|
||||
try:
|
||||
# si serveur enregistré, on envoie un log à Zéphir
|
||||
from zephir.zephir_conf.zephir_conf import id_serveur
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
from zephir.lib_zephir import log as zlog
|
||||
zlog(type_proc, etat_zeph, msg, str(time.ctime()))
|
||||
|
||||
def init_proc(type_proc):
|
||||
"""
|
||||
initialisation d'une procédure (log démarrage + bcage éventuel)
|
||||
"""
|
||||
if verify_lock(type_proc):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
#def end_proc(etat,msg,type_proc):
|
||||
# """
|
||||
# loggue la fin d'une procédure
|
||||
# """
|
||||
# log(etat,msg,type_proc )
|
||||
|
||||
def verify_lock(name):
|
||||
"""
|
||||
vérifie le bloquage ou non d'une procédure
|
||||
"""
|
||||
LOCK_FILE = "/usr/share/zephir/zephir_locks"
|
||||
if name == "":
|
||||
return True
|
||||
from zephir.lib_zephir import zephir_path
|
||||
try:
|
||||
from zephir.lib_zephir import config, zephir, convert
|
||||
locks = convert(zephir.serveurs.get_locks(config.id_serveur))
|
||||
if locks[0] == 0:
|
||||
# erreur sur zephir, on ignore cette phase
|
||||
raise Exception
|
||||
locks = [lock[0] for lock in locks[1]]
|
||||
except Exception, mess:
|
||||
# pas de message d'erreur si le serveur n'est pas enregistré
|
||||
zephir_error = False
|
||||
if isfile(join(zephir_path, "zephir_conf", "zephir_conf.py")):
|
||||
# on ne bloque pas si l'appel à zephir échoue
|
||||
print ""
|
||||
print_orange(_(u"Checking permissions on Zéphir for {0} impossible.").format(name))
|
||||
print_orange(_(u"Error message: {0}").format(mess))
|
||||
zephir_error = True
|
||||
# on regarde le denier état
|
||||
if os.path.exists(LOCK_FILE):
|
||||
if zephir_error:
|
||||
print_orange(_(u"Using stored parameters"))
|
||||
file_lock = file(LOCK_FILE)
|
||||
locks = file_lock.read().split('\n')
|
||||
file_lock.close()
|
||||
# on bloque si interdit
|
||||
if name in locks:
|
||||
return False
|
||||
else:
|
||||
# mise en place du fichier de droits
|
||||
content = "\n".join(locks)
|
||||
try:
|
||||
file_lock = file(LOCK_FILE, "w")
|
||||
file_lock.write(content)
|
||||
file_lock.close()
|
||||
except:
|
||||
print _(u"Updating {0} impossible (insufficient rights).").format(LOCK_FILE)
|
||||
# retour du code
|
||||
if name in locks:
|
||||
return False
|
||||
return True
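A hedged sketch (not part of this commit) of why split_version() is used to order kernels: versions compare numerically once split into integer lists, whereas plain string comparison gets it wrong.

    from creole.fonctionseole import split_version

    assert split_version('4.15.0-112') > split_version('4.15.0-20')
    assert '4.15.0-112' < '4.15.0-20'   # lexical comparison sorts these the wrong way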

creole/i18n.py (new file, 52 lines)
@@ -0,0 +1,52 @@
# -*- coding: UTF-8 -*-
|
||||
# Copyright (C) 2012-2013 Team tiramisu (see AUTHORS for all contributors)
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 3 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
# The original `Config` design model is unproudly borrowed from
|
||||
# the rough gus of pypy: pypy: http://codespeak.net/svn/pypy/dist/pypy/config/
|
||||
# the whole pypy projet is under MIT licence
|
||||
"internationalisation utilities"
|
||||
import gettext
|
||||
import os
|
||||
import sys
|
||||
import locale
|
||||
|
||||
# Application Name
|
||||
APP_NAME = 'creole'
|
||||
|
||||
# Translation dir
|
||||
APP_DIR = os.path.join(sys.prefix, 'share')
|
||||
LOCALE_DIR = os.path.join(APP_DIR, 'locale')
|
||||
|
||||
# Default Language
|
||||
DEFAULT_LANG = os.environ.get('LANG', '').split(':')
|
||||
DEFAULT_LANG += ['en_US']
|
||||
|
||||
languages = []
|
||||
lc, encoding = locale.getdefaultlocale()
|
||||
if lc:
|
||||
languages = [lc]
|
||||
|
||||
languages += DEFAULT_LANG
|
||||
mo_location = LOCALE_DIR
|
||||
|
||||
gettext.find(APP_NAME, mo_location)
|
||||
gettext.textdomain(APP_NAME)
|
||||
gettext.bind_textdomain_codeset(APP_NAME, "UTF-8")
|
||||
gettext.translation(APP_NAME, fallback=True)
|
||||
|
||||
t = gettext.translation(APP_NAME, fallback=True)
|
||||
|
||||
_ = t.gettext
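A hedged usage sketch (not part of this commit): the other creole modules simply import the translation helper set up above and wrap user-visible strings with it.

    from creole.i18n import _

    print _(u'Template processing finished')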

creole/lint/TODO (new file, 14 lines)
@@ -0,0 +1,14 @@
XXX: Currently in progress, NOT IN WORKING STATE.
|
||||
|
||||
MAJOR REASON IS :
|
||||
**revamping the implementation entirely for scalability**
|
||||
|
||||
AND :
|
||||
NOT INTEGRATED YET
|
||||
for pretty print in the console
|
||||
- ansiprint.py
|
||||
- terminalreport.py
|
||||
- terminalwriter.py

creole/lint/__init__.py (new executable file, empty)

creole/lint/ansiwriter.py (new file, 78 lines)
@@ -0,0 +1,78 @@
# -*- coding: utf-8 -*-
|
||||
"""Simple API fro creolelint reports"""
|
||||
import sys
|
||||
from creole.lint.warning import Warn
|
||||
from creole.lint import warnsymb
|
||||
|
||||
def ansi_print(text=None, fhandle=None, newline=True, flush=False):
|
||||
"""normalized (ansi) print >> file handle function"""
|
||||
#sys.stdout.write(self.getvalue())
|
||||
if fhandle is None:
|
||||
fhandle = sys.stderr
|
||||
if text != None:
|
||||
#text = text.strip()
|
||||
if newline:
|
||||
text += '\n'
|
||||
fhandle.write(text)
|
||||
if flush:
|
||||
fhandle.flush()
|
||||
# if fhandle:
|
||||
# fhandle.close()
|
||||
|
||||
class AnsiWriter(object):
|
||||
"""Définit une interface d'écriture de warnings
|
||||
"""
|
||||
def __init__(self, write_level, output=sys.stdout):
|
||||
self.write_level = write_level
|
||||
self.output = output
|
||||
|
||||
def process(self, linter):
|
||||
"""
|
||||
parse a result from an item.check() dictionary
|
||||
which is made of {name: TmplVar}
|
||||
"""
|
||||
ident=1
|
||||
itemname = linter.name
|
||||
warnno = linter.warnno
|
||||
warncomment = linter.warncomment
|
||||
display = linter.display
|
||||
name, level = warnsymb.errorcode[warnno]
|
||||
if level > getattr(warnsymb, self.write_level):
|
||||
print "\nLint {0} désactivé (niveau {1})".format(itemname, warnsymb.errorlevel[level])
|
||||
return ''
|
||||
level = warnsymb.errorlevel[level]
|
||||
if not display:
|
||||
ansi_print('')
|
||||
ansi_print('%s (%s:%s:%s)'%(warncomment, itemname, name, level), self.output)
|
||||
checks = linter.check()
|
||||
warn = Warn(self.write_level, itemname, warnno, warncomment, checks)
|
||||
dico_loc = warn.to_dict()
|
||||
if dico_loc != '' and dico_loc != {}:
|
||||
ansi_print('')
|
||||
ansi_print('%s (%s:%s:%s)'%(warncomment, itemname, name, level), self.output)
|
||||
def compare(x,y):
|
||||
return cmp(x[0],y[0])
|
||||
for vfile in dico_loc.keys():
|
||||
if vfile != 'dictionnaire':
|
||||
ansi_print('%s\-- fichier %s' % (' '*ident, vfile), self.output, newline=False)
|
||||
vlines = dico_loc[vfile]
|
||||
vlines.sort(compare)
|
||||
oldline=0
|
||||
for vline, var in vlines:
|
||||
if hasattr(var, 'name'):
|
||||
vname = '%%%%%s'%str(var.name)
|
||||
else:
|
||||
vname = str(var)
|
||||
if vline != None:
|
||||
if vline != oldline:
|
||||
ansi_print('', self.output)
|
||||
ansi_print('%s|-- ligne %s' % (' '*(ident+1), vline), self.output, newline=False)
|
||||
pass
|
||||
oldline=vline
|
||||
if vfile != 'dictionnaire':
|
||||
ansi_print(" %s" %vname, self.output, newline=False)
|
||||
else:
|
||||
ansi_print("%s\-- %s" %(' '*ident, vname), self.output)
|
||||
if vfile != 'dictionnaire':
|
||||
ansi_print('', self.output)
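A hedged usage sketch (not part of this commit): ansi_print() writes to stderr by default; pass an explicit handle to send a report elsewhere.

    import sys
    from creole.lint.ansiwriter import ansi_print

    ansi_print('creolelint report', fhandle=sys.stdout)
    ansi_print('done', fhandle=sys.stdout, flush=True)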

creole/lint/cmdexec.py (new executable file, 39 lines)
@@ -0,0 +1,39 @@
"a system command launcher"
|
||||
|
||||
import os, sys
|
||||
import subprocess
|
||||
from subprocess import Popen, PIPE
|
||||
|
||||
def cmdexec(cmd):
|
||||
""" return output of executing 'cmd' in a separate process.
|
||||
|
||||
raise ExecutionFailed exception if the command failed.
|
||||
the exception will provide an 'err' attribute containing
|
||||
the error-output from the command.
|
||||
"""
|
||||
process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
out, err = process.communicate()
|
||||
status = process.poll()
|
||||
if status:
|
||||
raise ExecutionFailed(status, status, cmd, out, err)
|
||||
return out
|
||||
|
||||
class ExecutionFailed(Exception):
|
||||
def __init__(self, status, systemstatus, cmd, out, err):
|
||||
Exception.__init__(self)
|
||||
self.status = status
|
||||
self.systemstatus = systemstatus
|
||||
self.cmd = cmd
|
||||
self.err = err
|
||||
self.out = out
|
||||
|
||||
def __str__(self):
|
||||
return "ExecutionFailed: %d %s\n%s" %(self.status, self.cmd, self.err)
|
||||
|
||||
# export the exception under the name 'Error'
|
||||
Error = ExecutionFailed
|
||||
try:
|
||||
ExecutionFailed.__module__ = 'cmdexec'
|
||||
ExecutionFailed.__name__ = 'Error'
|
||||
except (AttributeError, TypeError):
|
||||
pass
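A hedged usage sketch (not part of this commit): cmdexec() returns the command's stdout on success and raises cmdexec.Error (an alias of ExecutionFailed) on a non-zero exit status.

    from creole.lint import cmdexec

    try:
        out = cmdexec.cmdexec('ls /etc/eole')
    except cmdexec.Error, err:
        print err.err   # stderr captured from the failed command
    else:
        print out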

creole/lint/creolelint.py (new file, 1195 lines)
File diff suppressed because it is too large

creole/lint/entities.py (new file, 132 lines)
@@ -0,0 +1,132 @@
# -*- coding: utf-8 -*-
|
||||
|
||||
modules = [
|
||||
'Zéphir',
|
||||
'Eolebase',
|
||||
'Amon',
|
||||
'Sentinelle',
|
||||
'Sphynx',
|
||||
'Scribe',
|
||||
'Eclair',
|
||||
'Horus',
|
||||
'ZéphirLog',
|
||||
'PreludeManager',
|
||||
'AmonEcole',
|
||||
'EoleVZ',
|
||||
'Seshat',
|
||||
'ClientScribe',
|
||||
'ClientHorus']
|
||||
|
||||
projets = [
|
||||
' EOLE', # confusion avec eole-annuaire
|
||||
'EAD',
|
||||
'ead-web',
|
||||
'ead-server',
|
||||
'frontend',
|
||||
'backend',
|
||||
'Era',
|
||||
'ESU',
|
||||
'AGRIATES',
|
||||
'RACINE-AGRIATES',
|
||||
'Samba',
|
||||
'Creole',
|
||||
'GenConfig',
|
||||
'EoleDB',
|
||||
'EoleSSO',
|
||||
'Zéphir',
|
||||
"application Zéphir",
|
||||
"Zéphir-web",
|
||||
]
|
||||
|
||||
os_logiciels_protocoles = [
|
||||
'Linux',
|
||||
'GNU/Linux',
|
||||
'Ubuntu',
|
||||
'Unix',
|
||||
'Windows',
|
||||
'Microsoft',
|
||||
# 'ALIAS',
|
||||
'BlockInput',
|
||||
'Epreuve@SSR',
|
||||
'SSH',
|
||||
'OpenSSH',
|
||||
'DHCP',
|
||||
'DHCPd',
|
||||
'ClamAV',
|
||||
'NuFW',
|
||||
'NuWinC',
|
||||
'Nuauth',
|
||||
'DansGuardian',
|
||||
'Bacula',
|
||||
'Bareos',
|
||||
'TCP',
|
||||
'UDP',
|
||||
'ICMP',
|
||||
'IP',
|
||||
' IPsec', # confusion avec la commande ipsec
|
||||
'strongSwan',
|
||||
'DMZ',
|
||||
'FTP',
|
||||
'SMB',
|
||||
'XML',
|
||||
'XML-RPC',
|
||||
' SSO',
|
||||
# 'CAS',
|
||||
'SAML',
|
||||
'Sympa',
|
||||
'MySQL',
|
||||
'SpamAssassin',
|
||||
'web',
|
||||
'phpMyAdmin',
|
||||
'Grr',
|
||||
'Gibii',
|
||||
'Gepi',
|
||||
'SPIP-EVA',
|
||||
'Envole',
|
||||
'Envole 2',
|
||||
'WebShare',
|
||||
' CSS', # confusion avec .css
|
||||
'CUPS',
|
||||
'OpenOffice.org',
|
||||
'GDM',
|
||||
'client léger',
|
||||
'client lourd',
|
||||
'OpenLDAP',
|
||||
'ProFTPD',
|
||||
'Vim',
|
||||
'Controle-vnc',
|
||||
'BE1D',
|
||||
'RVP',
|
||||
'PostgreSQL',
|
||||
'Squid',
|
||||
'NUT',
|
||||
'PPPoE',
|
||||
'VLAN',
|
||||
'SSL',
|
||||
'Nginx',
|
||||
'reverse proxy',
|
||||
'SquirrelMail',
|
||||
'LDAP',
|
||||
'FreeRADIUS',
|
||||
'LightSquid',
|
||||
'SARG',
|
||||
'iptables',
|
||||
'Netfilter',
|
||||
'POSH',
|
||||
'InterBase',
|
||||
'OCS',
|
||||
]
|
||||
|
||||
divers = [
|
||||
'Éducation nationale',
|
||||
'Internet',
|
||||
'intranet',
|
||||
'pare-feu',
|
||||
'anti-virus',
|
||||
'anti-spam',
|
||||
'USB',
|
||||
'relai',
|
||||
]
|
||||
|
||||
entities = modules + projets + os_logiciels_protocoles + divers

creole/lint/error.py (new executable file, 82 lines)
@@ -0,0 +1,82 @@
"""errno-specific classes"""
|
||||
|
||||
import sys, os, errno
|
||||
|
||||
class Error(EnvironmentError):
|
||||
def __repr__(self):
|
||||
return "%s.%s %r: %s " %(self.__class__.__module__,
|
||||
self.__class__.__name__,
|
||||
self.__class__.__doc__,
|
||||
" ".join(map(str, self.args)),
|
||||
#repr(self.args)
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
s = "[%s]: %s" %(self.__class__.__doc__,
|
||||
" ".join(map(str, self.args)),
|
||||
)
|
||||
return s
|
||||
|
||||
#FIXME set the different error better suited than errno
|
||||
_winerrnomap = {
|
||||
2: errsymb.ENOENT,
|
||||
3: errsymb.ENOENT,
|
||||
17: errsymb.EEXIST,
|
||||
22: errsymb.ENOTDIR,
|
||||
267: errsymb.ENOTDIR,
|
||||
5: errsymb.EACCES, # anything better?
|
||||
}
|
||||
|
||||
class ErrorMaker(object):
|
||||
""" lazily provides Exception classes for each possible POSIX errno
|
||||
(as defined per the 'errno' module). All such instances
|
||||
subclass EnvironmentError.
|
||||
"""
|
||||
Error = Error
|
||||
_errno2class = {}
|
||||
|
||||
def __getattr__(self, name):
|
||||
eno = getattr(errno, name)
|
||||
cls = self._geterrnoclass(eno)
|
||||
setattr(self, name, cls)
|
||||
return cls
|
||||
|
||||
def _geterrnoclass(self, eno):
|
||||
try:
|
||||
return self._errno2class[eno]
|
||||
except KeyError:
|
||||
clsname = errno.errorcode.get(eno, "UnknownErrno%d" %(eno,))
|
||||
errorcls = type(Error)(clsname, (Error,),
|
||||
{'__module__':'py.error',
|
||||
'__doc__': os.strerror(eno)})
|
||||
self._errno2class[eno] = errorcls
|
||||
return errorcls
|
||||
|
||||
def checked_call(self, func, *args):
|
||||
""" call a function and raise an errno-exception if applicable. """
|
||||
__tracebackhide__ = True
|
||||
try:
|
||||
return func(*args)
|
||||
except self.Error:
|
||||
raise
|
||||
except EnvironmentError:
|
||||
cls, value, tb = sys.exc_info()
|
||||
if not hasattr(value, 'errno'):
|
||||
raise
|
||||
__tracebackhide__ = False
|
||||
errno = value.errno
|
||||
try:
|
||||
if not isinstance(value, WindowsError):
|
||||
raise NameError
|
||||
except NameError:
|
||||
# we are not on Windows, or we got a proper OSError
|
||||
cls = self._geterrnoclass(errno)
|
||||
else:
|
||||
try:
|
||||
cls = self._geterrnoclass(_winerrnomap[errno])
|
||||
except KeyError:
|
||||
raise value
|
||||
raise cls("%s%r" % (func.__name__, args))
|
||||
__tracebackhide__ = True
|
||||
|
||||
error = ErrorMaker()
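A hedged usage sketch (not part of this commit) of the intended API, assuming the import line at the top of this file resolves to the standard errno module: checked_call() re-raises an EnvironmentError as an errno-specific exception class.

    from creole.lint.error import error

    try:
        error.checked_call(open, '/nonexistent/file')
    except error.ENOENT:
        print 'file is missing'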

creole/lint/normalize.py (new file, 76 lines)
@@ -0,0 +1,76 @@
# coding: utf-8
|
||||
import re
|
||||
import unicodedata
|
||||
from entities import entities
|
||||
|
||||
# ______________________________________________________________________________
|
||||
|
||||
ENCODING = 'utf-8'
|
||||
|
||||
def strip_accents(string):
|
||||
return unicodedata.normalize('NFKD', unicode(string, ENCODING)
|
||||
).encode('ASCII', 'ignore')
|
||||
|
||||
def normalize_entities():
|
||||
"""
|
||||
enleve les accents de la liste des entites + minuscules
|
||||
:return: entities normalisé
|
||||
"""
|
||||
norm_entities = []
|
||||
for entitie in entities:
|
||||
norm_entitie = strip_accents(entitie).lower()
|
||||
norm_entities.append(norm_entitie)
|
||||
return norm_entities
|
||||
|
||||
NORM_ENTITIES = normalize_entities()
|
||||
|
||||
# ______________________________________________________________________________
|
||||
|
||||
def parse_string(text):
|
||||
"""
|
||||
enlève les accents d'un texte
|
||||
"""
|
||||
# libelle = strip_accents(text)
|
||||
words = re.findall('([a-zA-Zéèàùêôëö_]+)', text)
|
||||
return words
|
||||
|
||||
def is_in_entities(text):
|
||||
"""
|
||||
donne l'index dans entities du texte
|
||||
"""
|
||||
norm_text = text.lower()
|
||||
index = None
|
||||
if norm_text in NORM_ENTITIES:
|
||||
index = NORM_ENTITIES.index(norm_text)
|
||||
return index
|
||||
|
||||
def is_correct(libelle, name, family=False):
|
||||
if libelle is not None and type(libelle) != str:
|
||||
libelle = unicode.encode(libelle, ENCODING)
|
||||
ret = []
|
||||
if libelle == '' or libelle is None:
|
||||
return ret
|
||||
if libelle[0].islower():
|
||||
#FIXME: faux positifs connus
|
||||
if not libelle.startswith('ejabberd') and \
|
||||
not libelle.startswith('phpMyAdmin'):
|
||||
ret.append('%%%%%s : phrase sans majuscule'%name)
|
||||
for text in parse_string(libelle):
|
||||
text_index = is_in_entities(text)
|
||||
if not text_index == None:
|
||||
if str(text) != str(entities[text_index]):
|
||||
#FIXME: faux positifs connus
|
||||
if 'ipsec.conf' in libelle or 'test-rvp' in libelle \
|
||||
or 'bareos-' in libelle \
|
||||
or 'bacula-' in libelle \
|
||||
or '/var/log/zephir' in libelle \
|
||||
or 'exemple : eolebase' in libelle:
|
||||
continue
|
||||
ent = str(unicode.encode((unicode(entities[text_index], ENCODING)), ENCODING))
|
||||
if family:
|
||||
ret.append('famille [%s] : %s => %s' % (str(name), text, ent))
|
||||
else:
|
||||
ret.append('%%%%%s : %s => %s' % (str(name), text, ent))
|
||||
return ret
|
||||
# ______________________________________________________________________________
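A hedged usage sketch (not part of this commit): is_correct() returns a list of warnings for a label that starts in lowercase or misspells a known entity.

    from creole.lint.normalize import is_correct

    print is_correct('serveur windows obligatoire', 'mon_label')
    # one warning for the missing capital letter, one for 'windows' => 'Windows'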

creole/lint/parsetemplate.py (new file, 660 lines)
@@ -0,0 +1,660 @@
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import sys
|
||||
from os.path import basename
|
||||
from creole.loader import creole_loader
|
||||
from creole.client import CreoleClient
|
||||
from creole.template import CreoleGet, IsDefined, CreoleTemplateEngine, CreoleMaster
|
||||
from creole import eosfunc
|
||||
from tiramisu.option import *
|
||||
from tiramisu import Config
|
||||
from tiramisu.error import ConfigError, PropertiesOptionError, \
|
||||
RequirementError, ValueWarning
|
||||
from Cheetah import Parser, Compiler
|
||||
from Cheetah.Template import Template
|
||||
from Cheetah.NameMapper import NotFound
|
||||
from pyeole.ansiprint import print_red
|
||||
from creole.eosfunc import valid_regexp
|
||||
from Cheetah.Unspecified import Unspecified
|
||||
import warnings
|
||||
|
||||
|
||||
DEBUG = False
|
||||
#DEBUG = True
|
||||
|
||||
|
||||
client = CreoleClient()
|
||||
compilerSettings = {'directiveStartToken' : u'%',
|
||||
'cheetahVarStartToken' : u'%%', 'EOLSlurpToken' : u'%',
|
||||
'PSPStartToken' : u'µ' * 10, 'PSPEndToken' : u'µ' * 10,
|
||||
'commentStartToken' : u'µ' * 10, 'commentEndToken' : u'µ' * 10,
|
||||
'multiLineCommentStartToken' : u'µ' * 10,
|
||||
'multiLineCommentEndToken' : u'µ' * 10}
|
||||
|
||||
#======================= CHEETAH =======================
|
||||
# This class is used to retrieve all template vars
|
||||
#true_HighLevelParser = Parser._HighLevelParser
|
||||
global cl_chunks, cl_vars
|
||||
cl_chunks = set()
|
||||
cl_vars = set()
|
||||
class cl_Parser(Parser.Parser):
|
||||
|
||||
def getCheetahVarNameChunks(self, *args, **kwargs):
|
||||
global cl_chunks
|
||||
chunks = super(cl_Parser, self).getCheetahVarNameChunks(*args, **kwargs)
|
||||
for chunk in chunks:
|
||||
#if false, it's internal variable
|
||||
if chunk[1]:
|
||||
name = chunk[0]
|
||||
#remove master if master/slave and add force adding master
|
||||
if '.' in name:
|
||||
cl_chunks.add(name.split('.')[-1])
|
||||
cl_chunks.add(name.split('.')[0])
|
||||
else:
|
||||
cl_chunks.add(name)
|
||||
return chunks
|
||||
|
||||
def getCheetahVar(self, *args, **kwargs):
|
||||
global cl_vars
|
||||
var = super(cl_Parser, self).getCheetahVar(*args, **kwargs)
|
||||
if not var.startswith(u'VFFSL('):
|
||||
cl_vars.add(var)
|
||||
return var
|
||||
|
||||
def getVars():
|
||||
global cl_chunks, cl_vars
|
||||
#retrieve all calculated vars
|
||||
ret = list(cl_chunks - cl_vars)
|
||||
cl_chunks = set()
|
||||
cl_vars = set()
|
||||
return ret
|
||||
|
||||
class CompilerGetVars(Compiler.ModuleCompiler):
|
||||
parserClass = cl_Parser
|
||||
|
||||
|
||||
true_compile = Template.compile
|
||||
@classmethod
|
||||
def cl_compile(kls, *args, **kwargs):
|
||||
kwargs['compilerClass'] = CompilerGetVars
|
||||
kwargs['useCache'] = False
|
||||
return true_compile(*args, **kwargs)
|
||||
Template.compile = cl_compile
|
||||
|
||||
def CompilerGetVar(varName, default=Unspecified):
|
||||
#replaces Cheetah's getVar function
|
||||
#this function lets us record which template variables are accessed through getVar
|
||||
if varName.startswith('%%'):
|
||||
raise Exception('varname should not start with %% {0}'.format(varName))
|
||||
global extra_vars, config
|
||||
config.read_only()
|
||||
try:
|
||||
option = config.creole.find_first(byname=varName)
|
||||
path = config.cfgimpl_get_description().impl_get_path_by_opt(option)
|
||||
value = getattr(config, path)
|
||||
except (AttributeError, ConfigError):
|
||||
try:
|
||||
option = config.creole.find_first(byname=varName, check_properties=False)
|
||||
path = config.cfgimpl_get_description().impl_get_path_by_opt(option)
|
||||
#populate_mandatory(config, option, path, raise_propertyerror=True)
|
||||
config.read_write()
|
||||
populate_mandatories()
|
||||
config.read_only()
|
||||
value = getattr(config, path)
|
||||
except (AttributeError, RequirementError), err:
|
||||
config.read_only()
|
||||
#support default value
|
||||
if default != Unspecified:
|
||||
return default
|
||||
else:
|
||||
raise AttributeError('option:', varName, ':', err)
|
||||
except PropertiesOptionError as err:
|
||||
if default != Unspecified:
|
||||
return default
|
||||
else:
|
||||
raise err
|
||||
except Exception as err:
|
||||
config.read_only()
|
||||
raise err
|
||||
except Exception as err:
|
||||
config.read_only()
|
||||
raise err
|
||||
lpath = '.'.join(path.split('.')[2:])
|
||||
dico = {lpath: value}
|
||||
engine = CreoleTemplateEngine(force_values=dico)
|
||||
name = path.split('.')[-1]
|
||||
extra_vars[option] = name
|
||||
if "." in lpath:
|
||||
spath = lpath.split('.')
|
||||
if spath[0] == spath[1]:
|
||||
ret = engine.creole_variables_dict[name]
|
||||
else:
|
||||
ret = engine.creole_variables_dict[spath[0]].slave[spath[1]]
|
||||
else:
|
||||
ret = engine.creole_variables_dict[name]
|
||||
return ret
|
||||
|
||||
def CompilerGetattr(creolemaster, name, default=None):
|
||||
if not isinstance(creolemaster, CreoleMaster):
|
||||
raise Exception('creolemaster must be CreoleMaster, not {0}'.format(type(creolemaster)))
|
||||
if name not in creolemaster.slave:
|
||||
#FIXME assume name is slave?
|
||||
value = CompilerGetVar(name, default)
|
||||
if creolemaster._index is not None:
|
||||
value = value[creolemaster._index]
|
||||
creolemaster.add_slave(name, value)
|
||||
return getattr(creolemaster, name, default)
|
||||
|
||||
#======================= EOSFUNC =======================
|
||||
eos = {}
|
||||
for func in dir(eosfunc):
|
||||
if not func.startswith('_'):
|
||||
eos[func] = getattr(eosfunc, func)
|
||||
|
||||
#======================= CONFIG =======================
|
||||
def populate_mandatory(config, option, path, raise_propertyerror=False):
|
||||
def _build_network(path):
|
||||
for num in range(0, 4):
|
||||
if path.startswith('creole.interface_{0}'.format(num)):
|
||||
return num
|
||||
#si il y a un test de consistence de type _cons_in_network (l'IP doit être dans un network défini)
|
||||
#on utilise le réseau de ce network #10714
|
||||
if getattr(option, '_consistencies', None) is not None:
|
||||
for const in option._consistencies:
|
||||
if const[0] == '_cons_in_network':
|
||||
try:
|
||||
opt = const[1][1]
|
||||
path = config.cfgimpl_get_description().impl_get_path_by_opt(opt)
|
||||
val = config.getattr(path, force_permissive=True)
|
||||
if isinstance(val, list):
|
||||
val = val[0]
|
||||
return val.split('.')[2]
|
||||
except IndexError:
|
||||
pass
|
||||
return 5
|
||||
def _build_ip(path):
|
||||
if path.endswith('_fichier_link'):
|
||||
return 3
|
||||
elif path.endswith('_proxy_link'):
|
||||
return 2
|
||||
else:
|
||||
#ne pas retourner la même valeur si elle est censé être différente
|
||||
if getattr(option, '_consistencies', None) is not None:
|
||||
for const in option._consistencies:
|
||||
if const[0] == '_cons_not_equal':
|
||||
return 4
|
||||
|
||||
return 1
|
||||
if option.impl_getname().startswith('nom_carte_eth'):
|
||||
value = unicode(option.impl_getname())
|
||||
elif isinstance(option, UnicodeOption):
|
||||
value = u'value'
|
||||
elif isinstance(option, IPOption):
|
||||
value = u'192.168.{0}.{1}'.format(_build_network(path), _build_ip(path))
|
||||
elif isinstance(option, NetworkOption):
|
||||
value = u'192.168.{0}.0'.format(_build_network(path))
|
||||
elif isinstance(option, NetmaskOption):
|
||||
value = u'255.255.255.0'
|
||||
elif isinstance(option, BroadcastOption):
|
||||
value = u'192.168.{0}.255'.format(_build_network(path))
|
||||
elif isinstance(option, EmailOption):
|
||||
value = u'foo@bar.com'
|
||||
elif isinstance(option, URLOption):
|
||||
value = u'http://foo.com/bar'
|
||||
elif isinstance(option, DomainnameOption):
|
||||
allow_without_dot = option._get_extra('_allow_without_dot')
|
||||
o_type = option._get_extra('_dom_type')
|
||||
if option._name == 'smb_workgroup':
|
||||
value = u'othervalue'
|
||||
elif o_type in ['netbios', 'hostname']:
|
||||
value = u'value'
|
||||
else:
|
||||
value = u'value.lan'
|
||||
elif isinstance(option, FilenameOption):
|
||||
value = u'/tmp/foo'
|
||||
elif isinstance(option, ChoiceOption):
|
||||
#FIXME devrait le faire tout seul non ?
|
||||
value = option.impl_get_values(config)[0]
|
||||
elif isinstance(option, IntOption):
|
||||
value = 1
|
||||
elif isinstance(option, PortOption):
|
||||
value = 80
|
||||
elif isinstance(option, DomainnameOption):
|
||||
value = 'foo.com'
|
||||
elif isinstance(option, UsernameOption):
|
||||
value = 'toto'
|
||||
elif isinstance(option, PasswordOption):
|
||||
value = 'P@ssWord'
|
||||
else:
|
||||
raise Exception('the Tiramisu type {0} is not supported by CreoleLint (variable : {1})'.format(type(option), path))
|
||||
validator = option.impl_get_validator()
|
||||
if validator is not None and validator[0] == valid_regexp:
|
||||
regexp = validator[1][''][0]
|
||||
# génération d'une "value" valide
|
||||
# en cas de valid_regexp sans valeur par défaut
|
||||
if regexp == u'^[A-Z][0-9]$':
|
||||
value = u'A1'
|
||||
elif option._name == 'additional_repository_source':
|
||||
# variable avec expression (très) spécifique #20291
|
||||
value = u"deb http://test dist"
|
||||
elif not regexp.startswith(u'^[a-z0-9]') and regexp.startswith('^'):
|
||||
value = regexp[1:]
|
||||
if option.impl_is_multi():
|
||||
if option.impl_is_master_slaves('slave'):
|
||||
#slave should have same length as master
|
||||
masterpath = '.'.join(path.split('.')[:-1]+[path.split('.')[-2]])
|
||||
try:
|
||||
len_master = len(getattr(config, masterpath))
|
||||
val = []
|
||||
for i in range(0, len_master):
|
||||
val.append(value)
|
||||
value = val
|
||||
except:
|
||||
value = [value]
|
||||
else:
|
||||
value = [value]
|
||||
try:
|
||||
config.setattr(path, value, force_permissive=True)
|
||||
except ValueError, err:
|
||||
msg = str('error for {0} type {1}: {2}'.format(path, type(option), err))
|
||||
raise Exception(msg)
|
||||
except PropertiesOptionError, err:
|
||||
if 'frozen' not in err.proptype:
|
||||
if raise_propertyerror:
|
||||
raise err
|
||||
msg = str('error for {0} type {1}: {2}'.format(path, type(option), err))
|
||||
raise Exception(msg)
|
||||
|
||||
|
||||
class Reload(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class Check_Template:
|
||||
|
||||
def __init__(self, template_name):
|
||||
self.all_requires = {}
|
||||
self.current_opt = {}
|
||||
self.od_list = {}
|
||||
global extra_vars
|
||||
#reinit extra_vars
|
||||
extra_vars = {}
|
||||
self.old_dico = []
|
||||
self.current_var = []
|
||||
self.ori_options = []
|
||||
self.file_path = None
|
||||
self.template_name = template_name
|
||||
self.current_container = client.get_container_infos('mail')
|
||||
self.tmpl = None
|
||||
self.is_tmpl = False
|
||||
self.filename_ok = False
|
||||
|
||||
|
||||
def populate_requires(self, option, path, force=False):
|
||||
def _parse_requires(_option):
|
||||
o_requires = _option.impl_getrequires()
|
||||
if o_requires is not None:
|
||||
for requires in o_requires:
|
||||
for require in requires:
|
||||
opt_ = require[0]
|
||||
path_ = config.cfgimpl_get_description().impl_get_path_by_opt(opt_)
|
||||
self.populate_requires(opt_, path_, force=True)
|
||||
if not force and not path.startswith('creole.'):
|
||||
return
|
||||
if option in self.current_opt:
|
||||
return
|
||||
o_requires = option.impl_getrequires()
|
||||
if o_requires is not None:
|
||||
for requires in o_requires:
|
||||
for require in requires:
|
||||
if require[0].impl_is_master_slaves('slave'):
|
||||
path_ = config.cfgimpl_get_description().impl_get_path_by_opt(require[0])
|
||||
s_path = path_.split('.')
|
||||
master_path = 'creole.' + s_path[1] + '.' + s_path[2] + '.' + s_path[2]
|
||||
try:
|
||||
opt_master = config.unwrap_from_path(master_path)
|
||||
config.cfgimpl_get_settings().remove('everything_frozen')
|
||||
populate_mandatory(config, opt_master, master_path)
|
||||
except:
|
||||
pass
|
||||
self.all_requires.setdefault(option, []).append(require[0])
|
||||
if isinstance(option, OptionDescription):
|
||||
self.od_list[path] = option
|
||||
if force and not option._name in self.current_var:
|
||||
self.current_var.append(option._name)
|
||||
if option._name in self.current_var or not path.startswith('creole.'):
|
||||
if not isinstance(option, OptionDescription):
|
||||
if path.startswith('creole.'):
|
||||
self.current_opt[option] = '.'.join(path.split('.')[1:])
|
||||
else:
|
||||
self.current_opt[option] = None
|
||||
_parse_requires(option)
|
||||
#requires could be in parent's too
|
||||
opath = ''
|
||||
for parent in path.split('.')[:-1]:
|
||||
opath += parent
|
||||
if opath in self.od_list:
|
||||
desc = self.od_list[opath]
|
||||
self.current_opt[desc] = None
|
||||
_parse_requires(desc)
|
||||
opath += '.'
|
||||
try:
|
||||
if option._callback is not None:
|
||||
for params in option._callback[1].values():
|
||||
for param in params:
|
||||
if isinstance(param, tuple):
|
||||
opt = param[0]
|
||||
path = config.cfgimpl_get_description().impl_get_path_by_opt(opt)
|
||||
self.populate_requires(opt, path, force=True)
|
||||
except (AttributeError, KeyError):
|
||||
pass
|
||||
|
||||
def read_write(self):
|
||||
config.read_write()
|
||||
config.cfgimpl_get_settings().remove('disabled')
|
||||
config.cfgimpl_get_settings().remove('hidden')
|
||||
config.cfgimpl_get_settings().remove('frozen')
|
||||
|
||||
def change_value(self, path, value, multi, parse_message, option):
|
||||
self.read_write()
|
||||
config.cfgimpl_get_settings()[option].remove('force_default_on_freeze')
|
||||
if multi:
|
||||
if option.impl_is_master_slaves('slave'):
|
||||
s_path = path.split('.')
|
||||
master_path = s_path[0] + '.' + s_path[1] + '.' + s_path[2] + '.' + s_path[2]
|
||||
master_option = config.cfgimpl_get_description().impl_get_opt_by_path(master_path)
|
||||
if getattr(config, master_path) == []:
|
||||
populate_mandatory(config, master_option, master_path)
|
||||
value = [value]
|
||||
if parse_message:
|
||||
print parse_message, value
|
||||
setattr(config, path, value)
|
||||
config.read_only()
|
||||
|
||||
def template(self):
|
||||
self.last_notfound = []
|
||||
def get_value(opt_, path_):
|
||||
try:
|
||||
return getattr(config.creole, path_)
|
||||
except PropertiesOptionError, err:
|
||||
if err.proptype == ['mandatory']:
|
||||
self.read_write()
|
||||
config.cfgimpl_get_settings().remove('mandatory')
|
||||
s_path = path_.split('.')
|
||||
#set value to master
|
||||
if len(s_path) == 3 and s_path[1] != s_path[2]:
|
||||
master_path = 'creole.' + s_path[0] + '.' + s_path[1] + '.' + s_path[1]
|
||||
opt_master = config.unwrap_from_path(master_path)
|
||||
populate_mandatory(config, opt_master, master_path)
|
||||
populate_mandatory(config, opt_, 'creole.' + path_)
|
||||
config.read_only()
|
||||
config.cfgimpl_get_settings().remove('mandatory')
|
||||
try:
|
||||
ret = getattr(config.creole, path_)
|
||||
config.cfgimpl_get_settings().append('mandatory')
|
||||
return ret
|
||||
except PropertiesOptionError:
|
||||
pass
|
||||
raise NotFound('no value')
|
||||
except ConfigError:
|
||||
self.read_write()
|
||||
populate_mandatory(config, opt_, 'creole.' + path_)
|
||||
config.read_only()
|
||||
try:
|
||||
return getattr(config.creole, path_)
|
||||
except ConfigError, err:
|
||||
raise err
|
||||
except PropertiesOptionError, err:
|
||||
raise NotFound('no value')
|
||||
try:
|
||||
is_gen_file = getattr(config, self.file_path)
|
||||
except PropertiesOptionError, err:
|
||||
is_gen_file = False
|
||||
if not is_gen_file:
|
||||
return
|
||||
try:
|
||||
config.read_write()
|
||||
populate_mandatories()
|
||||
config.read_only()
|
||||
dico = {}
|
||||
for opt_, path_ in self.current_opt.items():
|
||||
#path_ is None if it's an OptionDescription
|
||||
if path_ is None:
|
||||
continue
|
||||
try:
|
||||
dico[path_] = get_value(opt_, path_)
|
||||
except NotFound:
|
||||
pass
|
||||
#FIXME revoir le strip_full_path
|
||||
ndico = {}
|
||||
for path_, value in dico.items():
|
||||
sdico = path_.split('.')
|
||||
if len(sdico) == 2:
|
||||
ndico[sdico[1]] = value
|
||||
elif len(sdico) == 3:
|
||||
if sdico[1] == sdico[2]:
|
||||
ndico[sdico[1]] = value
|
||||
else:
|
||||
ndico['.'.join(sdico[1:])] = value
|
||||
else:
|
||||
raise Exception('chemin de longueur inconnu {}'.format(path_))
|
||||
engine = CreoleTemplateEngine(force_values=ndico)
|
||||
dico = engine.creole_variables_dict
|
||||
self.read_write()
|
||||
except ConfigError, err:
|
||||
msg = 'erreur de templating', err
|
||||
raise ValueError(msg)
|
||||
diff = True
|
||||
for old in self.old_dico:
|
||||
if dico.keys() == old.keys():
|
||||
for key in old.keys():
|
||||
if old[key] != dico[key]:
|
||||
diff = False
|
||||
break
|
||||
if not diff:
|
||||
break
|
||||
if not diff:
|
||||
return
|
||||
try:
|
||||
self.old_dico.append(dico)
|
||||
searchlist = [dico, eos, {'is_defined' : IsDefined(dico),
|
||||
'creole_client' : CreoleClient(),
|
||||
'current_container': CreoleGet(self.current_container),
|
||||
}]
|
||||
rtmpl = self.tmpl(searchList=searchlist)
|
||||
rtmpl.getVar = CompilerGetVar
|
||||
rtmpl.getattr = CompilerGetattr
|
||||
rtmpl = str(rtmpl)
|
||||
#print rtmpl
|
||||
self.is_tmpl = True
|
||||
except NotFound, err:
|
||||
lst = getVars()
|
||||
if lst == []:
|
||||
raise Exception("Il manque une option", err, 'avec le dictionnaire', dico)
|
||||
for ls in lst:
|
||||
try:
|
||||
CompilerGetVar(ls)
|
||||
except AttributeError:
|
||||
self.last_notfound.append(ls)
|
||||
raise Reload('')
|
||||
except Exception, err:
|
||||
raise Exception("Il y a une erreur", err, 'avec le dictionnaire', dico)
|
||||
|
||||
def check_reload_with_extra(self):
|
||||
#if extra_vars has value, check if not already in current_opt
|
||||
global extra_vars
|
||||
if extra_vars != {}:
|
||||
oret = set(extra_vars.keys())
|
||||
opt_requires = oret & set(self.all_requires.keys())
|
||||
for opt_ in opt_requires:
|
||||
oret.update(self.all_requires[opt_])
|
||||
dont_exists = set(oret) - set(self.current_opt.keys())
|
||||
ret = []
|
||||
for opt_ in dont_exists:
|
||||
try:
|
||||
ret.append(extra_vars[opt_])
|
||||
except KeyError:
|
||||
ret.append(opt_._name)
|
||||
extra_vars = {}
|
||||
if ret == []:
|
||||
return None
|
||||
return ret
|
||||
|
||||
def test_all_values_for(self, options, cpt):
|
||||
option = options[0]
|
||||
parse_message = None
|
||||
if DEBUG:
|
||||
parse_message = '*' * cpt + '>' + option._name
|
||||
|
||||
if not isinstance(option, ChoiceOption):
|
||||
msg = str('pas simple la... ' + option._name)
|
||||
raise NotImplementedError(msg)
|
||||
multi = option.impl_is_multi()
|
||||
path = config.cfgimpl_get_description().impl_get_path_by_opt(option)
|
||||
for value in option.impl_get_values(config):
|
||||
self.change_value(path, value, multi, parse_message, option)
|
||||
if options[1:] != []:
|
||||
#if already value to test, restart test_all_values_for
|
||||
ret = self.test_all_values_for(options[1:], cpt + 1)
|
||||
if ret != None:
|
||||
return ret
|
||||
else:
|
||||
need_reload = False
|
||||
try:
|
||||
self.template()
|
||||
except Reload:
|
||||
need_reload = True
|
||||
ret = self.check_reload_with_extra()
|
||||
if need_reload and ret is None:
|
||||
notfound = []
|
||||
paths = config.cfgimpl_get_description()._cache_paths[1]
|
||||
for ls in self.last_notfound:
|
||||
#if the variable is local to the template (i.e. not one of the config's variables)
|
||||
for path in paths:
|
||||
if path.endswith('.' + ls):
|
||||
notfound.append(ls)
|
||||
break
|
||||
if notfound != []:
|
||||
raise Exception('variable not found after reload {0}'.format(notfound))
|
||||
if ret is not None:
|
||||
return ret
|
||||
|
||||
|
||||
def open_file(self, force_var):
|
||||
# Open template and compile it
|
||||
# retrieve template vars (add force_var if needed)
|
||||
filecontent = open(self.template_name).read()
|
||||
#note: the template content is read as-is, no unicode conversion is performed here
self.tmpl = Template.compile(filecontent, compilerSettings=compilerSettings)
#compilerClass=CompilerGetVars is intentionally not passed to Template.compile
|
||||
self.current_var = getVars()
|
||||
if force_var:
|
||||
self.current_var.extend(force_var)
|
||||
|
||||
def populate_file(self, path, option):
|
||||
if path.startswith('containers.files.file'):
|
||||
if path.endswith('.source') and option.impl_getdefault().endswith('/{0}'.format(self.template_name.split('/')[-1])):
|
||||
self.filename_ok = True
|
||||
if self.filename_ok and path.endswith('.activate'):
|
||||
self.file_path = path
|
||||
self.filename_ok = False
|
||||
self.populate_requires(option, path, force=True)
|
||||
|
||||
def test_all_values(self):
|
||||
try:
|
||||
options = list(set(self.all_requires.keys())&set(self.current_opt.keys()))
|
||||
need_tmpl = False
|
||||
if options != []:
|
||||
requires_options = set()
|
||||
for opt in options:
|
||||
for op in self.all_requires[opt]:
|
||||
if 'frozen' not in config.cfgimpl_get_settings()[op]:
|
||||
requires_options.add(op)
|
||||
if requires_options == set([]):
|
||||
need_tmpl = True
|
||||
else:
|
||||
self.ori_options = requires_options
|
||||
ret = self.test_all_values_for(list(requires_options), 0)
|
||||
if ret is not None:
|
||||
if DEBUG:
|
||||
print "reload with", ret
|
||||
self.check_template(ret, already_load=True)
|
||||
else:
|
||||
need_tmpl = True
|
||||
|
||||
if need_tmpl is True:
|
||||
try:
|
||||
self.template()
|
||||
except:
|
||||
self.test_all_values()
|
||||
except Exception, err:
|
||||
if DEBUG:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
msg = '{0}: {1}'.format(self.template_name, err)
raise Exception(msg)
|
||||
|
||||
def check_template(self, force_var=None, already_load=False):
|
||||
#remove all modification (value, properties, ...)
|
||||
open_error = None
|
||||
try:
|
||||
self.open_file(force_var)
|
||||
except Exception, err:
|
||||
open_error = "problème à l'ouverture du fichier {}".format(self.template_name)
|
||||
|
||||
config.read_only()
|
||||
for index, option in enumerate(config.cfgimpl_get_description()._cache_paths[0]):
|
||||
path = config.cfgimpl_get_description()._cache_paths[1][index]
|
||||
self.populate_file(path, option)
|
||||
self.populate_requires(option, path)
|
||||
if self.file_path is None:
|
||||
if open_error is not None:
|
||||
print "le fichier {0} non présent dans un dictionnaire a un problème : {1}".format(basename(self.template_name),
|
||||
open_error)
|
||||
else:
|
||||
print " \\-- fichier non présent dans un dictionnaire {0}".format(self.template_name)
|
||||
return
|
||||
if open_error is not None:
|
||||
raise Exception(open_error)
|
||||
|
||||
if not already_load:
|
||||
print " \\--", self.template_name
|
||||
self.test_all_values()
|
||||
if not self.is_tmpl:
|
||||
print "pas de templating !"
|
||||
|
||||
|
||||
def populate_mandatories():
|
||||
for path in config.cfgimpl_get_values().mandatory_warnings(config):
|
||||
if path.startswith('creole.'):
|
||||
option = config.cfgimpl_get_description().impl_get_opt_by_path(path)
|
||||
try:
|
||||
populate_mandatory(config, option, path)
|
||||
except PropertiesOptionError:
|
||||
pass
|
||||
|
||||
|
||||
def parse_templates(templates_name):
|
||||
global config, cl_chunks, cl_vars, extra_vars
|
||||
config = creole_loader(load_values=False, load_extra=True)
|
||||
config.read_write()
|
||||
populate_mandatories()
|
||||
cfg = config
|
||||
for template_name in templates_name:
|
||||
cl_chunks = set()
|
||||
cl_vars = set()
|
||||
extra_vars = {}
|
||||
config = cfg.duplicate()
|
||||
config.read_write()
|
||||
populate_mandatories()
|
||||
ctmpl = Check_Template(template_name)
|
||||
try:
|
||||
ctmpl.check_template()
|
||||
except Exception, err:
|
||||
if DEBUG:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
print_red(str(err))
|
||||
sys.exit(1)
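For reference, a minimal driver for the checker above could look like the sketch below; the template directory, the glob pattern and the assumption that no other entry point exists elsewhere in this script are illustrative only, not part of the original code.

if __name__ == '__main__':
    from glob import glob
    # check every shipped Creole template (directory path is an assumption)
    templates = sorted(glob('/usr/share/eole/creole/distrib/*'))
    parse_templates(templates)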
|
||||
|
125  creole/lint/terminalreport.py  Executable file
@@ -0,0 +1,125 @@
"""
|
||||
Implements terminal reporting of the full validation process.
|
||||
|
||||
Implements the various reporting hooks.
|
||||
XXX: Currently in progress, NOT IN WORKING STATE.
|
||||
|
||||
"""
|
||||
import sys
|
||||
|
||||
def pytest_addoption(parser):
|
||||
group = parser.getgroup("terminal reporting", after="general")
|
||||
group._addoption('-v', '--verbose', action="count",
|
||||
dest="verbose", default=0, help="increase verbosity."),
|
||||
group.addoption('--report',
|
||||
action="store", dest="report", default=None, metavar="opts",
|
||||
help="comma separated options, valid: skipped,xfailed")
|
||||
group._addoption('--fulltrace',
|
||||
action="store_true", dest="fulltrace", default=False,
|
||||
help="don't cut any tracebacks (default is to cut).")
|
||||
|
||||
group.addoption('--traceconfig',
|
||||
action="store_true", dest="traceconfig", default=False,
|
||||
help="trace considerations of conftest.py files."),
|
||||
|
||||
class TerminalReporter:
|
||||
def __init__(self, config, file=None):
|
||||
self.config = config
|
||||
self.stats = {}
|
||||
self.curdir = py.path.local()
|
||||
if file is None:
|
||||
file = sys.stdout
|
||||
self._tw = TerminalWriter(file)
|
||||
self.currentfspath = None
|
||||
self._reportopt = getreportopt(config.getvalue('report'))
|
||||
|
||||
def hasopt(self, name):
|
||||
return self._reportopt.get(name, False)
|
||||
|
||||
def write_fspath_result(self, fspath, res):
|
||||
fspath = self.curdir.bestrelpath(fspath)
|
||||
if fspath != self.currentfspath:
|
||||
self._tw.line()
|
||||
relpath = self.curdir.bestrelpath(fspath)
|
||||
self._tw.write(relpath + " ")
|
||||
self.currentfspath = fspath
|
||||
self._tw.write(res)
|
||||
|
||||
def write_ensure_prefix(self, prefix, extra="", **kwargs):
|
||||
if self.currentfspath != prefix:
|
||||
self._tw.line()
|
||||
self.currentfspath = prefix
|
||||
self._tw.write(prefix)
|
||||
if extra:
|
||||
self._tw.write(extra, **kwargs)
|
||||
self.currentfspath = -2
|
||||
|
||||
def ensure_newline(self):
|
||||
if self.currentfspath:
|
||||
self._tw.line()
|
||||
self.currentfspath = None
|
||||
|
||||
def write_line(self, line, **markup):
|
||||
line = str(line)
|
||||
self.ensure_newline()
|
||||
self._tw.line(line, **markup)
|
||||
|
||||
def write_sep(self, sep, title=None, **markup):
|
||||
self.ensure_newline()
|
||||
self._tw.sep(sep, title, **markup)
|
||||
|
||||
def getoutcomeword(self, rep):
|
||||
if rep.passed:
|
||||
return "PASS", dict(green=True)
|
||||
elif rep.failed:
|
||||
return "FAIL", dict(red=True)
|
||||
elif rep.skipped:
return "SKIP", dict(yellow=True)
|
||||
else:
|
||||
return "???", dict(red=True)
|
||||
|
||||
#
|
||||
# summaries for sessionfinish
|
||||
#
|
||||
|
||||
def summary_failures(self):
|
||||
if 'failed' in self.stats and self.config.option.tbstyle != "no":
|
||||
self.write_sep("=", "FAILURES")
|
||||
for rep in self.stats['failed']:
|
||||
msg = self._getfailureheadline(rep)
|
||||
self.write_sep("_", msg)
|
||||
self.write_platinfo(rep)
|
||||
rep.toterminal(self._tw)
|
||||
|
||||
def summary_errors(self):
|
||||
if 'error' in self.stats and self.config.option.tbstyle != "no":
|
||||
self.write_sep("=", "ERRORS")
|
||||
for rep in self.stats['error']:
|
||||
msg = self._getfailureheadline(rep)
|
||||
if not hasattr(rep, 'when'):
|
||||
# collect
|
||||
msg = "ERROR during collection " + msg
|
||||
elif rep.when == "setup":
|
||||
msg = "ERROR at setup of " + msg
|
||||
elif rep.when == "teardown":
|
||||
msg = "ERROR at teardown of " + msg
|
||||
self.write_sep("_", msg)
|
||||
self.write_platinfo(rep)
|
||||
rep.toterminal(self._tw)
|
||||
|
||||
def summary_stats(self):
|
||||
session_duration = py.std.time.time() - self._sessionstarttime
|
||||
|
||||
keys = "failed passed skipped deselected".split()
|
||||
for key in self.stats.keys():
|
||||
if key not in keys:
|
||||
keys.append(key)
|
||||
parts = []
|
||||
for key in keys:
|
||||
val = self.stats.get(key, None)
|
||||
if val:
|
||||
parts.append("%d %s" %(len(val), key))
|
||||
line = ", ".join(parts)
|
||||
# XXX coloring
|
||||
self.write_sep("=", "%s in %.2f seconds" %(line, session_duration))
|
||||
|
139  creole/lint/terminalwriter.py  Executable file
@@ -0,0 +1,139 @@
# -*- coding: utf-8 -*-
"""

Helper functions for writing to terminals and files.
XXX: Currently in progress, NOT IN WORKING STATE.

"""

import sys, os

def _getdimensions():
    import termios, fcntl, struct
    call = fcntl.ioctl(0, termios.TIOCGWINSZ, "\000"*8)
    height, width = struct.unpack("hhhh", call)[:2]
    return height, width

def get_terminal_width():
    try:
        height, width = _getdimensions()
    except (SystemExit, KeyboardInterrupt):
        raise
    except:
        # FALLBACK
        width = int(os.environ.get('COLUMNS', 80)) - 1
    # XXX the windows getdimensions may be bogus, let's sanify a bit
    width = max(width, 40)  # we always need 40 chars
    return width

terminal_width = get_terminal_width()

# XXX unify with _escaped func below
def ansi_print(text, file=None, newline=True, flush=False):
    if file is None:
        file = sys.stderr
    text = text.strip()
    if newline:
        text += '\n'
    file.write(text)
    if flush:
        file.flush()
    if file:
        file.close()

def should_do_markup(file):
    return hasattr(file, 'isatty') and file.isatty() \
           and os.environ.get('TERM') != 'dumb'

class TerminalWriter(object):
    _esctable = dict(black=30, red=31, green=32, yellow=33,
                     blue=34, purple=35, cyan=36, white=37,
                     Black=40, Red=41, Green=42, Yellow=43,
                     Blue=44, Purple=45, Cyan=46, White=47,
                     bold=1, light=2, blink=5, invert=7)

    def __init__(self, file=None, encoding=None):
        self.encoding = encoding
        if file is None:
            file = sys.stdout
        elif hasattr(file, '__call__'):
            file = WriteFile(file, encoding=encoding)
        self._file = file
        self.fullwidth = get_terminal_width()
        self.hasmarkup = should_do_markup(file)

    def _escaped(self, text, esc):
        if esc and self.hasmarkup:
            text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
                    text + '\x1b[0m')
        return text

    def markup(self, text, **kw):
        esc = []
        for name in kw:
            if name not in self._esctable:
                raise ValueError("unknown markup: %r" % (name,))
            if kw[name]:
                esc.append(self._esctable[name])
        return self._escaped(text, tuple(esc))

    def sep(self, sepchar, title=None, fullwidth=None, **kw):
        if fullwidth is None:
            fullwidth = self.fullwidth
        # the goal is to have the line be as long as possible
        # under the condition that len(line) <= fullwidth
        if title is not None:
            # we want 2 + 2*len(fill) + len(title) <= fullwidth
            # i.e.    2 + 2*len(sepchar)*N + len(title) <= fullwidth
            #         2*len(sepchar)*N <= fullwidth - len(title) - 2
            #         N <= (fullwidth - len(title) - 2) // (2*len(sepchar))
            N = (fullwidth - len(title) - 2) // (2*len(sepchar))
            fill = sepchar * N
            line = "%s %s %s" % (fill, title, fill)
        else:
            # we want len(sepchar)*N <= fullwidth
            # i.e. N <= fullwidth // len(sepchar)
            line = sepchar * (fullwidth // len(sepchar))
        # in some situations there is room for an extra sepchar at the right,
        # in particular if we consider that with a sepchar like "_ " the
        # trailing space is not important at the end of the line
        if len(line) + len(sepchar.rstrip()) <= fullwidth:
            line += sepchar.rstrip()

        self.line(line, **kw)

    def write(self, s, **kw):
        if s:
            s = self._getbytestring(s)
            if self.hasmarkup and kw:
                s = self.markup(s, **kw)
            self._file.write(s)
            self._file.flush()

    def _getbytestring(self, s):
        # XXX review this and the whole logic
        if self.encoding and sys.version_info < (3, 0) and isinstance(s, unicode):
            return s.encode(self.encoding)
        elif not isinstance(s, str):
            return str(s)
        return s

    def line(self, s='', **kw):
        self.write(s, **kw)
        self.write('\n')

class WriteFile(object):
    def __init__(self, writemethod, encoding=None):
        self.encoding = encoding
        self._writemethod = writemethod

    def write(self, data):
        if self.encoding:
            data = data.encode(self.encoding)
        self._writemethod(data)

    def flush(self):
        return
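A short usage sketch for the TerminalWriter above, added for illustration only (the module itself is flagged as not in working state, and the actual output depends on the terminal):

from creole.lint.terminalwriter import TerminalWriter

tw = TerminalWriter()               # writes to sys.stdout by default
tw.sep("=", "FAILURES", red=True)   # on an 80-column terminal: N = (80 - 8 - 2) // 2 = 35,
                                    # so the line is 35 '=' + ' FAILURES ' + 35 '='
tw.line("creole.nom_machine : valeur manquante", bold=True)
tw.write("done")
tw.line()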
34  creole/lint/warning.py  Normal file
@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-

import sys
from creole.lint import warnsymb


class Warn:

    def __init__(self, write_level, itemname, warnno, comment, checks):
        self.warnno = warnno
        self.comment = comment
        self.checks = checks
        self.write_level = getattr(warnsymb, write_level)

    def to_dict(self):
        """
        format the warning as a dict of (line, variable) locations indexed by source file
        """
        dico_loc = {}
        for var in self.checks:
            if hasattr(var, 'location'):
                locs = var.location
                for vfile, vline in locs:
                    if vfile == 'dictionnaire':
                        raise Exception('vfile ne doit pas se nommer dictionnaire !!!')
                    if vfile not in dico_loc:
                        dico_loc[vfile] = []
                    dico_loc[vfile].append((vline, var))
            else:
                if 'dictionnaire' not in dico_loc:
                    dico_loc['dictionnaire'] = []
                dico_loc['dictionnaire'].append((None, var))
        # ret = ["[%s:%s:%s] %s : %s (dictionnaire)" %(level, name, self.itemname, self.comment, vname)]
        return dico_loc
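A hedged example of Warn.to_dict from the class above; the variable objects are simple stand-ins, not real creole lint objects, and the warning number and messages are illustrative only:

from creole.lint.warning import Warn

class FakeVar(object):
    def __init__(self, name, location=None):
        self.name = name
        if location is not None:
            self.location = location

located = FakeVar('nom_machine', location=[('00_common.xml', 12)])
unlocated = FakeVar('variable_orpheline')
warn = Warn('warning', 'unused_vars', 7, 'variable non utilisee', [located, unlocated])
print(warn.to_dict())
# -> {'00_common.xml': [(12, <FakeVar>)], 'dictionnaire': [(None, <FakeVar>)]}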
33  creole/lint/warnsymb.py  Executable file
@@ -0,0 +1,33 @@
# -*- coding: utf-8 -*-
"""
Standard lint warning symbols
"""

"""Dictionaries providing a mapping from a warning level value to its string
name, in the same spirit as errno.errorcode (for instance,
errno.errorcode[errno.EPERM] maps to 'EPERM')."""

errorlevel = {
    1: 'error',
    2: 'warning',
    3: 'info',
    }

errorcode = {
    1: ('ERROR', 1),
    2: ('WARNING', 2),
    3: ('INFO', 3),
    4: ('NAME', 1),
    5: ('NAME', 2),
    6: ('NAME', 3),
    7: ('UNUSED', 1),
    8: ('UNUSED', 2),
    9: ('UNUSED', 3),
    }

globs = globals()

# expose each level name (error, warning, info) as a module-level constant
for key, value in errorlevel.items():
    globs[value] = key
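The globals() injection at the end of warnsymb.py is what lets warning.py resolve a textual level with getattr(warnsymb, write_level); a small sanity-check sketch:

from creole.lint import warnsymb

assert warnsymb.error == 1
assert warnsymb.warning == 2
assert warnsymb.info == 3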
873  creole/loader.py  Normal file
@@ -0,0 +1,873 @@
"""creole loader
|
||||
flattened XML specific
|
||||
"""
|
||||
from os.path import join, isfile, isdir
|
||||
from os import listdir
|
||||
#from ast import literal_eval
|
||||
from lxml.etree import parse, DTD
|
||||
|
||||
from tiramisu.option import (UnicodeOption, OptionDescription, PortOption,
|
||||
IntOption, ChoiceOption, BoolOption, SymLinkOption, IPOption,
|
||||
NetworkOption, NetmaskOption, DomainnameOption, BroadcastOption,
|
||||
URLOption, EmailOption, FilenameOption, UsernameOption, DateOption,
|
||||
PasswordOption, BoolOption, Leadership)
|
||||
from tiramisu import Config, MetaConfig, MixConfig
|
||||
from tiramisu.setting import groups
|
||||
from tiramisu.error import ConfigError
|
||||
from tiramisu.setting import owners
|
||||
from tiramisu import Params, ParamOption, ParamValue, ParamContext
|
||||
|
||||
from .config import (FLATTENED_CREOLE_DIR, dtdfilename, eoledirs, eoleextradico, forbiddenextra,
|
||||
configeol, eoleextraconfig)
|
||||
from .i18n import _
|
||||
from .var_loader import convert_tiramisu_value, modes_level, MACOption # FIXME YO
|
||||
from .loader1 import load_config_eol, load_extras, _list_extras
|
||||
#For compatibility
|
||||
from .loader1 import config_save_values, config_load_store, config_get_values, add_eol_version
|
||||
from .loader1 import load_store, load_config_store, load_values
|
||||
from .xmlreflector import HIGH_COMPATIBILITY
|
||||
#from . import eosfunc
|
||||
from .objspace import CreoleObjSpace
|
||||
import imp
|
||||
|
||||
|
||||
class CreoleLoaderError(Exception):
|
||||
pass
|
||||
|
||||
CONVERT_OPTION = {'number': dict(opttype=IntOption),
|
||||
'choice': dict(opttype=ChoiceOption),
|
||||
'string': dict(opttype=UnicodeOption),
|
||||
'password': dict(opttype=PasswordOption),
|
||||
'mail': dict(opttype=EmailOption),
|
||||
'boolean': dict(opttype=BoolOption),
|
||||
'symlink': dict(opttype=SymLinkOption),
|
||||
'filename': dict(opttype=FilenameOption),
|
||||
'date': dict(opttype=DateOption),
|
||||
'unix_user': dict(opttype=UsernameOption),
|
||||
'ip': dict(opttype=IPOption, initkwargs={'allow_reserved': True}),
|
||||
'local_ip': dict(opttype=IPOption, initkwargs={'private_only': True, 'warnings_only': True}),
|
||||
'netmask': dict(opttype=NetmaskOption),
|
||||
'network': dict(opttype=NetworkOption),
|
||||
'broadcast': dict(opttype=BroadcastOption),
|
||||
'netbios': dict(opttype=DomainnameOption, initkwargs={'type_': 'netbios', 'warnings_only': True}),
|
||||
'domain': dict(opttype=DomainnameOption, initkwargs={'type_': 'domainname', 'allow_ip': True, 'allow_without_dot': True}),
|
||||
'domain_strict': dict(opttype=DomainnameOption, initkwargs={'type_': 'domainname', 'allow_ip': False}),
|
||||
'hostname': dict(opttype=DomainnameOption, initkwargs={'type_': 'hostname', 'allow_ip': True}),
|
||||
'hostname_strict': dict(opttype=DomainnameOption, initkwargs={'type_': 'hostname', 'allow_ip': False}),
|
||||
'web_address': dict(opttype=URLOption, initkwargs={'allow_ip': True, 'allow_without_dot': True}),
|
||||
'port': dict(opttype=PortOption, initkwargs={'allow_private': True}),
|
||||
'mac': dict(opttype=MACOption) # FIXME YO
|
||||
}
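#Added illustrative comment: with this table, a flattened <variable type="ip"/> is
#built as IPOption(..., allow_reserved=True), a type="port" as
#PortOption(..., allow_private=True), and so on; only entries with 'initkwargs'
#contribute extra constructor arguments.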
|
||||
|
||||
|
||||
REMOVED_ATTRIB = ['path', 'type']
|
||||
|
||||
|
||||
class Elt(object):
|
||||
def __init__(self, attrib):
|
||||
self.attrib = attrib
|
||||
|
||||
|
||||
class PopulateTiramisuObjects(object):
|
||||
def __init__(self):
|
||||
self.storage = ElementStorage()
|
||||
self.booleans = []
|
||||
self.force_store_values = set()
|
||||
self.separators = {}
|
||||
self.groups = {}
|
||||
|
||||
def parse_dtd(self, dtdfilename):
|
||||
"""Loads the Creole DTD
|
||||
|
||||
:raises IOError: if the DTD is not found
|
||||
|
||||
:param dtdfilename: the full filename of the Creole DTD
|
||||
"""
|
||||
if not isfile(dtdfilename):
|
||||
raise IOError(_("no such DTD file: {}").format(dtdfilename))
|
||||
with open(dtdfilename, 'r') as dtdfd:
|
||||
dtd = DTD(dtdfd)
|
||||
for elt in dtd.iterelements():
|
||||
if elt.name == 'variable':
|
||||
for attr in elt.iterattributes():
|
||||
if set(attr.itervalues()) == set(['True', 'False']):
|
||||
self.booleans.append(attr.name)
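#Added note: in practice the attributes gathered here are the <variable> attributes
#declared in the DTD with the enumeration (True|False), typically flags such as
#'multi' or 'hidden' (the exact list depends on the DTD shipped with creole).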
|
||||
|
||||
def make_tiramisu_objects(self, xmlroot, creolefunc_file, load_extra=True):
|
||||
elt = Elt({'name': 'baseoption'})
|
||||
family = Family(elt, self.booleans)
|
||||
self.storage.add('.', family)
|
||||
self.eosfunc = imp.load_source('eosfunc', creolefunc_file)
|
||||
|
||||
elts = {}
|
||||
for elt in xmlroot:
|
||||
elts.setdefault(elt.tag, []).append(elt)
|
||||
list_elts = list(elts.keys())
|
||||
if 'family' in list_elts:
|
||||
list_elts.remove('family')
|
||||
list_elts.insert(0, 'family')
|
||||
for elt in list_elts:
|
||||
xmlelts_ = elts[elt]
|
||||
if elt == 'family':
|
||||
xmlelts = []
|
||||
actions = None
|
||||
# `creole` family has to be loaded before any other family
|
||||
# because `extra` family could use `creole` variables.
|
||||
# `actions` family has to be loaded at the very end
|
||||
# because it may use `creole` or `extra` variables
|
||||
for xml in xmlelts_:
|
||||
if not load_extra and xml.attrib['name'] not in ['creole', 'containers']:
|
||||
continue
|
||||
if xml.attrib['name'] == 'creole':
|
||||
xmlelts.insert(0, xml)
|
||||
elif xml.attrib['name'] == 'actions':
|
||||
actions = xml
|
||||
else:
|
||||
xmlelts.append(xml)
|
||||
if actions is not None:
|
||||
xmlelts.append(actions)
|
||||
else:
|
||||
xmlelts = xmlelts_
|
||||
for xmlelt in xmlelts:
|
||||
if xmlelt.tag == 'family':
|
||||
self._iter_family(xmlelt, family=family)
|
||||
elif xmlelt.tag == 'help':
|
||||
self._iter_help(xmlelt)
|
||||
elif xmlelt.tag == 'constraints':
|
||||
self._iter_constraints(xmlelt, load_extra)
|
||||
else:
|
||||
raise CreoleLoaderError(_('unknown tag {}').format(xmlelt.tag))
|
||||
|
||||
def _populate_variable(self, elt, subpath, is_slave, is_master):
|
||||
variable = Variable(elt, self.booleans, self.storage, is_slave, is_master, self.eosfunc)
|
||||
path = self._build_path(subpath, elt)
|
||||
properties = variable.attrib.get('properties', [])
|
||||
if 'force_store_value' in properties or "auto_freeze" in properties:
|
||||
self.force_store_values.add(path)
|
||||
self.storage.add(path, variable)
|
||||
return variable
|
||||
|
||||
def _populate_family(self, elt, subpath):
|
||||
if subpath is None:
|
||||
force_icon = False
|
||||
else:
|
||||
force_icon = not subpath.startswith('containers') and not subpath.startswith('actions')
|
||||
family = Family(elt, self.booleans, force_icon)
|
||||
path = self._build_path(subpath, elt)
|
||||
self.storage.add(path, family)
|
||||
return family
|
||||
|
||||
def _build_path(self, subpath, elt):
|
||||
if subpath is None:
|
||||
subpath = elt.attrib['name']
|
||||
else:
|
||||
subpath += '.' + elt.attrib['name']
|
||||
return subpath
|
||||
|
||||
def _iter_constraints(self, xmlelt, load_extra):
|
||||
for elt in xmlelt:
|
||||
if elt.tag == 'fill':
|
||||
self._parse_fill(elt, load_extra)
|
||||
elif elt.tag == 'condition':
|
||||
self._parse_condition(elt, load_extra)
|
||||
elif elt.tag == 'check':
|
||||
self._parse_check(elt, load_extra)
|
||||
else:
|
||||
raise CreoleLoaderError(_('unknown constraint {}').format(elt.tag))
|
||||
|
||||
def _check_extra(self, variable, load_extra):
|
||||
if load_extra:
|
||||
return True
|
||||
return variable.startswith('creole.') or variable.startswith('containers.')
|
||||
|
||||
|
||||
def _parse_fill(self, elt, load_extra):
|
||||
if not self._check_extra(elt.attrib['target'], load_extra):
|
||||
return
|
||||
callback = getattr(self.eosfunc, elt.attrib['name'])
|
||||
callback_params = {}
|
||||
for param in elt:
|
||||
name = param.attrib.get('name', '')
|
||||
if param.attrib['type'] == 'string':
|
||||
value = str(param.text)
|
||||
elif param.attrib['type'] == 'eole':
|
||||
hidden = param.attrib['hidden']
|
||||
if hidden == 'True':
|
||||
hidden = False
|
||||
elif hidden == 'False':
|
||||
hidden = True
|
||||
else:
|
||||
raise CreoleLoaderError(_('unknown hidden boolean {}').format(hidden))
|
||||
if not self._check_extra(param.text, load_extra):
|
||||
return
|
||||
value = [self.storage.get(param.text), hidden]
|
||||
elif param.attrib['type'] == 'number':
|
||||
value = int(param.text)
|
||||
elif param.attrib['type'] == 'context':
|
||||
value = (None,)
|
||||
else:
|
||||
raise CreoleLoaderError(_('unknown param type {} in fill to {}').format(param.attrib['type'], elt.attrib['target']))
|
||||
callback_params.setdefault(name, []).append(value)
|
||||
if callback_params == {}:
|
||||
callback_params = None
|
||||
self.storage.add_callback(elt.attrib['target'], callback, callback_params)
|
||||
|
||||
def _parse_check(self, elt, load_extra):
|
||||
if not self._check_extra(elt.attrib['target'], load_extra):
|
||||
return
|
||||
all_param_eole = True
|
||||
for param in elt:
|
||||
if param.attrib.get('type') != 'eole':
|
||||
all_param_eole = False
|
||||
break
|
||||
if elt.attrib['name'] == 'valid_enum':
|
||||
# only for valid_enum with checkval to True
|
||||
if len(elt) != 1:
|
||||
raise CreoleLoaderError(_('valid_enum cannot have more than one param for {}').format(elt.attrib['target']))
|
||||
if elt.attrib['probe'] == 'True':
|
||||
proposed = elt[0].text
|
||||
type_ = 'string'
|
||||
elif elt[0].attrib['type'] == 'eole':
|
||||
proposed = elt[0].text
|
||||
type_ = 'eole'
|
||||
else:
|
||||
#proposed_value = literal_eval(elt[0].text)
|
||||
proposed_value = eval(elt[0].text)
|
||||
proposed = tuple(proposed_value)
|
||||
type_ = 'string'
|
||||
self.storage.add_information(elt.attrib['target'], 'proposed_value', {'value': proposed, 'type': type_})
|
||||
|
||||
validator = getattr(self.eosfunc, elt.attrib['name'])
|
||||
elif elt.attrib['name'] == 'valid_differ' and all_param_eole:
|
||||
if (HIGH_COMPATIBILITY and len(elt) not in [0, 1]) or (not HIGH_COMPATIBILITY and len(elt) != 1):
|
||||
raise CreoleLoaderError(_('valid_differ length should be 1'))
|
||||
if HIGH_COMPATIBILITY and len(elt) == 1:
|
||||
if not self._check_extra(elt[0].text, load_extra):
|
||||
return
|
||||
variables = [self.storage.get(elt[0].text)]
|
||||
else:
|
||||
variables = []
|
||||
self.storage.add_consistency(elt.attrib['target'],
|
||||
'not_equal',
|
||||
variables,
|
||||
elt.attrib['warnings_only'],
|
||||
elt.attrib['transitive'])
|
||||
elif elt.attrib['name'] == 'valid_networknetmask':
|
||||
if len(elt) != 1:
|
||||
raise CreoleLoaderError(_('valid_networknetmask length should be 1'))
|
||||
if not all_param_eole:
|
||||
raise CreoleLoaderError(_('valid_networknetmask must have only eole variable'))
|
||||
variables = [self.storage.get(elt[0].text)]
|
||||
self.storage.add_consistency(elt.attrib['target'],
|
||||
'network_netmask',
|
||||
variables,
|
||||
elt.attrib['warnings_only'],
|
||||
elt.attrib['transitive'])
|
||||
elif elt.attrib['name'] == 'valid_ipnetmask':
|
||||
if len(elt) != 1:
|
||||
raise CreoleLoaderError(_('valid_ipnetmask length should be 1'))
|
||||
if not all_param_eole:
|
||||
raise CreoleLoaderError(_('valid_ipnetmask must have only eole variable'))
|
||||
if not self._check_extra(elt[0].text, load_extra):
|
||||
return
|
||||
variables = [self.storage.get(elt[0].text)]
|
||||
self.storage.add_consistency(elt.attrib['target'],
|
||||
'ip_netmask',
|
||||
variables,
|
||||
elt.attrib['warnings_only'],
|
||||
elt.attrib['transitive'])
|
||||
elif elt.attrib['name'] == 'valid_broadcast':
|
||||
if len(elt) != 2:
|
||||
raise CreoleLoaderError(_('valid_broadcast length should be 2'))
|
||||
if not all_param_eole:
|
||||
raise CreoleLoaderError(_('valid_broadcast must have only eole variable'))
|
||||
if not self._check_extra(elt[0].text, load_extra):
|
||||
return
|
||||
variables = [self.storage.get(elt[0].text)]
|
||||
if not self._check_extra(elt[1].text, load_extra):
|
||||
return
|
||||
variables.append(self.storage.get(elt[1].text))
|
||||
self.storage.add_consistency(elt.attrib['target'],
|
||||
'broadcast',
|
||||
variables,
|
||||
elt.attrib['warnings_only'],
|
||||
elt.attrib['transitive'])
|
||||
elif elt.attrib['name'] == 'valid_in_network':
|
||||
if len(elt) != 2:
|
||||
raise CreoleLoaderError(_('valid_in_network length should be 2'))
|
||||
if not all_param_eole:
|
||||
raise CreoleLoaderError(_('valid_in_network must have only eole variable'))
|
||||
if not self._check_extra(elt[0].text, load_extra):
|
||||
return
|
||||
variables = [self.storage.get(elt[0].text)]
|
||||
if not self._check_extra(elt[1].text, load_extra):
|
||||
return
|
||||
variables.append(self.storage.get(elt[1].text))
|
||||
self.storage.add_consistency(elt.attrib['target'],
|
||||
'in_network',
|
||||
variables,
|
||||
elt.attrib['warnings_only'],
|
||||
elt.attrib['transitive'])
|
||||
else:
|
||||
validator = getattr(self.eosfunc, elt.attrib['name'])
|
||||
validator_params = {}
|
||||
for param in elt:
|
||||
text = param.text
|
||||
if param.attrib['type'] == 'eole':
|
||||
hidden = param.attrib.get('hidden', 'True')
|
||||
if hidden == 'True':
|
||||
hidden = False
|
||||
elif hidden == 'False':
|
||||
hidden = True
|
||||
else:
|
||||
raise CreoleLoaderError(_('unknown hidden boolean {}').format(hidden))
|
||||
if not self._check_extra(text, load_extra):
|
||||
return
|
||||
text = [self.storage.get(text), hidden]
|
||||
validator_params.setdefault(param.attrib.get('name', ''), []).append(text)
|
||||
self.storage.add_validator(elt.attrib['target'], validator, validator_params)
|
||||
|
||||
def _parse_condition(self, elt, load_extra):
|
||||
if not self._check_extra(elt.attrib['source'], load_extra):
|
||||
return
|
||||
if elt.attrib['name'] == 'disabled_if_in':
|
||||
actions = ['disabled']
|
||||
inverse = False
|
||||
elif elt.attrib['name'] == 'disabled_if_not_in':
|
||||
actions = ['disabled']
|
||||
inverse = True
|
||||
elif elt.attrib['name'] == 'auto_frozen_if_in':
|
||||
actions = ['frozen']
|
||||
inverse = False
|
||||
elif elt.attrib['name'] == 'frozen_if_in':
|
||||
actions = ['frozen', 'hidden', 'force_default_on_freeze']
|
||||
inverse = False
|
||||
elif elt.attrib['name'] == 'frozen_if_not_in':
|
||||
actions = ['frozen', 'hidden', 'force_default_on_freeze']
|
||||
inverse = True
|
||||
elif elt.attrib['name'] == 'mandatory_if_in':
|
||||
actions = ['mandatory']
|
||||
inverse = False
|
||||
elif elt.attrib['name'] == 'mandatory_if_not_in':
|
||||
actions = ['mandatory']
|
||||
inverse = True
|
||||
else:
|
||||
raise CreoleLoaderError(_('unknown condition type {} for {}').format(elt.attrib['name'], elt.attrib['source']))
|
||||
expected_values = []
|
||||
options = []
|
||||
for param in elt:
|
||||
if param.tag == 'param':
|
||||
expected_values.append(param.text)
|
||||
elif param.tag == 'target':
|
||||
if param.attrib['type'] in ['variable', 'family']:
|
||||
if not self._check_extra(param.text, load_extra):
|
||||
return
|
||||
option = self.storage.get(param.text)
|
||||
option_actions = actions
|
||||
if 'force_store_value' in option.attrib.get('properties', []) and \
|
||||
'force_default_on_freeze' in option_actions:
|
||||
option_actions.remove('force_default_on_freeze')
|
||||
options.append((param.text, option_actions))
|
||||
source = self.storage.get(elt.attrib['source'])
|
||||
for option, actions in options:
|
||||
conditions = []
|
||||
for action in actions:
|
||||
for expected in expected_values:
|
||||
conditions.append({'option': source, 'expected': expected,
|
||||
'action': action, 'inverse': inverse})
|
||||
self.storage.add_requires(option, conditions)
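#Illustrative example (added; variable names are examples only): a condition such as
#  <condition name="frozen_if_in" source="creole.activer_toto">
#      <param>non</param>
#      <target type="variable">creole.toto_ip</target>
#  </condition>
#produces, for the target option, one requires entry per action:
#  {'option': <activer_toto>, 'expected': 'non', 'action': 'frozen', 'inverse': False}
#  (and the same for 'hidden' and 'force_default_on_freeze')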
|
||||
|
||||
def _iter_help(self, xmlelt):
|
||||
for elt in xmlelt:
|
||||
self.storage.add_help(elt.attrib['name'], elt.text)
|
||||
|
||||
def _iter_master(self, master, subpath):
|
||||
subpath = self._build_path(subpath, master)
|
||||
family = Family(master, self.booleans)
|
||||
family.set_master()
|
||||
self.storage.add(subpath, family)
|
||||
master_name = None
|
||||
for var in master:
|
||||
if master_name is None:
|
||||
master_name = var.attrib['name']
|
||||
self.groups[master_name] = []
|
||||
else:
|
||||
self.groups[master_name].append(var.attrib['name'])
|
||||
self._iter_family(var, subpath=subpath, family=family)
|
||||
return family
|
||||
|
||||
def _iter_family(self, child, subpath=None, family=None):
|
||||
if child.tag not in ['family', 'variable', 'separators', 'master']:
|
||||
raise CreoleLoaderError(_('unknown tag {}').format(child.tag))
|
||||
if child.tag == 'family':
|
||||
old_family = family
|
||||
family = self._populate_family(child, subpath)
|
||||
if old_family is not None:
|
||||
old_family.add(family)
|
||||
if child.tag == 'master':
|
||||
master = self._iter_master(child, subpath)
|
||||
family.add(master)
|
||||
elif child.tag == 'separators':
|
||||
self._parse_separators(child)
|
||||
elif child.tag == 'variable':
|
||||
if family is None:
|
||||
raise CreoleLoaderError(_('variable without family'))
|
||||
|
||||
is_slave = False
|
||||
is_master = False
|
||||
if family.is_master:
|
||||
if child.attrib['name'] != family.attrib['name']:
|
||||
is_slave = True
|
||||
else:
|
||||
is_master = True
|
||||
variable = self._populate_variable(child, subpath, is_slave, is_master)
|
||||
family.add(variable)
|
||||
elif len(child) != 0:
|
||||
subpath = self._build_path(subpath, child)
|
||||
for c in child:
|
||||
self._iter_family(c, subpath, family)
|
||||
|
||||
def _parse_separators(self, separators):
|
||||
for separator in separators:
|
||||
elt = self.storage.get(separator.attrib['name'])
|
||||
never_hidden = separator.attrib.get('never_hidden')
|
||||
if never_hidden == 'True':
|
||||
never_hidden = True
|
||||
else:
|
||||
never_hidden = None
|
||||
info = (separator.text, never_hidden)
|
||||
self.separators[separator.attrib['name']] = info
|
||||
elt.add_information('separator', info)
|
||||
|
||||
def build(self, persistent=False, session_id=None, meta_config=False):
|
||||
if meta_config:
|
||||
optiondescription = self.storage.paths['.'].get()
|
||||
config = MetaConfig([],
|
||||
optiondescription=optiondescription,
|
||||
persistent=persistent,
|
||||
session_id=session_id)
|
||||
mixconfig = MixConfig(children=[],
|
||||
optiondescription=optiondescription,
|
||||
persistent=persistent,
|
||||
session_id='m_' + session_id)
|
||||
config.config.add(mixconfig)
|
||||
else:
|
||||
config = Config(self.storage.paths['.'].get(),
|
||||
persistent=persistent,
|
||||
session_id=session_id)
|
||||
config.information.set('force_store_vars', self.force_store_values)
|
||||
config.information.set('force_store_values', list(self.force_store_values))
|
||||
# XXX really usefull?
|
||||
ro_append = frozenset(config.property.getdefault('read_only', 'append') - {'force_store_value'})
|
||||
rw_append = frozenset(config.property.getdefault('read_write', 'append') - {'force_store_value'})
|
||||
config.property.setdefault(ro_append, 'read_only', 'append')
|
||||
config.property.setdefault(rw_append, 'read_write', 'append')
|
||||
|
||||
config.property.read_only()
|
||||
_modes = list(modes_level)
|
||||
_modes.append('hidden')
|
||||
config.permissive.set(frozenset(_modes))
|
||||
return config
|
||||
|
||||
|
||||
class ElementStorage(object):
|
||||
def __init__(self):
|
||||
self.paths = {}
|
||||
|
||||
def add(self, path, elt):
|
||||
if path in self.paths:
|
||||
raise CreoleLoaderError(_('path already loaded {}').format(path))
|
||||
self.paths[path] = elt
|
||||
|
||||
def add_help(self, path, text):
|
||||
elt = self.get(path)
|
||||
self.paths[path].add_information('help', text)
|
||||
|
||||
def add_callback(self, path, callback, callback_params):
|
||||
elt = self.get(path)
|
||||
elt.add_callback(callback, callback_params)
|
||||
|
||||
def add_information(self, path, name, information):
|
||||
elt = self.get(path)
|
||||
elt.add_information(name, information)
|
||||
|
||||
def add_validator(self, path, validator, validator_params):
|
||||
elt = self.get(path)
|
||||
elt.add_validator(validator, validator_params)
|
||||
|
||||
def add_consistency(self, path, consistence, variables, warnings_only, transitive):
|
||||
elt = self.get(path)
|
||||
elt.add_consistency(consistence, variables, warnings_only, transitive)
|
||||
|
||||
def add_requires(self, path, requires):
|
||||
elt = self.get(path)
|
||||
elt.add_requires(requires)
|
||||
|
||||
def get(self, path):
|
||||
if path not in self.paths:
|
||||
raise CreoleLoaderError(_('there is no element for path {}').format(path))
|
||||
return self.paths[path]
|
||||
|
||||
|
||||
class Variable(object):
|
||||
def __init__(self, elt, booleans, storage, is_slave, is_master, eosfunc):
|
||||
self.option = None
|
||||
self.informations = {}
|
||||
self.attrib = {}
|
||||
self.callbacks = []
|
||||
self.requires = []
|
||||
self.validator = None
|
||||
self.consistencies = []
|
||||
self.attrib['properties'] = []
|
||||
self.eosfunc = eosfunc
|
||||
for key, value in elt.attrib.items():
|
||||
if key in REMOVED_ATTRIB:
|
||||
continue
|
||||
#if key != 'name':
|
||||
# value = unicode(value)
|
||||
|
||||
if key in booleans:
|
||||
if value == 'True':
|
||||
value = True
|
||||
elif value == 'False':
|
||||
value = False
|
||||
else:
|
||||
raise CreoleLoaderError(_('unknown value {} for {}').format(value, key))
|
||||
self.attrib[key] = value
|
||||
convert_option = CONVERT_OPTION[elt.attrib['type']]
|
||||
self.object_type = convert_option['opttype']
|
||||
if elt.attrib['type'] == 'choice':
|
||||
if self.attrib.get('choice'):
|
||||
self.attrib['values'] = getattr(self.eosfunc, self.attrib.get('choice'))
|
||||
else:
|
||||
self.attrib['values'] = []
|
||||
for child in elt:
|
||||
if child.tag == 'choice':
|
||||
value = child.text
|
||||
if 'type' in child.attrib and child.attrib['type'] == 'number':
|
||||
value = int(value)
|
||||
if value is None:
|
||||
value = u''
|
||||
self.attrib['values'].append(value)
|
||||
self.attrib['values'] = tuple(self.attrib['values'])
|
||||
for child in elt:
|
||||
if "type" in child.attrib:
|
||||
type_ = CONVERT_OPTION[child.attrib['type']]['opttype']
|
||||
else:
|
||||
type_ = self.object_type
|
||||
if child.tag == 'property':
|
||||
self.attrib['properties'].append(child.text)
|
||||
elif child.tag == 'value':
|
||||
if self.attrib['multi'] and not is_slave:
|
||||
if 'default' not in self.attrib:
|
||||
self.attrib['default'] = []
|
||||
value = convert_tiramisu_value(child.text, type_)
|
||||
self.attrib['default'].append(value)
|
||||
if 'default_multi' not in self.attrib and not is_master:
|
||||
self.attrib['default_multi'] = value
|
||||
else:
|
||||
if 'default' in self.attrib:
|
||||
raise CreoleLoaderError(_('default value already set for {}'
|
||||
'').format(self.attrib['path']))
|
||||
value = convert_tiramisu_value(child.text, type_)
|
||||
if value is None: # and (elt.attrib['type'] != 'choice' or value not in self.attrib['values']):
|
||||
value = u''
|
||||
if is_slave:
|
||||
self.attrib['default_multi'] = value
|
||||
else:
|
||||
self.attrib['default'] = value
|
||||
if 'initkwargs' in convert_option:
|
||||
self.attrib.update(convert_option['initkwargs'])
|
||||
self.attrib['properties'] = tuple(self.attrib['properties'])
|
||||
if elt.attrib['type'] == 'symlink':
|
||||
del self.attrib['properties']
|
||||
del self.attrib['multi']
|
||||
self.attrib['opt'] = storage.get(self.attrib['opt'])
|
||||
|
||||
def add_information(self, key, value):
|
||||
if key in self.informations:
|
||||
raise CreoleLoaderError(_('key already exists in information {}').format(key))
|
||||
self.informations[key] = value
|
||||
|
||||
def add_callback(self, callback, callback_params):
|
||||
self.callbacks.append((callback, callback_params))
|
||||
|
||||
def add_requires(self, requires):
|
||||
self.requires.extend(requires)
|
||||
|
||||
def add_validator(self, validator, validator_params):
|
||||
self.validator = (validator, validator_params)
|
||||
|
||||
def add_consistency(self, consistence, variables, warnings_only, transitive):
|
||||
self.consistencies.append((consistence, variables, warnings_only, transitive))
|
||||
|
||||
def build_params(self, params):
|
||||
if params != None:
|
||||
new_params = Params()
|
||||
for key, values in params.items():
|
||||
new_values = []
|
||||
for value in values:
|
||||
if isinstance(value, list):
|
||||
# retrieve object
|
||||
value = ParamOption(value[0].get(), value[1])
|
||||
elif value == (None,):
|
||||
value = ParamContext()
|
||||
else:
|
||||
value = ParamValue(value)
|
||||
if key == '':
|
||||
args = list(new_params.args)
|
||||
args.append(value)
|
||||
new_params.args = tuple(args)
|
||||
else:
|
||||
new_params.kwargs[key] = value
|
||||
return new_params
|
||||
return params
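#Added illustrative comment: build_params({'': [u'eth0'], 'mode': [[some_variable, False]]})
#returns Params(args=(ParamValue(u'eth0'),), kwargs={'mode': ParamOption(some_variable.get(), False)});
#a (None,) placeholder becomes ParamContext(). The parameter names are examples only.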
|
||||
|
||||
def get(self):
|
||||
if self.option is None:
|
||||
if self.object_type is SymLinkOption:
|
||||
self.attrib['opt'] = self.attrib['opt'].get()
|
||||
for callback, callback_params in self.callbacks:
|
||||
self.attrib['callback'] = callback
|
||||
self.attrib['callback_params'] = self.build_params(callback_params)
|
||||
for require in self.requires:
|
||||
if isinstance(require['option'], Variable):
|
||||
require['option'] = require['option'].get()
|
||||
if self.requires != []:
|
||||
self.attrib['requires'] = self.requires
|
||||
if self.validator:
|
||||
self.attrib['validator'] = self.validator[0]
|
||||
self.attrib['validator_params'] = self.build_params(self.validator[1])
|
||||
try:
|
||||
option = self.object_type(**self.attrib)
|
||||
except Exception as err:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
name = self.attrib['name']
|
||||
raise CreoleLoaderError(_('cannot create option {}: {}').format(name, err))
|
||||
for key, value in self.informations.items():
|
||||
option.impl_set_information(key, value)
|
||||
for consistency in self.consistencies:
|
||||
options = []
|
||||
for variable in consistency[1]:
|
||||
options.append(variable.get())
|
||||
try:
|
||||
kwargs = {}
|
||||
if consistency[2] == 'True':
|
||||
kwargs['warnings_only'] = True
|
||||
if consistency[3] == 'False':
|
||||
kwargs['transitive'] = False
|
||||
option.impl_add_consistency(consistency[0], *options, **kwargs)
|
||||
except ConfigError as err:
|
||||
name = self.attrib['name']
|
||||
raise CreoleLoaderError(_('cannot load consistency for {}: {}').format(name, err))
|
||||
self.option = option
|
||||
return self.option
|
||||
|
||||
|
||||
class Family(object):
|
||||
def __init__(self, elt, booleans, force_icon=False):
|
||||
self.requires = []
|
||||
self.option = None
|
||||
self.attrib = {}
|
||||
self.is_master = False
|
||||
if force_icon:
|
||||
self.informations = {'icon': None}
|
||||
else:
|
||||
self.informations = {}
|
||||
self.children = []
|
||||
self.attrib['properties'] = []
|
||||
for key, value in elt.attrib.items():
|
||||
if key in REMOVED_ATTRIB:
|
||||
continue
|
||||
if key in booleans:
|
||||
if value == 'True':
|
||||
value = True
|
||||
elif value == 'False':
|
||||
value = False
|
||||
else:
|
||||
raise CreoleLoaderError(_('unknown value {} for {}').format(value, key))
|
||||
if key == 'icon':
|
||||
self.add_information('icon', value)
|
||||
continue
|
||||
elif key == 'hidden':
|
||||
if value:
|
||||
self.attrib['properties'].append(key)
|
||||
elif key == 'mode':
|
||||
self.attrib['properties'].append(value)
|
||||
else:
|
||||
self.attrib[key] = value
|
||||
if 'doc' not in self.attrib:
|
||||
self.attrib['doc'] = u''
|
||||
self.attrib['properties'] = tuple(self.attrib['properties'])
|
||||
|
||||
def add(self, child):
|
||||
self.children.append(child)
|
||||
|
||||
def add_information(self, key, value):
|
||||
if key in self.informations and not (key == 'icon' and self.informations[key] is None):
|
||||
raise CreoleLoaderError(_('key already exists in information {}').format(key))
|
||||
self.informations[key] = value
|
||||
|
||||
def set_master(self):
|
||||
self.is_master = True
|
||||
|
||||
def add_requires(self, requires):
|
||||
self.requires.extend(requires)
|
||||
|
||||
def get(self):
|
||||
if self.option is None:
|
||||
self.attrib['children'] = []
|
||||
for child in self.children:
|
||||
self.attrib['children'].append(child.get())
|
||||
for require in self.requires:
|
||||
if isinstance(require['option'], Variable):
|
||||
require['option'] = require['option'].get()
|
||||
if self.requires != []:
|
||||
self.attrib['requires'] = self.requires
|
||||
try:
|
||||
if not self.is_master:
|
||||
option = OptionDescription(**self.attrib)
|
||||
else:
|
||||
option = Leadership(**self.attrib)
|
||||
#option = OptionDescription(**self.attrib)
|
||||
except Exception as err:
|
||||
raise CreoleLoaderError(_('cannot create optiondescription {}: {}').format(self.attrib['name'], err))
|
||||
for key, value in self.informations.items():
|
||||
option.impl_set_information(key, value)
|
||||
self.option = option
|
||||
#if self.is_master:
|
||||
# self.option.impl_set_group_type(groups.master)
|
||||
|
||||
return self.option
|
||||
|
||||
|
||||
def _gen_eol_file(namespace):
|
||||
if namespace == 'creole':
|
||||
return configeol
|
||||
else:
|
||||
return join(eoleextraconfig, namespace, 'config.eol')
|
||||
|
||||
|
||||
def creole_loader(load_values=True, rw=False, namespace='creole',
|
||||
load_extra=False, reload_config=True, owner=None,
|
||||
disable_mandatory=False, force_configeol=None,
|
||||
try_upgrade=True, force_load_creole_owner=None,
|
||||
force_dirs=None, warnings=None, force_instanciate=None,
|
||||
force_dtdfile=None, force_flattened=None,
|
||||
mandatory_permissive=True, from_zephir=None,
|
||||
force_no_save=False, force_eoleextradico=None,
|
||||
force_eoleextraconfig=None, only_load_flattened=False):
|
||||
"""
|
||||
Loads the Creole XML dictionnary files and return a tiramisu config object
|
||||
|
||||
:param bool load_values: Loads (or not) the :file:`config.eol` file
|
||||
:param bool rw: Config's read/write flag
|
||||
:param str namespace: Root's namespace for the config (example: "creole", "bacula", ...)
|
||||
:param bool load_extra: Loads (or not) the extra dictionnaries (if `namespace='creole'`)
|
||||
:param bool reload_config: This parameter is kept for compatibility reasons
|
||||
:param str owner: forces the owner on a modified variable
|
||||
:param bool disable_mandatory: disables the mandatory variables
|
||||
:param str force_configeol: Forces the used configuration file
|
||||
:param bool try_upgrade: tries to upgrade
|
||||
:param force_load_creole_owner: Forces the owner for the loaded variables
|
||||
:param str force_dirs: Forces the folder's name containing the dictionnaries
|
||||
:param warnings: Shows the validation's warnings
|
||||
:param bool force_instanciate: tells us if the server is already instanciated or not
|
||||
:param force_dtdfile: None or dtd filename
|
||||
:param force_flattened: None or flatened filename's name
|
||||
:param only_load_flattened: boolean to desactivate generated of flattened file
|
||||
"""
|
||||
if namespace != 'creole':
|
||||
raise CreoleLoaderError(_('Only creole namespace is supported'))
|
||||
#if reload_config is not True:
|
||||
# raise CreoleLoaderError(_('Cannot reload the configuration'))
|
||||
if force_flattened is None:
|
||||
force_flattened = join(FLATTENED_CREOLE_DIR, 'flattened_creole.xml')
|
||||
if force_dtdfile is None:
|
||||
force_dtdfile = dtdfilename
|
||||
if force_configeol is not None:
|
||||
if not isfile(force_configeol):
|
||||
raise CreoleLoaderError(_("Configuration file unexistent : {0}").format(
|
||||
force_configeol))
|
||||
if load_extra and force_eoleextraconfig is None:
|
||||
# if force_configeol, cannot calculate extra configfile name
|
||||
raise CreoleLoaderError(_('Unable to force_configeol with load_extra.'))
|
||||
if force_dirs is not None and load_extra is True and force_eoleextradico is None:
|
||||
raise CreoleLoaderError(_('If force_dirs is defined, namespace must be set to creole and '
|
||||
'load_extra must be set to False.'))
|
||||
if not only_load_flattened:
|
||||
#should not load value now because create a Config
|
||||
eolobj = CreoleObjSpace(force_dtdfile)
|
||||
if force_dirs is not None:
|
||||
dirs = force_dirs
|
||||
else:
|
||||
dirs = eoledirs
|
||||
if from_zephir is not None and type(dirs) != list:
|
||||
#if dirs is not a list, add subdirectory 'local'
|
||||
#and 'variante'
|
||||
orig_dir = dirs
|
||||
dirs = [dirs]
|
||||
for tdir in [join(orig_dir, 'local'),
|
||||
join(orig_dir, 'variante')]:
|
||||
if isdir(tdir):
|
||||
dirs.append(tdir)
|
||||
eolobj.create_or_populate_from_xml('creole', dirs, from_zephir=from_zephir)
|
||||
|
||||
if load_extra:
|
||||
if force_eoleextradico == None:
|
||||
force_eoleextradico = eoleextradico
|
||||
extranames = _list_extras(force_eoleextradico)
|
||||
extranames.sort()
|
||||
if isdir(force_eoleextradico):
|
||||
for directory in extranames:
|
||||
if directory in forbiddenextra:
|
||||
raise CreoleLoaderError(
|
||||
_('Namespace {} for extra dictionary not allowed').format(directory))
|
||||
dirname = join(force_eoleextradico, directory)
|
||||
eolobj.create_or_populate_from_xml(directory, [dirname], from_zephir)
|
||||
eolobj.space_visitor()
|
||||
xmlroot = eolobj.save(force_flattened, force_no_save)
|
||||
else:
|
||||
with open(force_flattened, 'r') as fhd:
|
||||
xmlroot = parse(fhd).getroot()
|
||||
tiramisu_objects = PopulateTiramisuObjects()
|
||||
tiramisu_objects.parse_dtd(force_dtdfile)
|
||||
tiramisu_objects.make_tiramisu_objects(xmlroot)
|
||||
config = tiramisu_objects.build()
|
||||
if warnings is None:
|
||||
# warnings is disabled in read-only mode and enabled in read-write mode by default
|
||||
warnings = rw
|
||||
if warnings is False:
|
||||
config.cfgimpl_get_settings().remove('warnings')
|
||||
if owner is not None:
|
||||
if owner not in dir(owners):
|
||||
owners.addowner(owner)
|
||||
config.cfgimpl_get_settings().setowner(getattr(owners, owner))
|
||||
#load values
|
||||
if force_configeol is not None:
|
||||
configfile = force_configeol
|
||||
else:
|
||||
configfile = _gen_eol_file(namespace)
|
||||
if load_values and isfile(configfile):
|
||||
disable_mandatory = False
|
||||
load_config_eol(config, configfile=configfile, try_upgrade=try_upgrade,
|
||||
force_load_owner=force_load_creole_owner,
|
||||
force_instanciate=force_instanciate)
|
||||
else:
|
||||
config.impl_set_information(namespace, configfile)
|
||||
if load_extra:
|
||||
load_extras(config, load_values=load_values, mandatory_permissive=mandatory_permissive,
|
||||
extradico=force_eoleextradico, force_eoleextraconfig=force_eoleextraconfig)
|
||||
if rw:
|
||||
config.read_write()
|
||||
elif rw is False:
|
||||
config.read_only()
|
||||
|
||||
if disable_mandatory:
|
||||
config.cfgimpl_get_settings().remove('mandatory')
|
||||
config.cfgimpl_get_settings().remove('empty')
|
||||
if from_zephir is not None:
|
||||
return tiramisu_objects.groups, tiramisu_objects.separators, config
|
||||
else:
|
||||
return config
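A minimal usage sketch for creole_loader, assuming the standard dictionaries and config.eol are installed (error handling omitted):

from creole.loader import creole_loader

# load the dictionaries plus current values, without the extra namespaces
config = creole_loader(load_values=True, rw=False, load_extra=False)

# switch to read/write mode before changing any value
config.read_write()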
|
769  creole/loader1.py  Normal file
@@ -0,0 +1,769 @@
# -*- coding: utf-8 -*-
|
||||
|
||||
#import cjson
|
||||
import json
|
||||
import fcntl
|
||||
import stat
|
||||
import logging
|
||||
|
||||
from os.path import isdir, isfile, join, basename, dirname, splitext
|
||||
from os import listdir, makedirs, major, minor
|
||||
from os import stat as os_stat
|
||||
from distutils.version import StrictVersion
|
||||
try:
|
||||
from collections import OrderedDict
|
||||
except:
|
||||
from pyeole.odict import OrderedDict
|
||||
|
||||
from tiramisu.option import UnicodeOption, OptionDescription, \
|
||||
IntOption, ChoiceOption, BoolOption, SymLinkOption, IPOption, \
|
||||
NetworkOption, NetmaskOption
|
||||
from tiramisu.error import PropertiesOptionError, LeadershipError
|
||||
from tiramisu.setting import owners
|
||||
|
||||
from .config import configeol, eoledirs, dtdfilename, eoleextradico, \
|
||||
eoleextraconfig, forbiddenextra, VIRTROOT, \
|
||||
VIRTBASE, VIRTMASTER, templatedir
|
||||
from .error import ConfigError
|
||||
from .var_loader import modes_level, CreoleFamily, CreoleConstraint, \
|
||||
CreoleVarLoader
|
||||
try:
|
||||
from .client import CreoleClient, CreoleClientError
|
||||
client = CreoleClient()
|
||||
except:
|
||||
client = None
|
||||
from pyeole.encode import normalize
|
||||
try:
|
||||
from .eosfunc import is_instanciate, get_version
|
||||
except:
|
||||
pass
|
||||
|
||||
from .i18n import _
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
class CreoleContainer():
|
||||
"""
|
||||
Charge les conteneurs, les fichiers, les packages, services, interfaces
|
||||
et disknods
|
||||
"""
|
||||
def gen_containers(self, paths):
|
||||
"""
|
||||
Generate Containers information in tiramisu tree
|
||||
|
||||
:paths: paths variables (for added new option in paths's dictionnary)
|
||||
"""
|
||||
containers = []
|
||||
for name, container in self._get_containers().items():
|
||||
container['path'] = 'container_path_{0}'.format(name)
|
||||
container['ip'] = 'container_ip_{0}'.format(name)
|
||||
containers.append(container)
|
||||
|
||||
key_type = {'id': IntOption, 'group': UnicodeOption,
|
||||
'ip': SymLinkOption, 'path': SymLinkOption,
|
||||
'level': UnicodeOption}
|
||||
|
||||
return self._gen_tiramisu_config(paths, "container", containers,
|
||||
key_type)
|
||||
|
||||
def gen_networks(self, paths):
|
||||
var = []
|
||||
descr = None
|
||||
namespace = paths['adresse_ip_br0'].split('.')[0]
|
||||
for descr_ in self.space:
|
||||
if descr_._name == namespace:
|
||||
descr = descr_
|
||||
break
|
||||
if descr == None:
|
||||
raise Exception(_(u'Unable to find namespace: {0}').format(
|
||||
namespace))
|
||||
for name in ['adresse_ip_br0', 'adresse_netmask_br0',
|
||||
'adresse_network_br0', 'adresse_broadcast_br0']:
|
||||
path = paths[name]
|
||||
subpath = path.split('.')[1:]
|
||||
opt = descr
|
||||
for p in subpath:
|
||||
opt = getattr(opt, p)
|
||||
var.append(SymLinkOption(name, opt))
|
||||
return OptionDescription('network', '', var)
|
||||
|
||||
def gen_interfaces(self, paths):
|
||||
"""Add per container interface linked to inter-containers bridge
|
||||
|
||||
Theses interfaces must come before other containers ones as
|
||||
default gateway.
|
||||
|
||||
"""
|
||||
lxc_net = OrderedDict()
|
||||
if self.containers_enabled:
|
||||
interfaces = OrderedDict()
|
||||
containers = self._get_containers()
|
||||
|
||||
for name, container in containers.items():
|
||||
if name in ['all', 'root']:
|
||||
continue
|
||||
lxc_net[name] = {'name': 'containers',
|
||||
'container': name,
|
||||
'linkto': 'br0',
|
||||
'method': 'bridge',
|
||||
'ip': 'container_ip_{0}'.format(name),
|
||||
'mask': 'adresse_netmask_br0',
|
||||
'bcast': 'adresse_broadcast_br0',
|
||||
'gateway': 'adresse_ip_br0'}
|
||||
|
||||
# Insert default interfaces before
|
||||
self.generic['interfaces'] = lxc_net.values() \
|
||||
+ self.generic['interfaces']
|
||||
|
||||
return self.gen_generic('interfaces', paths, copy_requires='ip')
|
||||
|
||||
def gen_service_accesss(self, paths):
|
||||
return self.__gen_service_access_restriction('service_access', paths)
|
||||
|
||||
def gen_service_restrictions(self, paths):
|
||||
return self.__gen_service_access_restriction('service_restriction', paths)
|
||||
|
||||
def __gen_service_access_restriction(self, service_type, paths):
|
||||
"""Add services requires to service_access/service_restriction
|
||||
If a service is disabled, we remove, also, access to this service
|
||||
"""
|
||||
generic_name = service_type + 's'
|
||||
list_name = service_type + 'list'
|
||||
if 'service' in self.requires:
|
||||
for gen in self.generic[generic_name]:
|
||||
service_name = gen['service']
|
||||
requires_name = gen.get(list_name)
|
||||
if requires_name is None:
|
||||
requires_name = '___auto_{0}'.format(service_name)
|
||||
gen[list_name] = requires_name
|
||||
self.requires[service_type][requires_name] = {'optional': True, 'list': []}
|
||||
if service_name in self.requires['service']:
|
||||
service_requires = self.requires['service'][service_name]['list']
|
||||
if self.requires['service'][service_name]['optional'] is False:
|
||||
self.requires['service'][service_name]['optional'] = False
|
||||
self.requires[service_type][requires_name]['list'].extend(service_requires)
|
||||
return self.gen_generic(generic_name, paths, verify_exists_redefine=False)
|
||||
|
||||
def _gen_file(self, fdata, container, containers):
|
||||
"""Generate one file structure for one container
|
||||
|
||||
:param fdata: file informations
|
||||
:type fdata: `dict`
|
||||
:param container: container of the file
|
||||
:type container: `dict`
|
||||
:return: file information for a container
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
file_infos = fdata.copy()
|
||||
# take care of os.path.join and absolute part after first
|
||||
# argument.
|
||||
_file = fdata['name']
|
||||
if _file[0] == '/':
|
||||
_file = _file[1:]
|
||||
|
||||
file_infos['container'] = container['name']
|
||||
file_infos['full_name'] = fdata['name']
|
||||
if self.containers_enabled and container['name'] != VIRTMASTER:
|
||||
# Prefix the full path with container rootfs
|
||||
if fdata['container'] == 'all':
|
||||
cont_grp = container['group']
|
||||
else:
|
||||
cont_grp = fdata['container']
|
||||
cont_name = self.get_real_container_name(containers, cont_grp)
|
||||
_file = join(VIRTROOT, cont_name, VIRTBASE, _file)
|
||||
file_infos['full_name'] = _file
|
||||
|
||||
source = file_infos.get('source', basename(_file))
|
||||
source = join(templatedir, source)
|
||||
file_infos['source'] = source
|
||||
return file_infos
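# Minimal sketch of the path prefixing done by _gen_file() when containers are
# enabled. VIRTROOT, VIRTBASE, the container name 'reseau' and the templatedir
# value are assumptions used only for this illustration.
from os.path import basename, join

ASSUMED_VIRTROOT = '/var/lib/lxc'
ASSUMED_VIRTBASE = 'rootfs'
ASSUMED_TEMPLATEDIR = '/usr/share/eole/creole/distrib'

name = '/etc/ssh/sshd_config'                       # fdata['name']
relative = name[1:] if name[0] == '/' else name     # drop the leading '/'
full_name = join(ASSUMED_VIRTROOT, 'reseau', ASSUMED_VIRTBASE, relative)
source = join(ASSUMED_TEMPLATEDIR, basename(name))
print(full_name)   # /var/lib/lxc/reseau/rootfs/etc/ssh/sshd_config
print(source)      # /usr/share/eole/creole/distrib/sshd_config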
|
||||
|
||||
def gen_files(self, paths):
|
||||
containers = self._get_containers()
|
||||
files = []
|
||||
for fdata in self.generic.get('files', []):
|
||||
if fdata['container'] == 'all':
|
||||
# Generate a file per container
|
||||
for container in containers.values():
|
||||
if container['name'] in ['all', VIRTMASTER]:
|
||||
continue
|
||||
files.append(self._gen_file(fdata, container, containers))
|
||||
else:
|
||||
container = containers[fdata['container']]
|
||||
files.append(self._gen_file(fdata, container, containers))
|
||||
|
||||
key_type = {'source': UnicodeOption, 'mode': UnicodeOption,
|
||||
'full_name': UnicodeOption,
|
||||
'owner': UnicodeOption, 'group': UnicodeOption,
|
||||
'mkdir': BoolOption, 'rm': BoolOption,
|
||||
'del_comment': UnicodeOption,
|
||||
'level': UnicodeOption}
|
||||
return self._gen_tiramisu_config(paths, "file", files, key_type,
|
||||
requires_key='activate')
|
||||
|
||||
def gen_disknods(self, paths):
|
||||
containers = self._get_containers()
|
||||
disknods = []
|
||||
for fdata in self.generic.get('disknods', []):
|
||||
stats = os_stat(fdata['name'])
|
||||
if stat.S_ISBLK(stats.st_mode):
|
||||
dev_type = u'b'
|
||||
device = stats.st_rdev
|
||||
elif stat.S_ISCHR(stats.st_mode):
|
||||
dev_type = u'c'
|
||||
device = stats.st_rdev
|
||||
elif stat.S_ISDIR(stats.st_mode):
|
||||
dev_type = u'b'
|
||||
device = stats.st_dev
|
||||
else:
|
||||
dev_type = None
|
||||
device = None
|
||||
fdata['type'] = dev_type
|
||||
if device is not None:
|
||||
fdata['major'] = major(device)
|
||||
fdata['minor'] = minor(device)
|
||||
else:
|
||||
fdata['major'] = None
|
||||
fdata['minor'] = None
|
||||
fdata['mode'] = u'rwm'
|
||||
fdata['permission'] = 'allow'
|
||||
disknods.append(fdata)
|
||||
|
||||
key_type = {'major': IntOption,
|
||||
'minor': IntOption,
|
||||
'name': UnicodeOption,
|
||||
'permission': UnicodeOption,
|
||||
'mode': UnicodeOption,
|
||||
'type': UnicodeOption,
|
||||
'level': UnicodeOption}
|
||||
return self._gen_tiramisu_config(paths, "disknod", disknods, key_type)
|
||||
|
||||
def gen_packages(self, paths):
|
||||
# the last 'package' wins (if two package tags with the same
# name appear in the same container)
|
||||
return self.gen_generic('packages', paths, verify_exists_redefine=False)
|
||||
|
||||
|
||||
class CreoleLoader(CreoleVarLoader, CreoleContainer):
|
||||
"""
|
||||
charge les variables + les conteneurs
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
def _gen_eol_file(namespace, root_path=None):
|
||||
if namespace == 'creole':
|
||||
return unicode(configeol)
|
||||
else:
|
||||
if root_path is None:
|
||||
root_path = eoleextraconfig
|
||||
return unicode(join(root_path, namespace, 'config.eol'))
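# Sketch of the paths produced by _gen_eol_file(). The concrete values of
# configeol and eoleextraconfig are assumptions (they come from creole.config);
# only the namespace -> file mapping is illustrated here.
from os.path import join

ASSUMED_CONFIGEOL = '/etc/eole/config.eol'
ASSUMED_EOLEEXTRACONFIG = '/etc/eole/extra'

def sketch_gen_eol_file(namespace, root_path=None):
    if namespace == 'creole':
        return ASSUMED_CONFIGEOL
    return join(root_path or ASSUMED_EOLEEXTRACONFIG, namespace, 'config.eol')

print(sketch_gen_eol_file('creole'))    # /etc/eole/config.eol
print(sketch_gen_eol_file('schedule'))  # /etc/eole/extra/schedule/config.eol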
|
||||
|
||||
|
||||
def _list_extras(extradico=eoleextradico):
|
||||
extranames = []
|
||||
if isdir(extradico):
|
||||
for directory in listdir(extradico):
|
||||
content = listdir(join(extradico, directory))
|
||||
if not len(content) == 0:
|
||||
extensions = [splitext(filename)[1] for filename in content]
|
||||
if ".xml" in extensions:
|
||||
extranames.append(directory)
|
||||
return extranames
|
||||
|
||||
|
||||
def set_mandatory_permissive(config, action):
|
||||
descr = config.cfgimpl_get_description()
|
||||
parent = getattr(descr, action, None)
|
||||
if parent is not None:
|
||||
for family in parent.impl_getchildren():
|
||||
for option in family.impl_getchildren():
|
||||
if 'mandatory' in option.impl_getproperties():
|
||||
config.cfgimpl_get_settings().setpermissive(('mandatory',), option)
|
||||
|
||||
|
||||
def load_extras(config, load_values=True, mandatory_permissive=False, extradico=eoleextradico,
|
||||
force_eoleextraconfig=None):
|
||||
actions = set()
|
||||
if mandatory_permissive and hasattr(config, 'actions'):
|
||||
for name, family in config.actions.iter_groups():
|
||||
for aname, action in family.iter_groups():
|
||||
actions.add(action.name)
|
||||
for extraname in _list_extras(extradico=extradico):
|
||||
if extraname in ['creole', 'containers', 'actions']:
|
||||
raise Exception(_('extra name {} not allowed').format(extraname))
|
||||
eol_file = _gen_eol_file(extraname, root_path=force_eoleextraconfig)
|
||||
config.impl_set_information(extraname, eol_file)
|
||||
if extraname in actions:
|
||||
set_mandatory_permissive(config, extraname)
|
||||
if not load_values:
|
||||
continue
|
||||
#if the file does not exist, create it (for the auto_freeze values)
|
||||
if not isfile(eol_file):
|
||||
try:
|
||||
config_save_values(config, extraname, reload_config=False, check_mandatory=False)
|
||||
except PropertiesOptionError:
|
||||
pass
|
||||
if isfile(eol_file):
|
||||
config_load_values(config, extraname)
|
||||
|
||||
|
||||
def load_config_eol(config, configfile=None, try_upgrade=True, force_load_owner=None,
|
||||
current_eol_version=None, force_instanciate=None):
|
||||
if not configfile:
|
||||
configfile = _gen_eol_file('creole')
|
||||
config.impl_set_information('creole', configfile)
|
||||
config_load_values(config, 'creole', force_load_owner=force_load_owner,
|
||||
force_instanciate=force_instanciate)
|
||||
load_values(config,
|
||||
configfile=configfile,
|
||||
try_upgrade=try_upgrade,
|
||||
force_load_owner=force_load_owner,
|
||||
current_eol_version=current_eol_version)
|
||||
|
||||
def load_config_store(config, store, unset_default=False,
|
||||
force_load_owner=None, current_eol_version=None,
|
||||
force_instanciate=None, remove_unknown_vars=False,
|
||||
try_upgrade=False):
|
||||
"""used on Zéphir to upgrade values (2.4.X -> 2.4.X+1) on a configuration
|
||||
that has already been migrated (2.2/2.3 −> 2.4)
|
||||
"""
|
||||
config_load_store(config, 'creole', store, force_load_owner=force_load_owner,
|
||||
unset_default=unset_default, force_instanciate=force_instanciate)
|
||||
load_values(config,
|
||||
try_upgrade=try_upgrade,
|
||||
force_load_owner=force_load_owner,
|
||||
current_eol_version=current_eol_version,
|
||||
remove_unknown_vars=remove_unknown_vars)
|
||||
|
||||
def load_values(config, configfile=None, try_upgrade=True, force_load_owner=None,
|
||||
current_eol_version=None, remove_unknown_vars=False):
|
||||
load_error = config.impl_get_information('load_error', False)
|
||||
if load_error and try_upgrade:
|
||||
#Try to upgrade
|
||||
from .upgrade import upgrade
|
||||
try:
|
||||
store_dico, version = upgrade(config, configfile)
|
||||
config_load_store(config, 'creole', store_dico, unset_default=True, eol_version='1.0')
|
||||
config.impl_set_information('upgrade', version)
|
||||
remove_unknown_vars = True
|
||||
load_error = False
|
||||
except Exception as e:
|
||||
log.error(_('Error when trying to upgrade config file: {}').format(e))
|
||||
config.impl_set_information('load_error', True)
|
||||
#print "fichier de configuration invalide 2.2 ou 2.3: {0} : {1}".format(configfile, e)
|
||||
if current_eol_version is None:
|
||||
current_eol_version = get_version('EOLE_RELEASE')
|
||||
eol_version = str(config.impl_get_information('eol_version'))
|
||||
if try_upgrade and not load_error:
|
||||
if StrictVersion(eol_version) > StrictVersion(current_eol_version):
|
||||
raise Exception(_('eol_version ({0}) is greater than current version ({1})').format(eol_version, current_eol_version))
|
||||
if StrictVersion(eol_version) < StrictVersion(current_eol_version):
|
||||
#can be used to edit lower versions on Zéphir
|
||||
from .upgrade24 import upgrade2
|
||||
try:
|
||||
# 2.4.x (greater than 2.4.0)
|
||||
if StrictVersion(current_eol_version) >= StrictVersion('2.4.0') and StrictVersion(eol_version) < StrictVersion('2.5.0'):
|
||||
upgrade2('2.4', eol_version, current_eol_version, config)
|
||||
# 2.5.x (greater than 2.5.0)
|
||||
if StrictVersion(current_eol_version) >= StrictVersion('2.5.0') and StrictVersion(eol_version) < StrictVersion('2.6.0'):
|
||||
upgrade2('2.5', eol_version, current_eol_version, config)
|
||||
# 2.6.x (greater than 2.6.0)
|
||||
if StrictVersion(current_eol_version) >= StrictVersion('2.6.0') and StrictVersion(eol_version) < StrictVersion('2.7.0'):
|
||||
upgrade2('2.6', eol_version, current_eol_version, config)
|
||||
if config.impl_get_information('upgrade', '') == '':
|
||||
#set the version only if it is the first upgrade
|
||||
config.impl_set_information('upgrade', eol_version)
|
||||
except Exception as e:
|
||||
log.error(_('Error when trying to upgrade config file: {}').format(normalize(str(e))))
|
||||
config.impl_set_information('upgrade', False)
|
||||
config.impl_set_information('load_error', True)
|
||||
|
||||
if remove_unknown_vars:
|
||||
# nettoyage des variables inconnues en dernier (#9858)
|
||||
config.impl_set_information('unknown_options', {})
|
||||
|
||||
def creole_loader(load_values=True, rw=False, namespace='creole',
|
||||
load_extra=False, reload_config=True, owner=None,
|
||||
disable_mandatory=False, force_configeol=None,
|
||||
try_upgrade=True, force_load_creole_owner=None,
|
||||
force_dirs=None, warnings=None, force_instanciate=None):
|
||||
"""
|
||||
charge les dictionnaires Creole et retourne une config Tiramisu
|
||||
|
||||
:load_values: boolean. Charge ou non le fichier config.eol (default True)
|
||||
:rw: boolean. Mode de travail (lecture seule ou lecture/écriture)
|
||||
:namespace: string. Espace de travail (ex: "creole", "bacula", ...)
|
||||
:load_extra: boolean. Charge ou non les dictionnaire extra (si namespace='creole')
|
||||
:reload_config: boolean. Cette option est conservée pour raison de compatibilité
|
||||
ascendante mais n'a plus de justification, a ne pas utiliser
|
||||
:owner: string. Owner forcé sur les variables modifiées
|
||||
:disable_mandatory: boolean.
|
||||
:force_configeol: string. Force le nom du fichier de configuration utilisé
|
||||
:try_upgrade: boolean.
|
||||
:force_dirs: string. Force le nom du réprtoire contenant les dictionnaires
|
||||
:force_load_creole_owner: Owner forcé pour les variables chargées
|
||||
:warnings: affiche les warnings de validation
|
||||
"""
|
||||
if force_configeol is not None:
|
||||
if not isfile(force_configeol):
|
||||
raise ConfigError(_(u"Configuration file unexistent : {0}").format(
|
||||
force_configeol))
|
||||
if load_extra:
|
||||
#if force_configeol is set, the extra configfile names cannot be calculated
|
||||
raise Exception(_(u'Unable to force_configeol with load_extra.'))
|
||||
if force_dirs is not None and (load_extra is True or namespace != 'creole'):
|
||||
raise Exception(_(u'If force_dirs is defined, namespace must be set to creole and load_extra must be set to False.'))
|
||||
if namespace != 'creole' and load_extra:
|
||||
raise ValueError(_(u'namespace is not creole, so load_extra is forbidden.'))
|
||||
#values must not be loaded yet because the Config is still being created
|
||||
loader = CreoleLoader()
|
||||
if force_dirs is not None:
|
||||
dirs = force_dirs
|
||||
elif namespace == 'creole':
|
||||
dirs = eoledirs
|
||||
else:
|
||||
dirs = join(eoleextradico, namespace)
|
||||
#load config
|
||||
loader.read_dir(dirs, namespace)
|
||||
if load_extra:
|
||||
extranames = _list_extras()
|
||||
if isdir(eoleextradico):
|
||||
for directory in extranames:
|
||||
if directory in forbiddenextra:
|
||||
raise ValueError(
|
||||
_(u'Namespace {} for extra dictionary not allowed').format(directory))
|
||||
loader.read_dir(join(eoleextradico, directory), directory)
|
||||
config = loader.get_config()
|
||||
if warnings is None:
|
||||
# warnings is disabled in read-only mode and enabled in read-write mode by default
|
||||
warnings = rw
|
||||
if warnings is False:
|
||||
config.cfgimpl_get_settings().remove('warnings')
|
||||
if owner is not None:
|
||||
if owner not in dir(owners):
|
||||
owners.addowner(owner)
|
||||
config.cfgimpl_get_settings().setowner(getattr(owners, owner))
|
||||
#load values
|
||||
if force_configeol is not None:
|
||||
configfile = force_configeol
|
||||
else:
|
||||
configfile = _gen_eol_file(namespace)
|
||||
if load_values and isfile(configfile):
|
||||
disable_mandatory = False
|
||||
load_config_eol(config, configfile=configfile, try_upgrade=try_upgrade,
|
||||
force_load_owner=force_load_creole_owner,
|
||||
force_instanciate=force_instanciate)
|
||||
else:
|
||||
config.impl_set_information(namespace, configfile)
|
||||
if load_extra:
|
||||
load_extras(config, load_values=load_values)
|
||||
if rw:
|
||||
config.read_write()
|
||||
elif rw is False:
|
||||
config.read_only()
|
||||
|
||||
if disable_mandatory:
|
||||
config.cfgimpl_get_settings().remove('mandatory')
|
||||
config.cfgimpl_get_settings().remove('empty')
|
||||
return config
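# Hypothetical usage of creole_loader() on an EOLE host where the dictionaries
# are installed, assuming this module is importable as creole.loader. The
# variable path general.numero_etab is only an example.
from creole.loader import creole_loader, config_save_values

config = creole_loader(rw=True, load_extra=True)
config.creole.general.numero_etab = u'0000000A'   # hypothetical variable
config_save_values(config, 'creole')              # writes the registered config.eol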
|
||||
|
||||
|
||||
def valid_store(store):
|
||||
if not isinstance(store, dict):
|
||||
raise Exception('store is not a dict: {0}'.format(store))
|
||||
for key, value in store.items():
|
||||
if not isinstance(key, unicode):
raise Exception('store key is not a unicode for {0}'.format(key))
# compare sorted keys so that dict ordering cannot trigger a false error
if key != '___version___' and (not isinstance(value, dict) or sorted(value.keys()) != ['owner', 'val']):
raise Exception('store value is not a dict for {0}'.format(key))
|
||||
|
||||
|
||||
def load_store(config, eol_file=configeol):
|
||||
if not isfile(eol_file):
|
||||
store = {}
|
||||
else:
|
||||
fh = open(eol_file, 'r')
|
||||
fcntl.lockf(fh, fcntl.LOCK_SH)
|
||||
try:
|
||||
store = cjson.decode(fh.read(), all_unicode=True)
|
||||
except cjson.DecodeError:
|
||||
config.impl_set_information('load_error', True)
|
||||
store = {}
|
||||
fh.close()
|
||||
try:
|
||||
valid_store(store)
|
||||
except Exception as err:
|
||||
config.impl_set_information('load_error', True)
|
||||
store = {}
|
||||
return store
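# Shape of the store accepted by valid_store()/config_load_store(): a mapping
# of variable names to {'owner': ..., 'val': ...} plus an optional
# '___version___' key. The variable names below are hypothetical; the real
# file is serialised with cjson (plain json is used here for portability).
import json

example_store = {
    u'___version___': u'2.7.0',
    u'numero_etab': {u'owner': u'gen_config', u'val': u'0000000A'},
    u'ip_eth0': {u'owner': u'gen_config', u'val': u'192.168.0.1'},
}
print(json.dumps(example_store, indent=2))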
|
||||
|
||||
|
||||
def config_load_store(config, namespace, store, force_instanciate=None,
|
||||
unset_default=False, force_load_owner=None, eol_version='2.4.0'):
|
||||
subconfig = getattr(config, namespace)
|
||||
cache_paths = config.cfgimpl_get_description()._cache_paths[1]
|
||||
unknown_options = {}
|
||||
|
||||
def reorder_store(path1, path2):
|
||||
"""
|
||||
sorter function.
|
||||
|
||||
sort description : if varname1 is a master and varname 2
|
||||
is a slave, returns [varname1, varname2]
|
||||
"""
|
||||
idx_1 = cache_paths.index(path1)
|
||||
idx_2 = cache_paths.index(path2)
|
||||
return cmp(idx_1, idx_2)
|
||||
|
||||
def store_path_and_reorder(eol_version):
|
||||
"""Convenience function to replace varnames with full paths
|
||||
and to sort an unordered ConfigObj store
|
||||
|
||||
:returns: a sorted ordereddict.
|
||||
"""
|
||||
store_path = {}
|
||||
if namespace == 'creole':
|
||||
paths = {}
|
||||
for path in subconfig.cfgimpl_get_description().impl_getpaths():
|
||||
vname = path.split('.')[-1]
|
||||
paths[vname] = namespace + '.' + path
|
||||
#variables not present in Tiramisu
|
||||
for vname, value in store.items():
|
||||
if vname == '___version___':
|
||||
eol_version = value
|
||||
elif vname not in paths:
|
||||
unknown_options[vname] = value
|
||||
if vname not in paths or value == {}:
|
||||
continue
|
||||
store_path[paths[vname]] = value
|
||||
else:
|
||||
paths = []
|
||||
subpaths = subconfig.cfgimpl_get_description().impl_getpaths()
|
||||
for path in subpaths:
|
||||
paths.append(namespace + '.' + path)
|
||||
for vname, value in store.items():
|
||||
if vname == '___version___':
|
||||
eol_version = value
|
||||
continue
|
||||
elif vname not in paths:
|
||||
continue
|
||||
store_path[vname] = value
|
||||
store_order = OrderedDict()
|
||||
store_key = store_path.keys()
|
||||
store_key.sort(reorder_store)
|
||||
for path in store_key:
|
||||
store_order[path] = store_path[path]
|
||||
return eol_version, store_order
|
||||
|
||||
#don't freeze auto_freeze variables before instantiation (or enregistrement_zephir for Zéphir)
|
||||
if force_instanciate is not None:
|
||||
is_inst = force_instanciate
|
||||
else:
|
||||
is_inst = is_instanciate()
|
||||
eol_version, store = store_path_and_reorder(eol_version)
|
||||
orig_values = {}
|
||||
for path, values in store.items():
|
||||
value = values['val']
|
||||
option = config.unwrap_from_path(path)
|
||||
settings = config.cfgimpl_get_settings()
|
||||
tiramisu_values = config.cfgimpl_get_values()
|
||||
if force_load_owner is not None:
|
||||
owner = force_load_owner
|
||||
else:
|
||||
owner = values['owner']
|
||||
if isinstance(owner, dict):
|
||||
for towner in owner.values():
|
||||
if towner not in dir(owners):
|
||||
owners.addowner(towner)
|
||||
else:
|
||||
if owner not in dir(owners):
|
||||
owners.addowner(owner)
|
||||
try:
|
||||
#if unset_default, reset to the default value when equal to it
|
||||
if unset_default and value == getattr(config, path):
|
||||
continue
|
||||
if isinstance(value, tuple):
|
||||
value = list(value)
|
||||
values['val'] = value
|
||||
orig_values[path.split('.')[-1]] = values
|
||||
if option.impl_is_master_slaves('slave'):
|
||||
if not isinstance(owner, dict):
|
||||
new_owner = getattr(owners, owner)
|
||||
multi = config.getattr(path, force_permissive=True)
|
||||
if isinstance(value, list):
|
||||
tval = {}
|
||||
for idx, val in enumerate(value):
|
||||
tval[idx] = val
|
||||
value = tval
|
||||
for idx, val in value.items():
|
||||
index = int(idx)
|
||||
if len(multi) > index:
|
||||
multi[index] = val
|
||||
if isinstance(owner, dict):
|
||||
new_owner = getattr(owners, owner[idx])
|
||||
tiramisu_values.setowner(option, new_owner, index=index)
|
||||
else:
|
||||
log.error(_("master's len is lower than the slave variable ({})").format(path))
|
||||
else:
|
||||
if isinstance(owner, str):
|
||||
owner = unicode(owner)
|
||||
if not isinstance(owner, unicode):
|
||||
raise Exception(_('owner must be a string for {}').format(path))
|
||||
new_owner = getattr(owners, owner)
|
||||
try:
|
||||
config.setattr(path, value, force_permissive=True)
|
||||
except ValueError as e:
|
||||
if path == 'schedule.schedule.weekday' and 'schedule.schedule.monthday' in store:
|
||||
settings.remove('validator')
|
||||
config.setattr(path, value, force_permissive=True)
|
||||
config.setattr('schedule.schedule.monthday', store['schedule.schedule.monthday'], force_permissive=True)
|
||||
settings.append('validator')
|
||||
else:
|
||||
raise e
|
||||
tiramisu_values.setowner(option, new_owner)
|
||||
except ValueError as e:
|
||||
msg = str(e).decode('utf8')
|
||||
#msg = unicode(e)
|
||||
log.error(_('unable to load variable {} with value {}: {}').format(path, value, msg))
|
||||
settings[option].append('load_error')
|
||||
config.impl_set_information('error_msg_{}'.format(path), msg)
|
||||
config.impl_set_information('orig_value_{}'.format(path), value)
|
||||
except LeadershipError:
|
||||
# do not raise an error #8380
|
||||
pass
|
||||
try:
|
||||
config.impl_get_information('force_store_vars').remove(path)
|
||||
except (KeyError, ValueError) as err:
|
||||
pass
|
||||
|
||||
path_split = path.split('.')
|
||||
family_option = config.unwrap_from_path(namespace + '.' + path_split[1])
|
||||
settings.setpermissive(tuple(modes_level), opt=family_option)
|
||||
if len(path_split) == 4:
|
||||
parent_option = config.unwrap_from_path(namespace + '.' + path_split[1] + '.' + path_split[2])
|
||||
settings.setpermissive(tuple(modes_level), opt=parent_option)
|
||||
settings.setpermissive(tuple(modes_level), opt=option)
|
||||
setting = config.cfgimpl_get_settings()
|
||||
if 'auto_freeze' in setting[option] and is_inst == 'oui' and \
|
||||
not tiramisu_values.is_default_owner(option):
|
||||
setting[option].append('frozen')
|
||||
if namespace == 'creole':
|
||||
config.impl_set_information('unknown_options', unknown_options)
|
||||
config.impl_set_information('eol_version', eol_version)
|
||||
config.impl_set_information('orig_values', orig_values)
|
||||
|
||||
def config_load_values(config, namespace, eol_file=None, force_instanciate=None,
|
||||
force_load_owner=None):
|
||||
subconfig = getattr(config, namespace, None)
|
||||
if subconfig is None:
|
||||
return
|
||||
if eol_file is None:
|
||||
try:
|
||||
eol_file = config.impl_get_information(namespace)
|
||||
except AttributeError:
|
||||
raise Exception(_(u'config must have eol_file attribute'))
|
||||
else:
|
||||
config.impl_set_information(namespace, eol_file)
|
||||
if not isfile(eol_file):
|
||||
raise IOError(_(u'Can not find file {0}').format(
|
||||
eol_file))
|
||||
store = load_store(config, eol_file)
|
||||
config_load_store(config, namespace, store,
|
||||
force_instanciate=force_instanciate,
|
||||
force_load_owner=force_load_owner)
|
||||
|
||||
def config_get_values(config, namespace, check_mandatory=True, ignore_autofreeze=False):
|
||||
"""check_mandatory: allows to disable mandatory checking
|
||||
(i.e : when returning values for partial configuration in Zéphir)
|
||||
"""
|
||||
def _get_varname(path):
|
||||
if namespace == 'creole':
|
||||
value_name = path.split('.')[-1]
|
||||
else:
|
||||
value_name = path
|
||||
return value_name
|
||||
|
||||
subconfig = getattr(config, namespace)
|
||||
if check_mandatory:
|
||||
mandatory_errors = list(config.cfgimpl_get_values(
|
||||
).mandatory_warnings(force_permissive=True))
|
||||
if mandatory_errors != []:
|
||||
text = []
|
||||
for error in mandatory_errors:
|
||||
if not error.startswith(namespace + '.'):
|
||||
continue
|
||||
error = error.split('.')
|
||||
text.append(_(u"Mandatory variable '{0}' from family '{1}'"
|
||||
u" is not set !").format(unicode(error[-1]),
|
||||
unicode(error[1].capitalize())).encode('utf-8'))
|
||||
if text != []:
|
||||
raise PropertiesOptionError("\n".join(text), ('mandatory',))
|
||||
store = {}
|
||||
opt_values = subconfig.cfgimpl_get_values().get_modified_values()
|
||||
force_store_values = config.impl_get_information('force_store_values', None)
|
||||
|
||||
for path, own_val in opt_values.items():
|
||||
#skip variables not related to the current namespace
|
||||
if not path.startswith(namespace+'.'):
|
||||
continue
|
||||
if force_store_values and path in force_store_values:
|
||||
force_store_values.remove(path)
|
||||
store[_get_varname(path)] = {'val': own_val[1], 'owner': own_val[0]}
|
||||
if force_store_values:
|
||||
for path in force_store_values:
|
||||
varname = _get_varname(path)
|
||||
if varname not in store:
|
||||
try:
|
||||
store[varname] = {'val': config.getattr(path, force_permissive=True), 'owner': u'forced'}
|
||||
except PropertiesOptionError:
|
||||
pass
|
||||
if namespace == 'creole':
|
||||
#update with store values that have no known option
|
||||
store.update(config.impl_get_information('unknown_options', {}))
|
||||
return store
|
||||
|
||||
|
||||
def add_eol_version(store, eol_version=None):
|
||||
# store the version passed as a parameter (if >= 2.4.1), otherwise the system version
|
||||
if eol_version:
|
||||
if StrictVersion(eol_version) >= StrictVersion('2.4.1'):
|
||||
store['___version___'] = eol_version
|
||||
else:
|
||||
store['___version___'] = get_version('EOLE_RELEASE')
|
||||
|
||||
|
||||
def config_save_values(config, namespace, reload_config=True, eol_file=None, check_mandatory=True, eol_version=None):
|
||||
subconfig = getattr(config, namespace)
|
||||
if eol_file is not None:
|
||||
config.impl_set_information(namespace, eol_file)
|
||||
try:
|
||||
eol_file = config.impl_get_information(namespace)
|
||||
except AttributeError:
|
||||
raise Exception(_(u'config must have eol_file attribute'))
|
||||
store = config_get_values(config, namespace, check_mandatory)
|
||||
add_eol_version(store, eol_version)
|
||||
try:
|
||||
dirn = dirname(eol_file)
|
||||
if not isdir(dirn):
|
||||
makedirs(dirn)
|
||||
if not isfile(eol_file):
|
||||
fh = file(eol_file, 'w')
|
||||
fcntl.lockf(fh, fcntl.LOCK_EX)
|
||||
else:
|
||||
fh = file(eol_file, 'r+')
|
||||
fcntl.lockf(fh, fcntl.LOCK_EX)
|
||||
fh.truncate() # Here's where the magic happens #7073
|
||||
fh.write(cjson.encode(store))
|
||||
fh.close()
|
||||
except Exception as err:
|
||||
raise Exception(_(u"Error saving file: {0}").format(err))
|
||||
if client is not None and reload_config:
|
||||
try:
|
||||
client.reload_eol()
|
||||
#client.reload_config()
|
||||
except CreoleClientError:
|
||||
pass
|
||||
return True
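# Standalone sketch of the save path used above: take an exclusive lock on the
# target file, truncate it, then write the encoded store (#7073). It uses a
# file in /tmp and the stdlib json module instead of the real config.eol and
# cjson; only the lock/truncate/write pattern is being illustrated.
import fcntl
import json
from os.path import isfile

path = '/tmp/example-config.eol'
fh = open(path, 'r+' if isfile(path) else 'w')
fcntl.lockf(fh, fcntl.LOCK_EX)
fh.truncate()                      # drop any previous, possibly longer, content
fh.write(json.dumps({u'___version___': u'2.7.0'}))
fh.close()                         # closing the file releases the lock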
|
454
creole/lxml_parser.py
Normal file
|
@ -0,0 +1,454 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Parseur LXML des fichiers XML de collecte des variables EOLE
|
||||
"""
|
||||
from lxml import etree
|
||||
from copy import copy
|
||||
from .error import ConfigError
|
||||
from .utils import string_to_bool #, get_text_node
|
||||
from .config import VIRTMASTER
|
||||
from .dtd_parser import CONVERT_VALUE
|
||||
from pyeole.odict import OrderedDict
|
||||
|
||||
from .i18n import _
|
||||
|
||||
def parse_xml_file(filename, dtd, parse_all=True, test_duplicate=False):
|
||||
"""
|
||||
@param filename: nom du fichier xml source
|
||||
@return: structure de données permettant de créer les objets Eole
|
||||
"""
|
||||
try:
|
||||
document = etree.iterparse(filename, events=('end',), tag='creole')
|
||||
return _parse_root_node(document, dtd, parse_all, test_duplicate)
|
||||
except Exception as err:
|
||||
raise ConfigError(_(u"Error while parsing file {0}: {1}").format(filename, err))
|
||||
|
||||
def parse_string(xml_string, dtd, parse_all=True, test_duplicate=False):
|
||||
"""
|
||||
@param xml_string: dictionnaire xml sous forme de chaîne
|
||||
@return: structure de données permettant de créer les objets Eole
|
||||
"""
|
||||
try:
|
||||
root_node = etree.fromstring(xml_string)
|
||||
document = etree.iterwalk(root_node, events=('end',), tag='creole')
|
||||
return _parse_root_node(document, dtd, parse_all, test_duplicate)
|
||||
except Exception as err:
|
||||
raise ConfigError(_(u"Error while parsing: {0}").format(err))
|
||||
|
||||
def _parse_root_node(document, dtd, parse_all, test_duplicate=False):
|
||||
"""
|
||||
@param document: le noeud XML racine
|
||||
"""
|
||||
def _parse_container(node, options, container_name):
|
||||
for name in options:
|
||||
key_name = '{0}s'.format(name)
|
||||
ret.setdefault(key_name, [])
|
||||
values = parse_generic(node.findall(name),
|
||||
container_name, dtd, name)
|
||||
if values != []:
|
||||
ret[key_name].extend(values)
|
||||
|
||||
for unused, first_node in document:
|
||||
root_node = first_node
|
||||
|
||||
#check for duplicate variables within the same dictionary
|
||||
if test_duplicate:
|
||||
all_var_dict = []
|
||||
for var in root_node.findall('variables/family/variable'):
|
||||
name = var.attrib['name']
|
||||
if name in all_var_dict:
|
||||
raise ConfigError(_(u'Error, var {0} already exists in current dictionaries').format(name))
|
||||
all_var_dict.append(name)
|
||||
|
||||
ret = {'families': parse_families(root_node)}
|
||||
families_action = parse_actions(root_node, dtd)
|
||||
if len(families_action) != 0:
|
||||
ret['families_action'] = families_action
|
||||
|
||||
ret['containers'] = []
|
||||
## <files> tag (data on the master)
|
||||
file_node = root_node.findall('files')
|
||||
if file_node != []:
|
||||
if len(file_node) != 1:
|
||||
raise Exception(_(u"Error: extra <files> tags in dictionaries."))
|
||||
if parse_all:
|
||||
_parse_container(file_node[0], dtd['files']['options'], VIRTMASTER)
|
||||
ret['containers'].append({'name': VIRTMASTER, 'id': '1'})
|
||||
|
||||
## <containers> tag (data in the containers)
|
||||
containers_node = root_node.findall('containers')
|
||||
if containers_node != []:
|
||||
if len(containers_node) != 1:
|
||||
raise Exception(_(u"Error: extra <containers> tags in dictionaries."))
|
||||
container = containers_node[0]
|
||||
for container_node in container.getiterator('container'):
|
||||
name = container_node.attrib['name']
|
||||
if name in [VIRTMASTER, 'all']:
|
||||
raise Exception(_(u"Name '{0}' is not allowed in tag <container>.").format(name))
|
||||
if name in [cont['name'] for cont in ret['containers']]:
|
||||
raise Exception(
|
||||
_(u"There must be only one name '{0}' in a dictionary.").format(name))
|
||||
containerid = _get_optional(container_node, 'id')
|
||||
groupid = _get_optional(container_node, 'group')
|
||||
ret['containers'].append({'name': name, 'id': containerid,
|
||||
'group': groupid})
|
||||
if parse_all:
|
||||
_parse_container(container_node, dtd['container']['options'], name)
|
||||
if parse_all:
|
||||
all_node = container.findall('all')
|
||||
if all_node != []:
|
||||
if len(all_node) != 1:
|
||||
raise Exception(_(u"Error: extra <all> tags in dictionaries."))
|
||||
ret['containers'].append({'name': 'all'})
|
||||
_parse_container(all_node[0], dtd['all']['options'], 'all')
|
||||
|
||||
## constraint handling
|
||||
#FIXME
|
||||
ret.update(parse_constraints(root_node))
|
||||
|
||||
## variable group handling
|
||||
ret['groups'] = parse_groups(root_node)
|
||||
|
||||
## help handling
|
||||
ret['helps'] = parse_help(root_node)
|
||||
|
||||
## separator handling
|
||||
ret['separators'] = parse_separators(root_node)
|
||||
return ret
|
||||
|
||||
|
||||
def _get_boolean_attr(node, attr_name, default=False):
|
||||
"""
|
||||
Gestion spécifique pour les attributs booléens
|
||||
Ils sont à False par défaut
|
||||
"""
|
||||
val = node.get(attr_name)
|
||||
if default:
|
||||
return str(val).lower() != 'false'
|
||||
elif val is None:
|
||||
return None
|
||||
else:
|
||||
return str(val).lower() == 'true'
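# The attribute handling above is tri-state. A sketch with a local copy of the
# logic (default=False unless stated otherwise):
#   attribute missing          -> None   (or True when default=True)
#   attribute present, 'true'  -> True
#   attribute present, 'false' -> False
from lxml import etree

def sketch_boolean_attr(node, attr_name, default=False):
    val = node.get(attr_name)
    if default:
        return str(val).lower() != 'false'
    elif val is None:
        return None
    return str(val).lower() == 'true'

example_var = etree.fromstring("<variable name='x' hidden='True' exists='False'/>")
assert sketch_boolean_attr(example_var, 'hidden') is True
assert sketch_boolean_attr(example_var, 'multi') is None
assert sketch_boolean_attr(example_var, 'exists', default=True) is False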
|
||||
|
||||
|
||||
def _get_optional(node, attr_name):
|
||||
"""
|
||||
Valeur d'un attribut optionnel
|
||||
"""
|
||||
return node.get(attr_name)
|
||||
|
||||
|
||||
def _parse_value(varnode, attr='value'):
|
||||
"""
|
||||
récupération des valeurs d'une variable
|
||||
"""
|
||||
res = []
|
||||
for val in varnode.findall(attr):
|
||||
# FIX for <value></value> !
|
||||
if val.text is not None:
|
||||
res.append(val.text)
|
||||
else:
|
||||
res.append('')
|
||||
return res
|
||||
|
||||
def parse_value(varnode, name):
|
||||
"""
|
||||
récupération des valeurs d'une variable
|
||||
"""
|
||||
res = None
|
||||
for val in varnode.findall('value'):
|
||||
if val.text is not None:
|
||||
tval = val.text
|
||||
if res is not None:
#str to list (isinstance also covers unicode text returned by lxml)
if not isinstance(res, list):
res = [res]
|
||||
res.append(tval)
|
||||
else:
|
||||
res = tval
|
||||
return res
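# Sketch of the scalar-or-list behaviour of parse_value(): a single <value>
# yields a plain string, several <value> tags yield a list. Local copy of the
# logic applied to an invented fragment.
from lxml import etree

def sketch_parse_value(varnode):
    res = None
    for val in varnode.findall('value'):
        if val.text is None:
            continue
        if res is None:
            res = val.text
        elif isinstance(res, list):
            res.append(val.text)
        else:
            res = [res, val.text]
    return res

one = etree.fromstring("<variable><value>a</value></variable>")
two = etree.fromstring("<variable><value>a</value><value>b</value></variable>")
print(sketch_parse_value(one))   # 'a'
print(sketch_parse_value(two))   # ['a', 'b']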
|
||||
|
||||
def parse_generic(nodes, container, dtd, name, old_result=None):
|
||||
ret = []
|
||||
keys = dtd[name]
|
||||
for node in nodes:
|
||||
if old_result:
|
||||
result = copy(old_result)
|
||||
result['node_name'] = name
|
||||
elif container is not None:
|
||||
result = {'container': container}
|
||||
else:
|
||||
result = {}
|
||||
if keys['type']:
|
||||
if 'name' in keys['needs'] or 'name' in keys['optionals']:
|
||||
raise Exception('PCDATA + name')
|
||||
result['name'] = node.text
|
||||
for key, values in keys['needs'].items():
|
||||
value = node.attrib[key]
|
||||
value = CONVERT_VALUE.get(value, value)
|
||||
if values['values'] is not None and value not in values['values']:
|
||||
raise Exception(_(u"Value {0} not in {1}").format(value, values['values']))
|
||||
result[key] = value
|
||||
for key, values in keys['optionals'].items():
|
||||
value = node.attrib.get(key, values['default'])
|
||||
value = CONVERT_VALUE.get(value, value)
|
||||
if value != None:
|
||||
if values['values'] is not None and value not in values['values']:
|
||||
raise Exception(_(u"Value {0} not in {1}").format(value, values['values']))
|
||||
result[key] = value
|
||||
if keys['options'] == []:
|
||||
ret.append(result)
|
||||
else:
|
||||
for option in keys['options']:
|
||||
ret.extend(parse_generic(node.findall(option), container, dtd, option, result))
|
||||
return ret
|
||||
|
||||
|
||||
def parse_variables(var_node):
|
||||
"""
|
||||
processing of the variables
@param var_node: <variables> node
|
||||
"""
|
||||
result = OrderedDict()
|
||||
for var in var_node.getiterator('variable'):
|
||||
# Default variables are handled in creole.loader
|
||||
hidden = _get_boolean_attr(var, 'hidden')
|
||||
multi = _get_boolean_attr(var, 'multi')
|
||||
redefine = _get_boolean_attr(var, 'redefine')
|
||||
mandatory = _get_boolean_attr(var, 'mandatory')
|
||||
remove_check = _get_boolean_attr(var, 'remove_check')
|
||||
remove_condition = _get_boolean_attr(var, 'remove_condition')
|
||||
exists = _get_boolean_attr(var, 'exists', default=True)
|
||||
disabled = _get_boolean_attr(var, 'disabled', default=False)
|
||||
auto_freeze = _get_boolean_attr(var, 'auto_freeze')
|
||||
auto_save = _get_boolean_attr(var, 'auto_save')
|
||||
mode = _get_optional(var, 'mode')
|
||||
name = var.attrib['name']
|
||||
value = parse_value(var, var.attrib['name'])
|
||||
typ = _get_optional(var, 'type')
|
||||
if typ == None:
|
||||
typ = 'string'
|
||||
desc = _get_optional(var, 'description')
|
||||
if type(desc) == unicode:
|
||||
desc = desc.encode('utf-8')
|
||||
result[name] = dict(value=value,
|
||||
type=typ,
|
||||
description=desc,
|
||||
hidden=hidden,
|
||||
multi=multi,
|
||||
auto='',
|
||||
redefine=redefine,
|
||||
exists=exists,
|
||||
auto_freeze=auto_freeze,
|
||||
auto_save=auto_save,
|
||||
mode=mode,
|
||||
mandatory=mandatory,
|
||||
disabled=disabled,
|
||||
remove_check=remove_check,
|
||||
remove_condition=remove_condition
|
||||
)
|
||||
return result
|
||||
|
||||
def parse_families(var_node):
|
||||
"""
|
||||
processing of the families
@param var_node: <variables> node
|
||||
"""
|
||||
result = OrderedDict()
|
||||
for family in var_node.findall('variables/family'): #: getiterator('family'):
|
||||
family_name = family.attrib['name']
|
||||
if family_name in result:
|
||||
raise Exception(_(u"Family {0} is set several times.").format(family_name))
|
||||
hidden = _get_boolean_attr(family, 'hidden')
|
||||
# FIXME: mode='' used to be accepted with domparser
|
||||
mode = _get_optional(family, 'mode')
|
||||
icon = _get_optional(family, 'icon')
|
||||
variables = parse_variables(family)
|
||||
result[family_name] = {'hidden': hidden,
|
||||
'mode': mode,
|
||||
'vars': variables,
|
||||
'icon': icon
|
||||
}
|
||||
return result
|
||||
|
||||
|
||||
def parse_actions(root_node, dtd):
|
||||
"""
|
||||
processing of the action families
@param root_node: the root XML node
|
||||
"""
|
||||
result = OrderedDict()
|
||||
def _parse_action(node, options):
|
||||
parse = {}
|
||||
for name in options:
|
||||
key_name = '{0}'.format(name)
|
||||
parse.setdefault(key_name, [])
|
||||
values = parse_generic(node.findall(name), None, dtd, name)
|
||||
if values != []:
|
||||
parse[key_name].extend(values)
|
||||
parse['type'] = node.get("type", "custom")
|
||||
parse['title'] = node.get('title')
|
||||
parse['description'] = node.get('description')
|
||||
image = node.get('image')
|
||||
if image:
|
||||
parse['image'] = image
|
||||
url = node.get('url', None)
|
||||
if url:
|
||||
parse['url'] = url
|
||||
return parse
|
||||
|
||||
for family in root_node.findall('family_action'): #: getiterator('family'):
|
||||
family_name = family.attrib['name']
|
||||
if family_name in result:
|
||||
raise Exception(_(u"Action Family {0} is set several times.").format(family_name))
|
||||
description = _get_optional(family, 'description')
|
||||
color = _get_optional(family, 'color')
|
||||
image = _get_optional(family, 'image')
|
||||
## balise <action>
|
||||
action_node = family.findall('action')
|
||||
if action_node != [] and len(action_node) != 1:
|
||||
raise Exception(_(u"Error: extra <action> tags in dictionaries."))
|
||||
action = _parse_action(action_node[0], dtd['action']['options'])
|
||||
result[family_name] = {'name': family_name,
|
||||
'description': description,
|
||||
'color': color,
|
||||
'image': image,
|
||||
'action': action
|
||||
}
|
||||
return result
|
||||
|
||||
def parse_constraints(node):
|
||||
"""
|
||||
@param node: constraints node
|
||||
"""
|
||||
constraints = {'checks' : parse_funcs(node,'check'),
|
||||
'fills' : parse_funcs(node,'fill'),
|
||||
'autos' : parse_funcs(node,'auto'),
|
||||
'conditions' : parse_conditions(node)
|
||||
}
|
||||
return constraints
|
||||
|
||||
|
||||
def _parse_param(param_node):
|
||||
"""
|
||||
processing of a function's parameters
|
||||
"""
|
||||
return {'name' : _get_optional(param_node, 'name'),
|
||||
'type' : _get_optional(param_node, 'type'),
|
||||
'value' : param_node.text,
|
||||
'optional' : _get_optional(param_node, 'optional'),
|
||||
'hidden' : _get_optional(param_node, 'hidden'),
|
||||
}
|
||||
|
||||
|
||||
def parse_funcs(node, func_type):
|
||||
"""
|
||||
@param node: functions node
|
||||
@param func_type: TagName of the functions to find
|
||||
@return: {target: [(param_name, _parse_params('param'))]}
|
||||
"""
|
||||
# check functions
|
||||
funcs = {}
|
||||
for func in node.findall('constraints/%s' % func_type):
|
||||
# read the parameters
|
||||
params = []
|
||||
#if there is a <target> tag
targets = _parse_value(func, 'target')
#otherwise it is a target= attribute
if not targets:
#wrap in a list because <target> returns a list
targets = [_get_optional(func, 'target')]
|
||||
level = _get_optional(func, 'level')
|
||||
if not level:
|
||||
level = 'error'
|
||||
for target in targets:
|
||||
if target is not None:
|
||||
for param in func.getiterator('param'):
|
||||
params.append(_parse_param(param))
|
||||
funcs.setdefault(target, []).append((func.attrib['name'],
|
||||
params, level))
|
||||
return funcs
|
||||
|
||||
|
||||
def parse_conditions(node):
|
||||
"""
|
||||
@param node: functions node
|
||||
"""
|
||||
# check functions
|
||||
funcs = {}
|
||||
for func in node.getiterator('condition'):
|
||||
# read the parameters
|
||||
targets = []
|
||||
family_targets = []
|
||||
list_targets = []
|
||||
# function parameters
|
||||
params = [_parse_param(param)
|
||||
for param in func.getiterator('param')]
|
||||
# dependency targets
|
||||
for target in func.getiterator('target'):
|
||||
ttype = target.get('type')
|
||||
optional = target.get('optional', False)
|
||||
if ttype == 'family':
|
||||
family_targets.append((target.text, optional))
|
||||
elif ttype in ['variable', None]:
|
||||
targets.append((target.text, optional))
|
||||
else:
|
||||
if ttype.endswith('list'):
|
||||
#strip the 'list' suffix from ttype
|
||||
list_targets.append((ttype[:-4], target.text, optional))
|
||||
else:
|
||||
raise Exception(_(u'Unknown type {0} for condition target.').format(ttype))
|
||||
funcdef = {'name': func.attrib['name'], 'family': family_targets,
|
||||
'variable': targets, 'list': list_targets, 'param': params,
|
||||
'fallback': _get_boolean_attr(func, 'fallback')}
|
||||
source = _get_optional(func, 'source')
|
||||
if source == None:
|
||||
raise Exception(_(u'Impossible condition without source for {0}.').format(funcdef))
|
||||
funcs.setdefault(source, []).append(funcdef)
|
||||
return funcs
|
||||
|
||||
|
||||
def parse_groups(node):
|
||||
"""
|
||||
Processing of the variable groups
|
||||
"""
|
||||
result = {}
|
||||
for group in node.findall('constraints/group'):
|
||||
slaves = _parse_value(group, 'slave')
|
||||
result[group.attrib['master']] = slaves
|
||||
return result
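# Sketch of the master/slave group mapping returned by parse_groups(), on an
# invented <constraints> fragment: {master_name: [slave_names]}.
from lxml import etree

group_xml = """
<creole>
  <constraints>
    <group master='adresse_ip_eth0'>
      <slave>adresse_netmask_eth0</slave>
      <slave>adresse_broadcast_eth0</slave>
    </group>
  </constraints>
</creole>
"""
group_node = etree.fromstring(group_xml)
groups = {}
for group in group_node.findall('constraints/group'):
    groups[group.attrib['master']] = [slave.text for slave in group.findall('slave')]
print(groups)  # {'adresse_ip_eth0': ['adresse_netmask_eth0', 'adresse_broadcast_eth0']}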
|
||||
|
||||
|
||||
def parse_help(node):
|
||||
"""
|
||||
Processing of the help texts
|
||||
"""
|
||||
var_help = {}
|
||||
for var in node.findall('help/variable'):
|
||||
name = var.attrib['name']
|
||||
try:
|
||||
var_help[name] = var.text.strip()
|
||||
except AttributeError:
|
||||
raise Exception(_(u"Invalid help for variable {0}.").format(name))
|
||||
fam_help = {}
|
||||
for var in node.findall('help/family'):
|
||||
name = var.attrib['name']
|
||||
try:
|
||||
fam_help[name] = var.text.strip()
|
||||
except AttributeError:
|
||||
raise Exception(_(u"Invalid help for family {0}").format(name))
|
||||
return {'variables':var_help, 'families': fam_help}
|
||||
|
||||
|
||||
def parse_separators(node):
|
||||
"""dictionnaire des séparateurs, format {'variable':'text'}
|
||||
variable : nom de la première variable après le sépateur"""
|
||||
var_sep = {}
|
||||
for var in node.findall('variables/separators/separator'):
|
||||
if not var.text:
|
||||
libelle = ''
|
||||
else:
|
||||
libelle = var.text.strip()
|
||||
var_sep[var.attrib['name']] = (libelle, _get_boolean_attr(var, 'never_hidden'))
|
||||
return var_sep
|
||||
|
148
creole/maj.py
Normal file
|
@ -0,0 +1,148 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
###########################################################################
|
||||
#
|
||||
# Eole NG - 2010
|
||||
# Copyright Pole de Competence Eole (Ministere Education - Academie Dijon)
|
||||
# Licence CeCill http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# eole@ac-dijon.fr
|
||||
#
|
||||
###########################################################################
|
||||
"""
|
||||
Library for managing updates
|
||||
"""
|
||||
from os import system
|
||||
from dateutil import parser
|
||||
from pyeole.schedule import ManageSchedule, list_once, add_schedule, \
|
||||
del_schedule, apply_schedules, DAY_TO_STRING
|
||||
from pyeole.process import system_out
|
||||
from .client import CreoleClient
|
||||
|
||||
from .i18n import _
|
||||
|
||||
# information file for the one-shot update
|
||||
DIFF_FILENAME = '/var/lib/eole/reports/maj-diff.txt'
|
||||
|
||||
#########################################
|
||||
## Weekly update (maj_auto) ##
|
||||
#########################################
|
||||
|
||||
client = CreoleClient()
|
||||
|
||||
def maj_enabled():
|
||||
"""
|
||||
checks whether the automatic update is enabled
|
||||
"""
|
||||
return client.get('/schedule/majauto/day') == 'weekly'
|
||||
|
||||
def get_maj_day():
|
||||
"""
|
||||
returns the day and time of the updates
for example:
|
||||
{'hour': 5, 'minute': 4, 'weekday': 'vendredi'}
|
||||
"""
|
||||
shed = client.get('/schedule/schedule')
|
||||
shed.pop('monthday')
|
||||
shed['weekday'] = DAY_TO_STRING[shed['weekday']]
|
||||
return shed
|
||||
|
||||
def enable_maj_auto():
|
||||
"""
|
||||
enables the weekly update
|
||||
"""
|
||||
if not maj_enabled():
|
||||
manage_schedule = ManageSchedule()
|
||||
manage_schedule.add('majauto', 'weekly', 'post')
|
||||
manage_schedule.save()
|
||||
apply_schedules()
|
||||
|
||||
def disable_maj_auto():
|
||||
"""
|
||||
disables the weekly update
|
||||
"""
|
||||
if maj_enabled():
|
||||
manage_schedule = ManageSchedule()
|
||||
manage_schedule.delete('majauto')
|
||||
manage_schedule.save()
|
||||
apply_schedules()
|
||||
|
||||
|
||||
#########################################
|
||||
## One-shot update (schedule once) ##
|
||||
#########################################
|
||||
|
||||
def maj_once_enabled():
|
||||
"""
|
||||
checks whether the one-shot update is enabled
|
||||
"""
|
||||
return 'majauto' in list_once('post')
|
||||
|
||||
def enable_maj_once():
|
||||
"""
|
||||
enables the 'once' update
|
||||
"""
|
||||
if not maj_once_enabled():
|
||||
cancel_maj_differee()
|
||||
add_schedule('once', 'post', 'majauto')
|
||||
write_diff(True, 'ce soir')
|
||||
return True
|
||||
|
||||
def disable_maj_once():
|
||||
"""
|
||||
disables the 'once' update
|
||||
"""
|
||||
if maj_once_enabled():
|
||||
del_schedule('once', 'post', 'majauto')
|
||||
|
||||
|
||||
#########################################
|
||||
## Deferred update (maj_differee) ##
|
||||
#########################################
|
||||
|
||||
def write_diff(enable, heure=None):
|
||||
""" ecrit les informations du gestionnaire de mise a jour
|
||||
dans le fichier de config de l'ead """
|
||||
fic = file(DIFF_FILENAME, 'w')
|
||||
if enable:
|
||||
fic.write(_(u'An update is scheduled at {0}').format(heure))
|
||||
else:
|
||||
fic.write("")
|
||||
fic.close()
|
||||
|
||||
def cancel_maj_differee():
|
||||
"""
|
||||
cancels the scheduled deferred updates
|
||||
"""
|
||||
disable_maj_once()
|
||||
cmd = """for i in `grep -l "Maj-Auto" /var/spool/cron/atjobs/* 2>/dev/null`; do rm -f $i ; done;"""
|
||||
system(cmd)
|
||||
write_diff(False)
|
||||
return True
|
||||
|
||||
def prog_maj_differee(heure, options='-R'):
|
||||
"""
|
||||
Schedules an update deferred by a few hours
It is launched through the at command as the root user
options: options passed to Maj-Auto
|
||||
"""
|
||||
if heure == 'once':
|
||||
return enable_maj_once()
|
||||
# remove any other deferred updates
|
||||
cancel_maj_differee()
|
||||
stdin = "rm -f %s\nMaj-Auto %s\n" % (DIFF_FILENAME, options)
|
||||
env_path = {'PATH': '/usr/share/eole:/usr/share/eole/sbin:/usr/local/sbin:'
|
||||
'/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
|
||||
'LC_ALL': 'fr_FR.UTF-8'}
|
||||
ret = system_out(['/usr/bin/at', 'now', '+', str(heure), 'hours'], stdin=stdin, env=env_path)
|
||||
if ret[0] != 0:
|
||||
return False
|
||||
scheduled_maj = " ".join(ret[2].splitlines()[1].split()[3:7])
|
||||
scheduled_maj = parser.parse(scheduled_maj)
|
||||
scheduled_day = "{0:0=2d}".format(scheduled_maj.day)
|
||||
scheduled_month = "{0:0=2d}".format(scheduled_maj.month)
|
||||
scheduled_year = "{0:0=2d}".format(scheduled_maj.year)
|
||||
scheduled_hour = "{0:0=2d}".format(scheduled_maj.hour)
|
||||
scheduled_minute = "{0:0=2d}".format(scheduled_maj.minute)
|
||||
scheduled_maj = _(u'{0} the {1}').format(":".join((scheduled_hour, scheduled_minute)), \
|
||||
"/".join((scheduled_day, scheduled_month, scheduled_year)))
|
||||
write_diff(True , scheduled_maj)
|
||||
return True
|
704
creole/objspace.py
Normal file
|
@ -0,0 +1,704 @@
|
|||
"""
|
||||
Creole flattener. Takes a bunch of Creole XML files dispatched in different folders
as input and outputs a human readable flattened XML
|
||||
|
||||
Sample usage::
|
||||
|
||||
>>> from creole.objspace import CreoleObjSpace
|
||||
>>> eolobj = CreoleObjSpace('/usr/share/creole/creole.dtd')
|
||||
>>> eolobj.create_or_populate_from_xml('creole', ['/usr/share/eole/creole/dicos'])
|
||||
>>> eolobj.space_visitor()
|
||||
>>> eolobj.save('/tmp/creole_flatened_output.xml')
|
||||
|
||||
The CreoleObjSpace
|
||||
|
||||
- loads the XML into an internal CreoleObjSpace representation
|
||||
- visits/annotates the objects
|
||||
- dumps the object space as XML output into a single XML target
|
||||
|
||||
The visit/annotation stage is a complex step that corresponds to the Creole
|
||||
procedures.
|
||||
|
||||
For example, "a variable is redefined and shall be moved to another family"
means that a variable1 = Variable() object living under the family1 parent in the object space
has to be moved to family2. The visit procedure changes variable1's parent in the object space.
|
||||
"""
|
||||
from collections import OrderedDict
|
||||
from lxml.etree import Element, SubElement # pylint: disable=E0611
|
||||
import sys
|
||||
from json import dump
|
||||
|
||||
|
||||
from .i18n import _
|
||||
from .xmlreflector import XMLReflector, HIGH_COMPATIBILITY
|
||||
from .annotator import ERASED_ATTRIBUTES, ActionAnnotator, ContainerAnnotator, SpaceAnnotator
|
||||
from .utils import normalize_family
|
||||
from .error import CreoleOperationError, SpaceObjShallNotBeUpdated, CreoleDictConsistencyError
|
||||
|
||||
# CreoleObjSpace's elements like 'family' or 'slave', that shall be forced to the Redefinable type
|
||||
FORCE_REDEFINABLES = ('family', 'slave', 'container', 'disknod', 'variables', 'family_action')
|
||||
# CreoleObjSpace's elements that shall be forced to the UnRedefinable type
|
||||
FORCE_UNREDEFINABLES = ('value', 'input', 'profile', 'ewtapp', 'tag', 'saltaction')
|
||||
# CreoleObjSpace's elements that shall be set to the UnRedefinable type
|
||||
UNREDEFINABLE = ('multi', 'type')
|
||||
|
||||
PROPERTIES = ('hidden', 'frozen', 'auto_freeze', 'auto_save', 'force_default_on_freeze',
|
||||
'force_store_value', 'disabled', 'mandatory')
|
||||
CONVERT_PROPERTIES = {'auto_save': ['force_store_value'], 'auto_freeze': ['force_store_value', 'auto_freeze']}
|
||||
|
||||
RENAME_ATTIBUTES = {'description': 'doc'}
|
||||
|
||||
#TYPE_TARGET_CONDITION = ('variable', 'family')
|
||||
|
||||
# _____________________________________________________________________________
|
||||
# special types definitions for the Object Space's internal representation
|
||||
class RootCreoleObject(object):
|
||||
""
|
||||
|
||||
|
||||
class CreoleObjSpace(object):
|
||||
"""DOM XML reflexion free internal representation of a Creole Dictionary
|
||||
"""
|
||||
choice = type('Choice', (RootCreoleObject,), OrderedDict())
|
||||
# Creole ObjectSpace's Master variable class type
|
||||
Master = type('Master', (RootCreoleObject,), OrderedDict())
|
||||
"""
|
||||
This Atom type stands for a singleton, that is,
an Object Space atom object is present only once in the
object space's tree
|
||||
"""
|
||||
Atom = type('Atom', (RootCreoleObject,), OrderedDict())
|
||||
"A variable that can't be redefined"
|
||||
Redefinable = type('Redefinable', (RootCreoleObject,), OrderedDict())
|
||||
"A variable can be redefined"
|
||||
UnRedefinable = type('UnRedefinable', (RootCreoleObject,), OrderedDict())
|
||||
|
||||
|
||||
def __init__(self, dtdfilename): # pylint: disable=R0912
|
||||
self.index = 0
|
||||
class ObjSpace(object): # pylint: disable=R0903
|
||||
"""
|
||||
Base object space
|
||||
"""
|
||||
self.space = ObjSpace()
|
||||
self.xmlreflector = XMLReflector()
|
||||
self.xmlreflector.parse_dtd(dtdfilename)
|
||||
self.redefine_variables = None
|
||||
self.probe_variables = []
|
||||
|
||||
# elt container's attrs list
|
||||
self.container_elt_attr_list = [] #
|
||||
# ['variable', 'separator', 'family']
|
||||
self.forced_text_elts = set()
|
||||
# ['disknod', 'slave', 'target', 'service', 'package', 'ip', 'value', 'tcpwrapper',
|
||||
# 'interface', 'input', 'port']
|
||||
self.forced_text_elts_as_name = set(['choice'])
|
||||
self.forced_choice_option = {}
|
||||
self.paths = Path()
|
||||
self.list_conditions = {}
|
||||
|
||||
self.booleans_attributs = []
|
||||
|
||||
for elt in self.xmlreflector.dtd.iterelements():
|
||||
attrs = {}
|
||||
clstype = self.UnRedefinable
|
||||
atomic = True
|
||||
forced_text_elt = False
|
||||
if elt.type == 'mixed':
|
||||
forced_text_elt = True
|
||||
if elt.name == 'container':
|
||||
self.container_elt_attr_list = [elt.content.left.name]
|
||||
self.parse_dtd_right_left_elt(elt.content)
|
||||
for attr in elt.iterattributes():
|
||||
atomic = False
|
||||
if attr.default_value:
|
||||
if attr.default_value == 'True':
|
||||
default_value = True
|
||||
elif attr.default_value == 'False':
|
||||
default_value = False
|
||||
else:
|
||||
default_value = attr.default_value
|
||||
attrs[attr.name] = default_value
|
||||
if not attr.name.endswith('_type'):
|
||||
values = list(attr.itervalues())
|
||||
if values != []:
|
||||
self.forced_choice_option.setdefault(elt.name, {})[attr.name] = values
|
||||
|
||||
if attr.name == 'redefine':
|
||||
clstype = self.Redefinable
|
||||
if attr.name == 'name' and forced_text_elt is True:
|
||||
self.forced_text_elts.add(elt.name)
|
||||
forced_text_elt = False
|
||||
|
||||
if set(attr.itervalues()) == set(['True', 'False']):
|
||||
self.booleans_attributs.append(attr.name)
|
||||
|
||||
if forced_text_elt is True:
|
||||
self.forced_text_elts_as_name.add(elt.name)
|
||||
|
||||
if elt.name in FORCE_REDEFINABLES:
|
||||
clstype = self.Redefinable
|
||||
elif elt.name in FORCE_UNREDEFINABLES:
|
||||
clstype = self.UnRedefinable
|
||||
elif atomic:
|
||||
clstype = self.Atom
|
||||
|
||||
# Creole ObjectSpace class types, it enables us to create objects like:
|
||||
# Service_restriction(), Ip(), Interface(), Host(), Fstab(), Package(), Disknod(),
|
||||
# File(), Variables(), Family(), Variable(), Separators(), Separator(), Value(),
|
||||
# Constraints()... and so on. Creole ObjectSpace is an object reflection of
|
||||
# the XML elements
|
||||
setattr(self, elt.name, type(elt.name.capitalize(), (clstype,), attrs))
|
||||
|
||||
def parse_dtd_right_left_elt(self, elt):
|
||||
if elt.right.type == 'or':
|
||||
self.container_elt_attr_list.append(elt.right.left.name)
|
||||
self.parse_dtd_right_left_elt(elt.right)
|
||||
else:
|
||||
self.container_elt_attr_list.append(elt.right.name)
|
||||
|
||||
def _convert_boolean(self, value): # pylint: disable=R0201
|
||||
"""Boolean coercion. The Creole XML may contain srings like `True` or `False`
|
||||
"""
|
||||
if isinstance(value, bool):
|
||||
return value
|
||||
if value == 'True':
|
||||
return True
|
||||
elif value == 'False':
|
||||
return False
|
||||
else:
|
||||
raise TypeError(_('{} is not True or False').format(value).encode('utf8')) # pragma: no cover
|
||||
|
||||
def _is_already_exists(self, name, space, child, namespace):
|
||||
if isinstance(space, self.family): # pylint: disable=E1101
|
||||
if namespace != 'creole':
|
||||
name = space.path + '.' + name
|
||||
return self.paths.path_is_defined(name)
|
||||
if child.tag in ['family', 'family_action']:
|
||||
norm_name = normalize_family(name)
|
||||
else:
|
||||
norm_name = name
|
||||
return norm_name in getattr(space, child.tag, {})
|
||||
|
||||
def _translate_in_space(self, name, family, variable, namespace):
|
||||
if not isinstance(family, self.family): # pylint: disable=E1101
|
||||
if variable.tag in ['family', 'family_action']:
|
||||
norm_name = normalize_family(name)
|
||||
else:
|
||||
norm_name = name
|
||||
return getattr(family, variable.tag)[norm_name]
|
||||
if namespace == 'creole':
|
||||
path = name
|
||||
else:
|
||||
path = family.path + '.' + name
|
||||
old_family_name = self.paths.get_variable_family_name(path)
|
||||
if normalize_family(family.name) == old_family_name:
|
||||
return getattr(family, variable.tag)[name]
|
||||
old_family = self.space.variables['creole'].family[old_family_name] # pylint: disable=E1101
|
||||
variable_obj = old_family.variable[name]
|
||||
del old_family.variable[name]
|
||||
if 'variable' not in vars(family):
|
||||
family.variable = OrderedDict()
|
||||
family.variable[name] = variable_obj
|
||||
self.paths.append('variable', name, namespace, family.name, variable_obj)
|
||||
return variable_obj
|
||||
|
||||
def remove_check(self, name): # pylint: disable=C0111
|
||||
if hasattr(self.space, 'constraints') and hasattr(self.space.constraints, 'check'):
|
||||
remove_checks = []
|
||||
for idx, check in enumerate(self.space.constraints.check): # pylint: disable=E1101
|
||||
if hasattr(check, 'target') and check.target == name:
|
||||
remove_checks.append(idx)
|
||||
|
||||
remove_checks = list(set(remove_checks))
|
||||
remove_checks.sort(reverse=True)
|
||||
for idx in remove_checks:
|
||||
self.space.constraints.check.pop(idx) # pylint: disable=E1101
|
||||
def remove_condition(self, name): # pylint: disable=C0111
|
||||
for idx, condition in enumerate(self.space.constraints.condition): # pylint: disable=E1101
|
||||
remove_targets = []
|
||||
if hasattr(condition, 'target'):
|
||||
for target_idx, target in enumerate(condition.target):
|
||||
if target.name == name:
|
||||
remove_targets.append(target_idx)
|
||||
remove_targets = list(set(remove_targets))
|
||||
remove_targets.sort(reverse=True)
|
||||
for idx in remove_targets:
|
||||
del condition.target[idx]
|
||||
|
||||
def create_or_update_space_object(self, subspace, space, child, namespace):
|
||||
"""Creates or retrieves the space object that corresponds
|
||||
to the `child` XML object
|
||||
|
||||
Two attributes of the `child` XML object are important:
|
||||
|
||||
- with the `redefine` boolean flag attribute we know whether
|
||||
the corresponding space object shall be created or updated
|
||||
|
||||
- `True` means that the corresponding space object shall be updated
|
||||
- `False` means that the corresponding space object shall be created
|
||||
|
||||
- with the `exists` boolean flag attribute we know whether
|
||||
the corresponding space object shall be created
|
||||
(or nothing -- that is the space object isn't modified)
|
||||
|
||||
- `True` means that the corresponding space object shall be created
|
||||
- `False` means that the corresponding space object is not updated
|
||||
|
||||
In the special case `redefine` is True and `exists` is False,
|
||||
we create the corresponding space object if it doesn't exist
|
||||
and we update it if it exists.
|
||||
|
||||
:return: the corresponding space object of the `child` XML object
|
||||
"""
|
||||
if child.tag in self.forced_text_elts_as_name:
|
||||
name = child.text
|
||||
else:
|
||||
name = subspace['name']
|
||||
if self._is_already_exists(name, space, child, namespace):
|
||||
if child.tag in FORCE_REDEFINABLES:
|
||||
redefine = self._convert_boolean(subspace.get('redefine', True))
|
||||
else:
|
||||
redefine = self._convert_boolean(subspace.get('redefine', False))
|
||||
exists = self._convert_boolean(subspace.get('exists', True))
|
||||
if redefine is True:
|
||||
return self._translate_in_space(name, space, child, namespace)
|
||||
elif exists is False:
|
||||
raise SpaceObjShallNotBeUpdated()
|
||||
else:
|
||||
raise CreoleDictConsistencyError(_('Already present in another XML file, {} '
|
||||
'cannot be re-created').format(name).encode('utf8'))
|
||||
else:
|
||||
redefine = self._convert_boolean(subspace.get('redefine', False))
|
||||
exists = self._convert_boolean(subspace.get('exists', False))
|
||||
if redefine is False or exists is True:
|
||||
return getattr(self, child.tag)()
|
||||
else:
|
||||
raise CreoleDictConsistencyError(_('Redefined object: '
|
||||
'{} does not exist yet').format(name).encode('utf8'))
|
||||
|
||||
def generate_creoleobj(self, child, space, namespace):
|
||||
"""
|
||||
instantiates or creates Creole Object Subspace objects
|
||||
"""
|
||||
if issubclass(getattr(self, child.tag), self.Redefinable):
|
||||
creoleobj = self.create_or_update_space_object(child.attrib, space, child, namespace)
|
||||
else:
|
||||
# instantiates an object from the CreoleObjSpace's built-in types
|
||||
# example : child.tag = constraints -> a self.Constraints() object is created
|
||||
creoleobj = getattr(self, child.tag)()
|
||||
# this Atom instance has to be a singleton here
|
||||
# we do not re-create it, we reuse it
|
||||
if isinstance(creoleobj, self.Atom) and child.tag in vars(space):
|
||||
creoleobj = getattr(space, child.tag)
|
||||
self.create_tree_structure(space, child, creoleobj)
|
||||
return creoleobj
|
||||
|
||||
def create_tree_structure(self, space, child, creoleobj): # pylint: disable=R0201
|
||||
"""
|
||||
Builds the tree structure of the object space.
|
||||
Here we set container attributes so that they can be populated later on,
|
||||
for example::
|
||||
|
||||
space = Family()
|
||||
space.variable = OrderedDict()
|
||||
another example:
|
||||
space = Variable()
|
||||
space.value = list()
|
||||
"""
|
||||
if child.tag not in vars(space):
|
||||
if isinstance(creoleobj, self.Redefinable):
|
||||
setattr(space, child.tag, OrderedDict())
|
||||
elif isinstance(creoleobj, self.UnRedefinable):
|
||||
setattr(space, child.tag, [])
|
||||
elif isinstance(creoleobj, self.Atom):
|
||||
pass
|
||||
else: # pragma: no cover
|
||||
raise CreoleOperationError(_("Creole object {} "
|
||||
"has a wrong type").format(type(creoleobj)))
|
||||
|
||||
def _add_to_tree_structure(self, creoleobj, space, child): # pylint: disable=R0201
|
||||
if isinstance(creoleobj, self.Redefinable):
|
||||
name = creoleobj.name
|
||||
if child.tag == 'family' or child.tag == 'family_action':
|
||||
name = normalize_family(name)
|
||||
getattr(space, child.tag)[name] = creoleobj
|
||||
elif isinstance(creoleobj, self.UnRedefinable):
|
||||
getattr(space, child.tag).append(creoleobj)
|
||||
else:
|
||||
setattr(space, child.tag, creoleobj)
|
||||
|
||||
def _set_text_to_obj(self, child, creoleobj):
|
||||
if child.text is None:
|
||||
text = None
|
||||
else:
|
||||
text = child.text.strip()
|
||||
if text:
|
||||
if child.tag in self.forced_text_elts_as_name:
|
||||
creoleobj.name = text
|
||||
else:
|
||||
creoleobj.text = text
|
||||
|
||||
def _set_xml_attributes_to_obj(self, child, creoleobj):
|
||||
redefine = self._convert_boolean(child.attrib.get('redefine', False))
|
||||
has_value = hasattr(creoleobj, 'value')
|
||||
if HIGH_COMPATIBILITY and has_value:
|
||||
has_value = len(child) != 1 or child[0].text != None
|
||||
if (redefine is True and child.tag == 'variable' and has_value
|
||||
and len(child) != 0):
|
||||
del creoleobj.value
|
||||
for attr, val in child.attrib.items():
|
||||
if redefine and attr in UNREDEFINABLE:
|
||||
# UNREDEFINABLE concerns only 'variable' node so we can fix name
|
||||
# to child.attrib['name']
|
||||
name = child.attrib['name']
|
||||
raise CreoleDictConsistencyError(_("cannot redefine attribute {} for variable {}").format(attr, name).encode('utf8'))
|
||||
if isinstance(getattr(creoleobj, attr, None), bool):
|
||||
if val == 'False':
|
||||
val = False
|
||||
elif val == 'True':
|
||||
val = True
|
||||
else: # pragma: no cover
|
||||
raise CreoleOperationError(_('value for {} must be True or False, '
|
||||
'not {}').format(attr, val).encode('utf8'))
|
||||
if not (attr == 'name' and getattr(creoleobj, 'name', None) != None):
|
||||
setattr(creoleobj, attr, val)
|
||||
|
||||
def _creoleobj_tree_visitor(self, child, creoleobj, namespace):
|
||||
"""Creole object tree manipulations
|
||||
"""
|
||||
if child.tag == 'variable' and child.attrib.get('remove_check', False):
|
||||
self.remove_check(creoleobj.name)
|
||||
if child.tag == 'variable' and child.attrib.get('remove_condition', False):
|
||||
self.remove_condition(creoleobj.name)
|
||||
if child.tag in ['auto', 'fill', 'check']:
|
||||
variable_name = child.attrib['target']
|
||||
# XXX: does not work for variables outside the 'creole' namespace or inside a master/slave group
|
||||
if variable_name in self.redefine_variables:
|
||||
creoleobj.redefine = True
|
||||
else:
|
||||
creoleobj.redefine = False
|
||||
if not hasattr(creoleobj, 'index'):
|
||||
creoleobj.index = self.index
|
||||
if child.tag in ['auto', 'fill', 'condition', 'check', 'action']:
|
||||
creoleobj.namespace = namespace
|
||||
|
||||
def xml_parse_document(self, document, space, namespace, is_in_family=False):
|
||||
"""Parses a Creole XML file
|
||||
populates the CreoleObjSpace
|
||||
"""
|
||||
family_names = []
|
||||
for child in document:
|
||||
# this index enables us to reorder the 'fill' and 'auto' objects
|
||||
self.index += 1
|
||||
# skip XML comments
|
||||
if not isinstance(child.tag, str):
|
||||
continue
|
||||
if child.tag == 'family':
|
||||
is_in_family = True
|
||||
if child.attrib['name'] in family_names:
|
||||
raise CreoleDictConsistencyError(_('Family {} is set several times').format(child.attrib['name']).encode('utf8'))
|
||||
family_names.append(child.attrib['name'])
|
||||
if child.tag == 'variables':
|
||||
child.attrib['name'] = namespace
|
||||
if HIGH_COMPATIBILITY and child.tag == 'value' and child.text == None:
|
||||
continue
|
||||
# creole objects creation
|
||||
try:
|
||||
creoleobj = self.generate_creoleobj(child, space, namespace)
|
||||
except SpaceObjShallNotBeUpdated:
|
||||
continue
|
||||
self._set_text_to_obj(child, creoleobj)
|
||||
self._set_xml_attributes_to_obj(child, creoleobj)
|
||||
self._creoleobj_tree_visitor(child, creoleobj, namespace)
|
||||
self._fill_creoleobj_path_attribute(space, child, namespace, document, creoleobj)
|
||||
self._add_to_tree_structure(creoleobj, space, child)
|
||||
if list(child) != []:
|
||||
self.xml_parse_document(child, creoleobj, namespace, is_in_family)
|
||||
|
||||
def _fill_creoleobj_path_attribute(self, space, child, namespace, document, creoleobj): # pylint: disable=R0913
|
||||
"""Fill self.paths attributes
|
||||
"""
|
||||
if not isinstance(space, self.help): # pylint: disable=E1101
|
||||
if child.tag == 'variable':
|
||||
family_name = normalize_family(document.attrib['name'])
|
||||
self.paths.append('variable', child.attrib['name'], namespace, family_name,
|
||||
creoleobj)
|
||||
if child.attrib.get('redefine', 'False') == 'True':
|
||||
if namespace == 'creole':
|
||||
self.redefine_variables.append(child.attrib['name'])
|
||||
else:
|
||||
self.redefine_variables.append(namespace + '.' + family_name + '.' +
|
||||
child.attrib['name'])
|
||||
|
||||
if child.tag == 'family':
|
||||
family_name = normalize_family(child.attrib['name'])
|
||||
if namespace != 'creole':
|
||||
family_name = namespace + '.' + family_name
|
||||
self.paths.append('family', family_name, namespace, creoleobj=creoleobj)
|
||||
creoleobj.path = self.paths.get_family_path(family_name, namespace)
|
||||
|
||||
def create_or_populate_from_xml(self, namespace, xmlfolders, from_zephir=None):
|
||||
"""Parses a bunch of XML files
|
||||
populates the CreoleObjSpace
|
||||
"""
|
||||
documents = self.xmlreflector.load_xml_from_folders(xmlfolders, from_zephir)
|
||||
for xmlfile, document in documents:
|
||||
try:
|
||||
self.redefine_variables = []
|
||||
self.xml_parse_document(document, self.space, namespace)
|
||||
except Exception as err:
|
||||
#print(_('error in XML file {}').format(xmlfile))
|
||||
raise err
|
||||
|
||||
def populate_from_zephir(self, namespace, xmlfile):
|
||||
self.redefine_variables = []
|
||||
document = self.xmlreflector.parse_xmlfile(xmlfile, from_zephir=True, zephir2=True)
|
||||
self.xml_parse_document(document, self.space, namespace)
|
||||
|
||||
def space_visitor(self, eosfunc_file): # pylint: disable=C0111
|
||||
ActionAnnotator(self.space, self.paths, self)
|
||||
ContainerAnnotator(self.space, self.paths, self)
|
||||
SpaceAnnotator(self.space, self.paths, self, eosfunc_file)
|
||||
|
||||
def save(self, filename, force_no_save=False):
|
||||
"""Save an XML output on disk
|
||||
|
||||
:param filename: the full XML filename
|
||||
"""
|
||||
xml = Element('creole')
|
||||
self._xml_export(xml, self.space)
|
||||
if not force_no_save:
|
||||
self.xmlreflector.save_xmlfile(filename, xml)
|
||||
return xml
|
||||
|
||||
def save_probes(self, filename, force_no_save=False):
|
||||
"""Save an XML output on disk
|
||||
|
||||
:param filename: the full XML filename
|
||||
"""
|
||||
ret = {}
|
||||
for variable in self.probe_variables:
|
||||
args = []
|
||||
kwargs = {}
|
||||
if hasattr(variable, 'param'):
|
||||
for param in variable.param:
|
||||
list_param = list(vars(param).keys())
|
||||
if 'index' in list_param:
|
||||
list_param.remove('index')
|
||||
if list_param == ['text']:
|
||||
args.append(param.text)
|
||||
elif list_param == ['text', 'name']:
|
||||
kwargs[param.name] = param.text
|
||||
else:
|
||||
print(vars(param))
|
||||
raise Exception('hu?')
|
||||
ret[variable.target] = {'function': variable.name,
|
||||
'args': args,
|
||||
'kwargs': kwargs}
|
||||
if not force_no_save:
|
||||
with open(filename, 'w') as fhj:
|
||||
dump(ret, fhj)
|
||||
return ret
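# Shape of the structure built and saved above (illustrative):
#   {'<target variable path>': {'function': '<eosfunc name>',
#                               'args': [...],
#                               'kwargs': {...}}}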
|
||||
|
||||
def _get_attributes(self, space): # pylint: disable=R0201
|
||||
for attr in dir(space):
|
||||
if not attr.startswith('_'):
|
||||
yield attr
|
||||
|
||||
def _sub_xml_export(self, name, node, node_name, space):
|
||||
if isinstance(space, dict):
|
||||
space = list(space.values())
|
||||
if isinstance(space, list):
|
||||
for subspace in space:
|
||||
if isinstance(subspace, self.Master):
|
||||
_name = 'master'
|
||||
subspace.doc = subspace.variable[0].description
|
||||
#subspace.doc = 'Master {}'.format(subspace.name)
|
||||
else:
|
||||
_name = name
|
||||
if name in ['containers', 'variables', 'actions']:
|
||||
_name = 'family'
|
||||
if HIGH_COMPATIBILITY and not hasattr(subspace, 'doc'):
|
||||
subspace.doc = ''
|
||||
if _name == 'value' and (not hasattr(subspace, 'name') or subspace.name is None):
|
||||
continue
|
||||
child_node = SubElement(node, _name)
|
||||
self._xml_export(child_node, subspace, _name)
|
||||
elif isinstance(space, self.Atom):
|
||||
if name == 'containers':
|
||||
child_node = SubElement(node, 'family')
|
||||
child_node.attrib['name'] = name
|
||||
else:
|
||||
child_node = SubElement(node, name)
|
||||
for subname in self._get_attributes(space):
|
||||
subspace = getattr(space, subname)
|
||||
self._sub_xml_export(subname, child_node, name, subspace)
|
||||
elif isinstance(space, self.Redefinable):
|
||||
child_node = SubElement(node, 'family')
|
||||
child_node.attrib['name'] = name
|
||||
for subname in self._get_attributes(space):
|
||||
subspace = getattr(space, subname)
|
||||
self._sub_xml_export(subname, child_node, name, subspace)
|
||||
else:
|
||||
if name in PROPERTIES and node.tag == 'variable':
|
||||
if space is True:
|
||||
for prop in CONVERT_PROPERTIES.get(name, [name]):
|
||||
if sys.version_info[0] < 3:
|
||||
SubElement(node, 'property').text = unicode(prop)
|
||||
else:
|
||||
SubElement(node, 'property').text = prop
|
||||
|
||||
elif name not in ERASED_ATTRIBUTES:
|
||||
if name == 'name' and node_name in self.forced_text_elts_as_name:
|
||||
if sys.version_info[0] < 3 and isinstance(space, unicode):
|
||||
node.text = space
|
||||
elif isinstance(space, str):
|
||||
if sys.version_info[0] < 3:
|
||||
node.text = space.decode('utf8')
|
||||
else:
|
||||
node.text = space
|
||||
else:
|
||||
node.text = str(space)
|
||||
elif name == 'text' and node_name in self.forced_text_elts:
|
||||
node.text = space
|
||||
elif node.tag == 'family' and name == 'name':
|
||||
if 'doc' not in node.attrib.keys():
|
||||
node.attrib['doc'] = space
|
||||
node.attrib['name'] = normalize_family(space, check_name=False)
|
||||
elif node.tag == 'variable' and name == 'mode':
|
||||
if space is not None:
|
||||
SubElement(node, 'property').text = space
|
||||
else:
|
||||
if name in RENAME_ATTIBUTES:
|
||||
name = RENAME_ATTIBUTES[name]
|
||||
if space is not None:
|
||||
if sys.version_info[0] < 3:
|
||||
node.attrib[name] = unicode(space)
|
||||
else:
|
||||
node.attrib[name] = str(space)
|
||||
|
||||
def _xml_export(self, node, space, node_name='creole'):
|
||||
for name in self._get_attributes(space):
|
||||
subspace = getattr(space, name)
|
||||
self._sub_xml_export(name, node, node_name, subspace)
|
||||
|
||||
|
||||
class Path(object):
|
||||
"""Helper class to handle the `path` attribute of a CreoleObjSpace
|
||||
instance.
|
||||
|
||||
sample: path="creole.general.condition"
|
||||
"""
|
||||
def __init__(self):
|
||||
self.variables = {}
|
||||
self.families = {}
|
||||
|
||||
def append(self, pathtype, name, namespace, family=None, creoleobj=None): # pylint: disable=C0111
|
||||
if pathtype == 'family':
|
||||
self.families[name] = dict(name=name, namespace=namespace, creoleobj=creoleobj)
|
||||
elif pathtype == 'variable':
|
||||
if namespace == 'creole':
|
||||
varname = name
|
||||
else:
|
||||
if '.' in name:
|
||||
varname = name
|
||||
else:
|
||||
varname = '.'.join([namespace, family, name])
|
||||
self.variables[varname] = dict(name=name, family=family, namespace=namespace,
|
||||
master=None, creoleobj=creoleobj)
|
||||
else: # pragma: no cover
|
||||
raise Exception('unknown pathtype {}'.format(pathtype))
|
||||
|
||||
def get_family_path(self, name, current_namespace): # pylint: disable=C0111
|
||||
if current_namespace is None: # pragma: no cover
|
||||
raise CreoleOperationError('current_namespace must not be None')
|
||||
dico = self.families[normalize_family(name, check_name=False)]
|
||||
if dico['namespace'] != 'creole' and current_namespace != dico['namespace']:
|
||||
raise CreoleDictConsistencyError(_('A family located in the {} namespace '
|
||||
'shall not be used in the {} namespace').format(
|
||||
dico['namespace'], current_namespace).encode('utf8'))
|
||||
path = dico['name']
|
||||
if dico['namespace'] is not None and '.' not in dico['name']:
|
||||
path = '.'.join([dico['namespace'], path])
|
||||
return path
|
||||
|
||||
def get_family_namespace(self, name): # pylint: disable=C0111
|
||||
dico = self.families[name]
|
||||
if dico['namespace'] is None:
|
||||
return dico['name']
|
||||
return dico['namespace']
|
||||
|
||||
def get_family_obj(self, name): # pylint: disable=C0111
|
||||
if name not in self.families:
|
||||
raise CreoleDictConsistencyError(_('unknown family {}').format(name).encode('utf8'))
|
||||
dico = self.families[name]
|
||||
return dico['creoleobj']
|
||||
|
||||
def get_variable_name(self, name): # pylint: disable=C0111
|
||||
dico = self._get_variable(name)
|
||||
return dico['name']
|
||||
|
||||
def get_variable_obj(self, name): # pylint: disable=C0111
|
||||
dico = self._get_variable(name)
|
||||
return dico['creoleobj']
|
||||
|
||||
def get_variable_family_name(self, name): # pylint: disable=C0111
|
||||
dico = self._get_variable(name)
|
||||
return dico['family']
|
||||
|
||||
def get_variable_family_path(self, name): # pylint: disable=C0111
|
||||
dico = self._get_variable(name)
|
||||
list_path = [dico['namespace'], dico['family']]
|
||||
if dico['master'] is not None:
|
||||
list_path.append(dico['master'])
|
||||
return '.'.join(list_path)
|
||||
|
||||
def get_variable_namespace(self, name): # pylint: disable=C0111
|
||||
return self._get_variable(name)['namespace']
|
||||
|
||||
def get_variable_path(self, name, current_namespace, allow_source=False): # pylint: disable=C0111
|
||||
if current_namespace is None: # pragma: no cover
|
||||
raise CreoleOperationError('current_namespace must not be None')
|
||||
dico = self._get_variable(name)
|
||||
if not allow_source:
|
||||
if dico['namespace'] != 'creole' and current_namespace != dico['namespace']:
|
||||
raise CreoleDictConsistencyError(_('A variable located in the {} namespace '
|
||||
'shall not be used in the {} namespace').format(
|
||||
dico['namespace'], current_namespace).encode('utf8'))
|
||||
if '.' in dico['name']:
|
||||
return dico['name']
|
||||
list_path = [dico['namespace'], dico['family']]
|
||||
if dico['master'] is not None:
|
||||
list_path.append(dico['master'])
|
||||
list_path.append(dico['name'])
|
||||
return '.'.join(list_path)
|
||||
|
||||
def path_is_defined(self, name): # pylint: disable=C0111
|
||||
return name in self.variables
|
||||
|
||||
def set_master(self, name, master): # pylint: disable=C0111
|
||||
dico = self._get_variable(name)
|
||||
namespace = dico['namespace']
|
||||
if dico['master'] != None:
|
||||
raise CreoleDictConsistencyError(_('Already defined master {} for variable'
|
||||
' {}'.format(dico['master'], name)).encode('utf8'))
|
||||
dico['master'] = master
|
||||
if namespace != 'creole':
|
||||
new_path = self.get_variable_path(name, namespace)
|
||||
self.append('variable', new_path, namespace, family=dico['family'], creoleobj=dico['creoleobj'])
|
||||
self.variables[new_path]['master'] = master
|
||||
del self.variables[name]
|
||||
|
||||
def _get_variable(self, name):
|
||||
if name not in self.variables:
|
||||
if name.startswith('creole.'):
|
||||
raise CreoleDictConsistencyError(
|
||||
_("don't set full path variable in creole's namespace "
|
||||
"(set '{}' not '{}')").format(name.split('.')[-1], name).encode('utf8'))
|
||||
raise CreoleDictConsistencyError(_('unknown option {}').format(name).encode('utf8'))
|
||||
return self.variables[name]
|
||||
|
||||
def get_master(self, name): # pylint: disable=C0111
|
||||
dico = self._get_variable(name)
|
||||
return dico['master']
|
1006
creole/reconfigure.py
Normal file
1006
creole/reconfigure.py
Normal file
File diff suppressed because it is too large
651
creole/server.py
Normal file
651
creole/server.py
Normal file
|
@ -0,0 +1,651 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
##########################################################################
|
||||
# creole.server - distribute creole variables through REST API
|
||||
# Copyright © 2012,2013 Pôle de compétences EOLE <eole@ac-dijon.fr>
|
||||
#
|
||||
# License CeCILL:
|
||||
# * in french: http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# * in english http://www.cecill.info/licences/Licence_CeCILL_V2-en.html
|
||||
##########################################################################
|
||||
|
||||
"""Distribute Creole configuration through REST API
|
||||
|
||||
Setup a daemon based on `cherrypy` listening by default on
|
||||
127.0.0.1:8000 for queries on Creole configuration.
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import threading
|
||||
|
||||
from creole import eosfunc
|
||||
|
||||
from traceback import format_exc
|
||||
|
||||
from os.path import basename, dirname, isdir, samefile, splitext
|
||||
|
||||
from pyeole.log import init_logging, getLogger
|
||||
from pyeole import scriptargs
|
||||
|
||||
from .config import configeoldir, eoledirs, eoleextradico, \
|
||||
eoleextraconfig
|
||||
from .loader import creole_loader, load_config_eol, load_extras
|
||||
|
||||
from .i18n import _
|
||||
|
||||
from tiramisu.config import Config, SubConfig, undefined
|
||||
from tiramisu.error import PropertiesOptionError
|
||||
|
||||
from pyeole.cherrypy_plugins import InotifyMonitor
|
||||
|
||||
import cherrypy
|
||||
import socket
|
||||
|
||||
from pyinotify import ProcessEvent
|
||||
from pyinotify import IN_DELETE
|
||||
from pyinotify import IN_CREATE
|
||||
from pyinotify import IN_MODIFY
|
||||
from pyinotify import IN_MOVED_TO
|
||||
from pyinotify import IN_MOVED_FROM
|
||||
|
||||
from systemd import daemon
|
||||
|
||||
import logging
|
||||
|
||||
# Global logger
|
||||
log = getLogger(__name__)
|
||||
|
||||
lock = threading.Lock()
|
||||
|
||||
num_error = [(PropertiesOptionError, 1), (KeyError, 2),
|
||||
(AttributeError, 4), (Exception, 3)]
|
||||
|
||||
# For pyinotify handler and filtering
|
||||
_INOTIFY_EOL_DIRS = [configeoldir, eoleextraconfig]
|
||||
|
||||
_INOTIFY_MASK = IN_DELETE | IN_CREATE | IN_MODIFY | IN_MOVED_TO | IN_MOVED_FROM
|
||||
|
||||
|
||||
def _inotify_filter(event):
|
||||
"""Check if the path must be excluded from being watched.
|
||||
|
||||
:param event: event to look for
|
||||
:type event: :class:`pyinotify.Event`
|
||||
:return: if the :data:`event` must be excluded
|
||||
:rtype: `bool`
|
||||
|
||||
"""
|
||||
|
||||
_INOTIFY_EOL = True
|
||||
|
||||
if isdir(event.pathname):
|
||||
# Always ok for EOLE directories
|
||||
for directory in _INOTIFY_EOL_DIRS:
|
||||
if not os.access(directory, os.F_OK):
|
||||
continue
|
||||
if samefile(event.pathname, directory):
|
||||
_INOTIFY_EOL = False
|
||||
|
||||
if not _INOTIFY_EOL:
|
||||
return {"EOL": _INOTIFY_EOL}
|
||||
|
||||
extension = splitext(event.name)[1]
|
||||
|
||||
if event.mask != IN_DELETE and not os.access(event.pathname, os.F_OK):
|
||||
log.debug(_(u'File not accessible: {0}').format(event.pathname))
|
||||
return {"EOL": True}
|
||||
|
||||
if event.mask != IN_DELETE and os.stat(event.pathname).st_size == 0:
|
||||
log.debug(_(u'File with null size: {0}').format(event.pathname))
|
||||
return {"EOL": True}
|
||||
|
||||
# Check only for files in EOLE directories
|
||||
|
||||
for directory in _INOTIFY_EOL_DIRS:
|
||||
if not os.access(directory, os.F_OK):
|
||||
continue
|
||||
if samefile(event.path, directory) or str(event.path).startswith(directory):
|
||||
_INOTIFY_EOL = extension != '.eol'
|
||||
break
|
||||
|
||||
return {"EOL": _INOTIFY_EOL}
|
||||
|
||||
|
||||
class CreoleInotifyHandler(ProcessEvent):
|
||||
"""Process inotify events
|
||||
|
||||
"""
|
||||
|
||||
_server = None
|
||||
"""Instance of :class:`CreoleServer`.
|
||||
|
||||
"""
|
||||
|
||||
def my_init(self, server):
|
||||
"""Subclass constructor.
|
||||
|
||||
This is the constructor, it is automatically called from
|
||||
:meth:`ProcessEvent.__init__()`,
|
||||
|
||||
Extra arguments passed to ``__init__()`` would be delegated
|
||||
automatically to ``my_init()``.
|
||||
|
||||
"""
|
||||
self._server = server
|
||||
|
||||
def process_default(self, event):
|
||||
"""Reload :class:`CreoleServer` on all managed inotify events
|
||||
|
||||
"""
|
||||
inotify_data = _inotify_filter(event)
|
||||
if not inotify_data["EOL"]:
|
||||
log.warn(_(u'Reload config.eol due to {0} on {1}').format(event.maskname,
|
||||
event.pathname))
|
||||
try:
|
||||
self._server.reload_eol()
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
log.debug(_(u'Filtered inotify event for {0}').format(event.pathname))
|
||||
|
||||
|
||||
class CreoleServer(object):
|
||||
"""Cherrypy application answering REST requests
|
||||
"""
|
||||
|
||||
def __init__(self, running=True):
|
||||
"""Initialize the server
|
||||
|
||||
Load the tiramisu configuration.
|
||||
|
||||
:param `bool` running: Is the web server running during server
|
||||
initialization.
|
||||
|
||||
"""
|
||||
|
||||
log.debug(_(u"Loading tiramisu configuration"))
|
||||
self.config = None
|
||||
self.reload_config(running)
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
def reload_config(self, running=True):
|
||||
lock.acquire()
|
||||
|
||||
if running:
|
||||
# Tell systemd that we are reloading the configuration
|
||||
daemon.notify('RELOADING=1')
|
||||
|
||||
try:
|
||||
log.debug(u"Set umask to 0022")
|
||||
os.umask(0022)
|
||||
reload(eosfunc)
|
||||
eosfunc.load_funcs(force_reload=True)
|
||||
self.config = creole_loader(load_extra=True, reload_config=False,
|
||||
disable_mandatory=True, owner='creoled',
|
||||
try_upgrade=False)
|
||||
if log.isEnabledFor(logging.DEBUG) and self.config.impl_get_information('load_error', False):
|
||||
msg = _('Load creole configuration with errors')
|
||||
log.debug(msg)
|
||||
ret = self.response()
|
||||
|
||||
except Exception, err:
|
||||
# Avoid using format as exception message could be undecoded
|
||||
msg = _('Unable to load creole configuration: ')
|
||||
msg += unicode(str(err), 'utf-8')
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
log.debug(msg, exc_info=True)
|
||||
else:
|
||||
log.error(msg)
|
||||
|
||||
#self.config = None
|
||||
ret = self.response(status=3)
|
||||
|
||||
if running:
|
||||
# Tell systemd that we are now ready again
|
||||
daemon.notify('READY=1')
|
||||
|
||||
lock.release()
|
||||
|
||||
return ret
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
def reload_eol(self):
|
||||
if not self.config:
|
||||
return self.reload_config()
|
||||
|
||||
lock.acquire()
|
||||
|
||||
# Tell systemd that we are reloading the configuration
|
||||
daemon.notify(u'RELOADING=1')
|
||||
|
||||
config = Config(self.config.cfgimpl_get_description())
|
||||
try:
|
||||
load_config_eol(config)
|
||||
except Exception, err:
|
||||
# Avoid using format as exception message could be undecoded
|
||||
msg = _('Unable to load creole configuration from config.eol: ')
|
||||
msg += unicode(str(err), 'utf-8')
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
log.debug(msg, exc_info=True)
|
||||
else:
|
||||
log.error(msg)
|
||||
|
||||
#self.config = None
|
||||
ret = self.response(status=3)
|
||||
try:
|
||||
load_extras(config)
|
||||
except Exception, err:
|
||||
msg = _('Unable to load creole configuration from extra: ')
|
||||
msg += unicode(str(err), 'utf-8')
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
log.debug(msg, exc_info=True)
|
||||
else:
|
||||
log.error(msg)
|
||||
|
||||
#self.config = None
|
||||
ret = self.response(status=3)
|
||||
else:
|
||||
config.read_only()
|
||||
self.config = config
|
||||
ret = self.response()
|
||||
|
||||
|
||||
# Tell systemd that we are now ready again
|
||||
daemon.notify(u'READY=1')
|
||||
|
||||
lock.release()
|
||||
|
||||
return ret
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
def valid_mandatory(self):
|
||||
if self.config is None:
|
||||
return self._no_config()
|
||||
try:
|
||||
msg = _(u'Not all variables are set, please configure your system:')
|
||||
error = False
|
||||
mandatory_errors = set(self.config.cfgimpl_get_values().mandatory_warnings(force_permissive=True))
|
||||
if mandatory_errors != set():
|
||||
error = True
|
||||
msg += ' ' + _('these variables are mandatory') + ' (' + ', '.join(mandatory_errors) + ')'
|
||||
force_vars = set()
|
||||
for force_store_var in self.config.impl_get_information('force_store_vars'):
|
||||
if force_store_var not in mandatory_errors:
|
||||
try:
|
||||
getattr(self.config, force_store_var)
|
||||
force_vars.add(force_store_var)
|
||||
except:
|
||||
pass
|
||||
if force_vars != set():
|
||||
error = True
|
||||
msg += ' ' + _('variables must be in config file') + ' (' + ', '.join(force_vars) + ')'
|
||||
|
||||
if error:
|
||||
log.debug(mandatory_errors)
|
||||
return self.response(msg, 3)
|
||||
except Exception, err:
|
||||
log.debug(err, exc_info=True)
|
||||
return self.response(str(err), 3)
|
||||
return self.response()
|
||||
|
||||
@staticmethod
|
||||
def response(response='OK', status=0):
|
||||
"""Generate a normalized response
|
||||
|
||||
:param response: message of the response
|
||||
:type response: `object`
|
||||
:param status: status code for the response, ``0`` for OK
|
||||
:type status: `int`
|
||||
:return: response of the form: ``{"status": `int`, "response": `message`}``
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
return {u'status': status, u'response': response}
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
def get(self, *args, **kwargs):
|
||||
"""Return the content of a tiramisu path
|
||||
|
||||
:param args: path elements of the query
|
||||
:type args: `list`
|
||||
:return: Value of a single variable or sub tree
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
def _remove_properties_error(val):
|
||||
new_val = []
|
||||
for v in val:
|
||||
if isinstance(v, PropertiesOptionError):
|
||||
new_val.append({'err': str(v)})
|
||||
else:
|
||||
new_val.append(v)
|
||||
return new_val
|
||||
|
||||
if self.config is None:
|
||||
return self._no_config()
|
||||
try:
|
||||
config = self.config
|
||||
if len(args) != 0:
|
||||
subconfig = getattr(config, '.'.join(args))
|
||||
else:
|
||||
subconfig = config
|
||||
if isinstance(subconfig, SubConfig):
|
||||
if u'variable' in kwargs:
|
||||
name = kwargs[u'variable']
|
||||
path = subconfig.find_first(byname=name,
|
||||
type_=u'path',
|
||||
check_properties=False)
|
||||
try:
|
||||
val = getattr(config, path)
|
||||
except PropertiesOptionError as err:
|
||||
if err.proptype == ['mandatory']:
|
||||
raise Exception(_(u'Mandatory variable {0} '
|
||||
u'is not set.').format(name))
|
||||
raise err
|
||||
if isinstance(val, list):
|
||||
val = _remove_properties_error(val)
|
||||
return self.response(val)
|
||||
else:
|
||||
withoption = kwargs.get(u'withoption')
|
||||
withvalue = kwargs.get(u'withvalue')
|
||||
if withvalue is None:
|
||||
withvalue = undefined
|
||||
dico = subconfig.make_dict(withoption=withoption, withvalue=withvalue)
|
||||
for key, val in dico.items():
|
||||
if isinstance(val, list):
|
||||
dico[key] = _remove_properties_error(val)
|
||||
return self.response(dico)
|
||||
else:
|
||||
#if config is a value, not a SubConfig
|
||||
if isinstance(subconfig, list):
|
||||
subconfig = _remove_properties_error(subconfig)
|
||||
return self.response(subconfig)
|
||||
except Exception, err:
|
||||
log.debug(err, exc_info=True)
|
||||
for error_match in num_error:
|
||||
if isinstance(err, error_match[0]):
|
||||
break
|
||||
return self.response(str(err), error_match[1])
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
def list(self, *args):
|
||||
"""List subtree pointed by :data:`args`
|
||||
|
||||
List the nodes and variables under a path.
|
||||
|
||||
If the path point to a single variable, then return its value.
|
||||
|
||||
:param args: path elements of the query
|
||||
:type args: `list`
|
||||
|
||||
:return: Nodes and/or variables under a path, or value of a
|
||||
variable
|
||||
:rtype: `list`
|
||||
|
||||
"""
|
||||
if self.config is None:
|
||||
return self._no_config()
|
||||
try:
|
||||
config = self.config
|
||||
if len(args) == 0:
|
||||
# root of configuration
|
||||
obj = config
|
||||
else:
|
||||
# Path to a sub configuration
|
||||
base = '.'.join(args)
|
||||
obj = getattr(config, base)
|
||||
if isinstance(obj, SubConfig):
|
||||
# Path is a node
|
||||
groups = [u'%s/' % g[0] for g in obj.iter_groups()]
|
||||
items = [u'%s' % i[0] for i in obj]
|
||||
return self.response(groups + items)
|
||||
else:
|
||||
# Path is a leaf
|
||||
value = self.get(*args)[u'response']
|
||||
return self.response([value])
|
||||
except Exception, err:
|
||||
log.debug(err, exc_info=True)
|
||||
for error_match in num_error:
|
||||
if isinstance(err, error_match[0]):
|
||||
break
|
||||
return self.response(str(err), error_match[1])
|
||||
|
||||
def _no_config(self):
|
||||
"""Return an error message when no configuration is loaded
|
||||
|
||||
:return: a failure response
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
return self.response(_(u'No configuration'), status=3)
|
||||
|
||||
class CreoleDaemon(object):
|
||||
"""Run the CreoleServer
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize the cherrypy daemon
|
||||
"""
|
||||
|
||||
# Built-in configuration
|
||||
self.argparse = self._load_argparse()
|
||||
# Read command line arguments
|
||||
self.option = self.argparse.parse_args()
|
||||
if self.option.verbose:
|
||||
self.option.log_level = u'info'
|
||||
if self.option.debug:
|
||||
self.option.log_level = u'debug'
|
||||
self._configure_log()
|
||||
|
||||
def _load_argparse(self):
|
||||
"""Parse command line arguments
|
||||
|
||||
:return: command line parser
|
||||
:rtype: `argparse.ArgumentParser`
|
||||
|
||||
"""
|
||||
parser = argparse.ArgumentParser(description=u'Run creole daemon',
|
||||
parents=[scriptargs.logging('warning')],
|
||||
conflict_handler='resolve')
|
||||
parser.add_argument("-b", "--base-dir", default='/tmp',
|
||||
help=_(u"Base directory in which the server"
|
||||
" is launched (default: /tmp)"))
|
||||
parser.add_argument("-c", "--conf-file",
|
||||
default='/etc/eole/creoled.conf',
|
||||
help=_(u"Configuration file of the server"
|
||||
" (default: /etc/eole/creoled.conf"))
|
||||
parser.add_argument("-d", "--daemon", action='store_true',
|
||||
help=_(u"Run the server as a daemon (default: false)"))
|
||||
parser.add_argument("-l", "--listen", action='store',
|
||||
default='127.0.0.1:8000',
|
||||
help=_(u"Listen on the specified IP:PORT"
|
||||
" (default: 127.0.0.1:8000)"))
|
||||
parser.add_argument("-m", "--mount-base", default='/',
|
||||
help=_(u"Base under which the application is mounted"
|
||||
" (default: /)"))
|
||||
parser.add_argument("-p", "--pidfile",
|
||||
default='/tmp/{0}.pid'.format(
|
||||
basename(sys.argv[0])),
|
||||
help=_(u"Base under which the application is mounted"
|
||||
" (default: /)"))
|
||||
parser.add_argument("-u", "--user", default='nobody',
|
||||
help=_(u"User of the running process"
|
||||
" (default: nobody)"))
|
||||
parser.add_argument("-g", "--group", default='nogroup',
|
||||
help=_(u"Group of the running process"
|
||||
" (default: nogroup)"))
|
||||
parser.add_argument("--umask", default='0640',
|
||||
help=_(u"Umask of the running process"
|
||||
" (default: 0644)"))
|
||||
return parser
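# Example invocation (illustrative; the actual program name depends on how the
# daemon is packaged -- 'creoled' is an assumption):
#   creoled --daemon --listen 127.0.0.1:8000 --user nobody --group nogroup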
|
||||
|
||||
def _get_conf(self, name):
|
||||
"""Map command line arguments to cherrypy configuration
|
||||
|
||||
:param name: internal name of argparse option store
|
||||
:returns: piece of cherrypy configuration
|
||||
:rtype: `dict`
|
||||
"""
|
||||
try:
|
||||
option_map = { 'listen' :
|
||||
{ 'server.socket_host' :
|
||||
self.option.listen.split(':')[0],
|
||||
'server.socket_port' :
|
||||
int(self.option.listen.split(':')[1])},
|
||||
}
|
||||
return option_map[name]
|
||||
except KeyError:
|
||||
return {}
|
||||
|
||||
def load_conf(self):
|
||||
"""Load daemon configuration
|
||||
|
||||
Take care to load the configuration in proper order and avoid
|
||||
overriding configuration file parameters with default command
|
||||
line arguments.
|
||||
|
||||
Order is:
|
||||
|
||||
- default values from command line option parser
|
||||
|
||||
- option from a configuration file
|
||||
|
||||
- command line arguments
|
||||
|
||||
"""
|
||||
# Load all default value
|
||||
config = {'engine.autoreload.on': False}
|
||||
for opt in vars(self.option):
|
||||
config.update(self._get_conf(opt))
|
||||
|
||||
cherrypy.config.update( { 'global' : config} )
|
||||
|
||||
# Load configuration file
|
||||
if os.access(self.option.conf_file, os.F_OK):
|
||||
cherrypy.config.update(self.option.conf_file)
|
||||
|
||||
# Override config file option present on command line
|
||||
config = {}
|
||||
for opt in sys.argv[1:]:
|
||||
config.update(self._get_conf(opt))
|
||||
cherrypy.config.update( {'global' : config } )
|
||||
|
||||
def _configure_log(self):
|
||||
"""Configure the module logger
|
||||
|
||||
Avoid logging apache style time since the logger does it.
|
||||
|
||||
"""
|
||||
global log
|
||||
log_filename = None
|
||||
if self.option.daemon:
|
||||
log_filename = u'/var/log/creoled.log'
|
||||
|
||||
log = init_logging(name=u'creoled', as_root=True,
|
||||
level=self.option.log_level,
|
||||
console=not self.option.daemon,
|
||||
syslog=None,
|
||||
filename=log_filename)
|
||||
|
||||
# CherryPy does not handle the logs
|
||||
cherrypy.log.error_file = None
|
||||
cherrypy.log.access_file = None
|
||||
# Do not output on screen
|
||||
cherrypy.log.screen = False
|
||||
# Hack to avoid time in log message
|
||||
cherrypy.log.time = lambda : ''
|
||||
|
||||
def run(self):
|
||||
"""Start the cherrypy server.
|
||||
"""
|
||||
engine = cherrypy.engine
|
||||
|
||||
# Load the server, but we are not running yet
|
||||
# Do not let it tell systemd otherwise
|
||||
server = CreoleServer(running=False)
|
||||
|
||||
inotify_handler = CreoleInotifyHandler(server=server)
|
||||
|
||||
if hasattr(engine, "signal_handler"):
|
||||
engine.signal_handler.subscribe()
|
||||
# Error exit on SIGINT (Ctrl-C) #6177
|
||||
engine.signal_handler.set_handler(2, self._kill)
|
||||
|
||||
if hasattr(engine, "console_control_handler"):
|
||||
engine.console_control_handler.subscribe()
|
||||
|
||||
cherrypy.tree.mount(server, self.option.mount_base,
|
||||
config={'global' : {} })
|
||||
|
||||
# Merge configuration from built-in defaults, configuration file and command line
|
||||
self.load_conf()
|
||||
|
||||
if server.config is None:
|
||||
msg = _(u"No configuration found: do not check for container mode.")
|
||||
log.warn(msg)
|
||||
elif server.config.creole.general.mode_conteneur_actif == 'oui':
|
||||
container_ip = server.config.creole.containers.adresse_ip_br0
|
||||
container_port = cherrypy.config.get('server.socket_port')
|
||||
# Start a server for containers if the IP can be bound
|
||||
try:
|
||||
container_socket = socket.socket(socket.AF_INET,
|
||||
socket.SOCK_STREAM)
|
||||
container_socket.setsockopt(socket.SOL_SOCKET,
|
||||
socket.SO_REUSEADDR,
|
||||
1)
|
||||
container_socket.bind((container_ip, container_port))
|
||||
container_socket.close()
|
||||
except socket.error, err:
|
||||
log.error(_(u"Unable to listen for containers: {0}").format(err))
|
||||
else:
|
||||
container_server = cherrypy._cpserver.Server()
|
||||
container_server.socket_host = container_ip
|
||||
container_server.socket_port = container_port
|
||||
container_server.subscribe()
|
||||
|
||||
monitor = InotifyMonitor(engine, inotify_handler)
|
||||
monitor.subscribe()
|
||||
|
||||
monitor.watch.add_watch(_INOTIFY_EOL_DIRS, _INOTIFY_MASK, auto_add=True, rec=True)
|
||||
|
||||
if self.option.pidfile:
|
||||
cherrypy.process.plugins.PIDFile(engine,
|
||||
self.option.pidfile).subscribe()
|
||||
|
||||
if self.option.daemon:
|
||||
cherrypy.process.plugins.Daemonizer(engine).subscribe()
|
||||
|
||||
# Drop privileges
|
||||
cherrypy.process.plugins.DropPrivileges(engine,
|
||||
uid = self.option.user,
|
||||
gid = self.option.group,
|
||||
umask = self.option.umask)
|
||||
|
||||
# Let's start the CherryPy engine so that
|
||||
# everything works
|
||||
engine.start()
|
||||
|
||||
# Tell systemd that we are ready
|
||||
daemon.notify(u'READY=1')
|
||||
|
||||
# Run the engine main loop
|
||||
engine.block()
|
||||
|
||||
@staticmethod
|
||||
def _kill():
|
||||
"""Exit the server with non zero exit code
|
||||
"""
|
||||
sys.exit(1)
|
||||
|
||||
if __name__ == '__main__':
|
||||
daemon = CreoleDaemon()
|
||||
daemon.run()
|
45
creole/service.py
Normal file
45
creole/service.py
Normal file
|
@ -0,0 +1,45 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from pyeole.service import manage_services
|
||||
from pyeole.decorator import deprecated
|
||||
|
||||
from .i18n import _
|
||||
|
||||
|
||||
@deprecated(_(u'Use new API “manage_services()”'))
|
||||
def instance_services(container=None):
|
||||
"""
|
||||
Instantiate the services
|
||||
"""
|
||||
manage_services(u'configure', container=container)
|
||||
|
||||
|
||||
@deprecated(_(u'Use new API “manage_services()”'))
|
||||
def stop_services(container=None):
|
||||
"""Stop all services
|
||||
|
||||
The networking service is never stopped.
|
||||
|
||||
@param container: name of the container
|
||||
@type container: C{str}
|
||||
"""
|
||||
manage_services(u'stop', container=container)
|
||||
|
||||
|
||||
@deprecated(_(u'Use new API “manage_services()”'))
|
||||
def start_services(container=None):
|
||||
"""Start all services
|
||||
|
||||
The networking service is a special case.
|
||||
|
||||
@param container: name of the container
|
||||
@type container: C{str}
|
||||
"""
|
||||
manage_services(u'start', container=container)
|
||||
|
||||
|
||||
@deprecated(_(u'Use new API “manage_services()”'))
|
||||
def restart_services(container=None):
|
||||
"""
|
||||
Restart the services
|
||||
"""
|
||||
manage_services(u'restart', container=container)
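# Migration sketch (illustrative): instead of the deprecated wrappers above,
# call the new API directly:
#   from pyeole.service import manage_services
#   manage_services(u'restart', container=None)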
|
607
creole/template.py
Normal file
607
creole/template.py
Normal file
|
@ -0,0 +1,607 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Management of the template mini-language
|
||||
We work on the target files
|
||||
"""
|
||||
|
||||
import sys
|
||||
import shutil
|
||||
import logging
|
||||
|
||||
import traceback
|
||||
import os
|
||||
from os import listdir, unlink
|
||||
from os.path import basename, join
|
||||
|
||||
from tempfile import mktemp
|
||||
|
||||
from Cheetah import Parser
|
||||
# the template encoding is determined by a regexp (encodingDirectiveRE in Parser.py)
|
||||
# it looks for a line that looks like '#encoding: utf-8'
|
||||
# this class mimics the 're' module and always returns the utf-8 encoding
|
||||
# 6224
|
||||
class FakeEncoding():
|
||||
def groups(self):
|
||||
return ('utf-8',)
|
||||
|
||||
def search(self, *args):
|
||||
return self
|
||||
Parser.encodingDirectiveRE = FakeEncoding()
|
||||
|
||||
from Cheetah.Template import Template as ChtTemplate
|
||||
from Cheetah.NameMapper import NotFound as CheetahNotFound
|
||||
|
||||
import config as cfg
|
||||
|
||||
from .client import CreoleClient, CreoleClientError
|
||||
from .error import FileNotFound, TemplateError, TemplateDisabled
|
||||
import eosfunc
|
||||
|
||||
from .i18n import _
|
||||
|
||||
import pyeole
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
log.addHandler(logging.NullHandler())
|
||||
|
||||
class IsDefined(object):
|
||||
"""
|
||||
filter that avoids raising an exception when
|
||||
the Creole variable is not defined
|
||||
"""
|
||||
def __init__(self, context):
|
||||
self.context = context
|
||||
|
||||
def __call__(self, varname):
|
||||
if '.' in varname:
|
||||
splitted_var = varname.split('.')
|
||||
if len(splitted_var) != 2:
|
||||
msg = _(u"Group variables must be of type master.slave")
|
||||
raise KeyError(msg)
|
||||
master, slave = splitted_var
|
||||
if master in self.context:
|
||||
return slave in self.context[master].slave.keys()
|
||||
return False
|
||||
else:
|
||||
return varname in self.context
|
||||
|
||||
|
||||
class CreoleGet(object):
|
||||
def __init__(self, context):
|
||||
self.context = context
|
||||
|
||||
def __call__(self, varname):
|
||||
return self.context[varname]
|
||||
|
||||
def __getitem__(self, varname):
|
||||
"""For bracket and dotted notation
|
||||
"""
|
||||
return self.context[varname]
|
||||
|
||||
def __contains__(self, varname):
|
||||
"""Check variable existence in context
|
||||
"""
|
||||
return varname in self.context
|
||||
|
||||
|
||||
@classmethod
|
||||
def cl_compile(kls, *args, **kwargs):
|
||||
kwargs['compilerSettings'] = {'directiveStartToken' : u'%',
|
||||
'cheetahVarStartToken' : u'%%',
|
||||
'EOLSlurpToken' : u'%',
|
||||
'PSPStartToken' : u'µ' * 10,
|
||||
'PSPEndToken' : u'µ' * 10,
|
||||
'commentStartToken' : u'µ' * 10,
|
||||
'commentEndToken' : u'µ' * 10,
|
||||
'multiLineCommentStartToken' : u'µ' * 10,
|
||||
'multiLineCommentEndToken' : u'µ' * 10}
|
||||
return kls.old_compile(*args, **kwargs)
|
||||
ChtTemplate.old_compile = ChtTemplate.compile
|
||||
ChtTemplate.compile = cl_compile
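# Illustrative template fragment using the tokens configured above (directives
# start with '%', Creole variables with '%%'); the variable names are
# assumptions:
#   %if %%is_defined('nom_machine')
#   hostname = %%nom_machine
#   %end if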
|
||||
|
||||
|
||||
class CheetahTemplate(ChtTemplate):
|
||||
"""classe pour personnaliser et faciliter la construction
|
||||
of the Cheetah template
|
||||
"""
|
||||
def __init__(self, filename, context, current_container):
|
||||
"""Initialize Creole CheetahTemplate
|
||||
|
||||
@param filename: name of the file to process
|
||||
@type filename: C{str}
|
||||
@param context: flat dictionary of creole variables as 'name':'value',
|
||||
@type context: C{dict}
|
||||
@param current_container: flat dictionary describing the current container
|
||||
@type current_container: C{dict}
|
||||
"""
|
||||
eos = {}
|
||||
for func in dir(eosfunc):
|
||||
if not func.startswith('_'):
|
||||
eos[func] = getattr(eosfunc, func)
|
||||
# add the variables describing the containers
|
||||
# FIXME: fetch this information from the client!
|
||||
ChtTemplate.__init__(self, file=filename,
|
||||
searchList=[context, eos, {u'is_defined' : IsDefined(context),
|
||||
u'creole_client' : CreoleClient(),
|
||||
u'current_container':CreoleGet(current_container),
|
||||
}])
|
||||
|
||||
|
||||
class CreoleMaster(object):
|
||||
def __init__(self, value, slave=None, index=None):
|
||||
"""
|
||||
Make the variable iterable so that one can write:
|
||||
for ip in iplist:
|
||||
print ip.network
|
||||
print ip.netmask
|
||||
print ip
|
||||
index is used for CreoleLint
|
||||
"""
|
||||
self._value = value
|
||||
if slave is not None:
|
||||
self.slave = slave
|
||||
else:
|
||||
self.slave = {}
|
||||
self._index = index
|
||||
|
||||
def __getattr__(self, name):
|
||||
"""Get slave variable or attribute of master value.
|
||||
|
||||
If the attribute is a name of a slave variable, return its value.
|
||||
Otherwise, returns the requested attribute of master value.
|
||||
"""
|
||||
if name in self.slave:
|
||||
value = self.slave[name]
|
||||
if isinstance(value, Exception):
|
||||
raise value
|
||||
return value
|
||||
else:
|
||||
return getattr(self._value, name)
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""Get a master.slave at requested index.
|
||||
"""
|
||||
ret = {}
|
||||
for key, values in self.slave.items():
|
||||
ret[key] = values[index]
|
||||
return CreoleMaster(self._value[index], ret, index)
|
||||
|
||||
def __iter__(self):
|
||||
"""Iterate over master.slave.
|
||||
|
||||
Return synchronised value of master.slave.
|
||||
"""
|
||||
for i in range(len(self._value)):
|
||||
ret = {}
|
||||
for key, values in self.slave.items():
|
||||
ret[key] = values[i]
|
||||
yield CreoleMaster(self._value[i], ret, i)
|
||||
|
||||
def __len__(self):
|
||||
"""Delegate to master value
|
||||
"""
|
||||
return len(self._value)
|
||||
|
||||
def __repr__(self):
|
||||
"""Show CreoleMaster as dictionary.
|
||||
|
||||
The master value is stored under 'value' key.
|
||||
The slaves are stored under 'slave' key.
|
||||
"""
|
||||
return repr({u'value': self._value, u'slave': self.slave})
|
||||
|
||||
def __eq__(self, value):
|
||||
return value == self._value
|
||||
|
||||
def __ne__(self, value):
|
||||
return value != self._value
|
||||
|
||||
def __lt__(self, value):
|
||||
return self._value < value
|
||||
|
||||
def __le__(self, value):
|
||||
return self._value <= value
|
||||
|
||||
def __gt__(self, value):
|
||||
return self._value > value
|
||||
|
||||
def __ge__(self, value):
|
||||
return self._value >= value
|
||||
|
||||
def __str__(self):
|
||||
"""Delegate to master value
|
||||
"""
|
||||
return str(self._value)
|
||||
|
||||
def __add__(self, val):
|
||||
return self._value.__add__(val)
|
||||
|
||||
def __radd__(self, val):
|
||||
return val + self._value
|
||||
|
||||
def __contains__(self, item):
|
||||
return item in self._value
|
||||
|
||||
def add_slave(self, name, value):
|
||||
"""Add a slave variable
|
||||
|
||||
Minimal check on type and value of the slave in regards to the
|
||||
master one.
|
||||
|
||||
@param name: name of the slave variable
|
||||
@type name: C{str}
|
||||
@param value: value of the slave variable
|
||||
"""
|
||||
if isinstance(self._value, list):
|
||||
if not isinstance(value, list):
|
||||
raise TypeError
|
||||
elif len(value) != len(self._value):
|
||||
raise ValueError(_(u'length mismatch'))
|
||||
new_value = []
|
||||
for val in value:
|
||||
if isinstance(val, dict):
|
||||
new_value.append(ValueError(val['err']))
|
||||
else:
|
||||
new_value.append(val)
|
||||
value = new_value
|
||||
elif isinstance(value, list):
|
||||
raise TypeError
|
||||
self.slave[name] = value
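# Minimal sketch (illustrative, hypothetical values):
#   eth = CreoleMaster([u'eth0', u'eth1'])
#   eth.add_slave('ip', [u'192.168.0.1', u'192.168.0.2'])
#   [str(i) for i in eth]   -> ['eth0', 'eth1']
#   [i.ip for i in eth]     -> [u'192.168.0.1', u'192.168.0.2']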
|
||||
|
||||
class CreoleTemplateEngine(object):
|
||||
"""Engine to process Creole cheetah template
|
||||
"""
|
||||
def __init__(self, force_values=None):
|
||||
# force_values allows injecting values instead of using CreoleClient (used by CreoleLint)
|
||||
self.client = CreoleClient()
|
||||
self.creole_variables_dict = {}
|
||||
self.force_values = force_values
|
||||
self.load_eole_variables()
|
||||
|
||||
def load_eole_variables(self):
|
||||
# substitution of the EOLE variables
|
||||
self.creole_variables_dict = {}
|
||||
if self.force_values is not None:
|
||||
values = self.force_values
|
||||
else:
|
||||
values = self.client.get_creole()
|
||||
for varname, value in values.items():
|
||||
if varname in self.creole_variables_dict:
|
||||
# Creating a slave creates the master
|
||||
continue
|
||||
if varname.find('.') != -1:
|
||||
# support for groups (master/slave)
|
||||
mastername, slavename = varname.split('.')
|
||||
if not mastername in self.creole_variables_dict or not \
|
||||
isinstance(self.creole_variables_dict [mastername],
|
||||
CreoleMaster):
|
||||
# Create the master variable
|
||||
if mastername in values:
|
||||
self.creole_variables_dict[mastername] = CreoleMaster(values[mastername])
|
||||
else:
|
||||
#only for CreoleLint
|
||||
self.creole_variables_dict[mastername] = CreoleMaster(value)
|
||||
#test only for CreoleLint
|
||||
if mastername != slavename:
|
||||
self.creole_variables_dict[mastername].add_slave(slavename, value)
|
||||
else:
|
||||
self.creole_variables_dict[varname] = value
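# Sketch of the transformation above (hypothetical input):
#   values = {'nom_machine': 'scribe', 'eth': ['eth0'], 'eth.ip': ['10.0.0.1']}
# gives:
#   creole_variables_dict['nom_machine'] == 'scribe'
#   creole_variables_dict['eth'] is a CreoleMaster wrapping ['eth0']
#   with an 'ip' slave equal to ['10.0.0.1']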
|
||||
|
||||
def patch_template(self, filevar, force_no_active=False):
|
||||
"""Apply patch to a template
|
||||
"""
|
||||
var_dir = os.path.join(cfg.patch_dir,'variante')
|
||||
patch_cmd = ['patch', '-d', cfg.templatedir, '-N', '-p1']
|
||||
patch_no_debug = ['-s', '-r', '-', '--backup-if-mismatch']
|
||||
|
||||
tmpl_filename = os.path.split(filevar[u'source'])[1]
|
||||
# variant + local patches
|
||||
for directory in [var_dir, cfg.patch_dir]:
|
||||
patch_file = os.path.join(directory, '{0}.patch'.format(tmpl_filename))
|
||||
if os.access(patch_file, os.F_OK):
|
||||
msg = _(u"Patching template '{0}' with '{1}'")
|
||||
log.info(msg.format(filevar[u'source'], patch_file))
|
||||
ret, out, err = pyeole.process.system_out(patch_cmd + patch_no_debug + ['-i', patch_file])
|
||||
if ret != 0:
|
||||
msg = _(u"Error applying patch: '{0}'\nTo reproduce and fix this error {1}")
|
||||
log.error(msg.format(patch_file, ' '.join(patch_cmd + ['-i', patch_file])))
|
||||
# 8307: copy the original template back and do not stop the process
|
||||
self._copy_to_template_dir(filevar, force_no_active)
|
||||
#raise TemplateError(msg.format(patch_file, err))
|
||||
|
||||
def strip_template_comment(self, filevar):
|
||||
"""Strip comment from template
|
||||
|
||||
This applies if filevar has a del_comment attribute
|
||||
"""
|
||||
# strip comments if requested (del_comment attribute)
|
||||
strip_cmd = ['sed', '-i']
|
||||
if u'del_comment' in filevar and filevar[u'del_comment'] != '':
|
||||
log.info(_(u"Cleaning file '{0}'").format( filevar[u'source'] ))
|
||||
ret, out, err = pyeole.process.system_out(strip_cmd
|
||||
+ ['/^\s*{0}/d ; /^$/d'.format(filevar[u'del_comment']),
|
||||
filevar[u'source'] ])
|
||||
if ret != 0:
|
||||
msg = _(u"Error removing comments '{0}': {1}")
|
||||
raise TemplateError(msg.format(filevar[u'del_comment'], err))
|
||||
|
||||
def _check_filevar(self, filevar, force_no_active=False):
|
||||
"""Verify that filevar is processable
|
||||
|
||||
:param filevar: template file informations
|
||||
:type filevar: `dict`
|
||||
|
||||
:raise CreoleClientError: if :data:`filevar` is disabled
|
||||
inexistant or unknown.
|
||||
|
||||
"""
|
||||
if not force_no_active and (u'activate' not in filevar or not filevar[u'activate']):
|
||||
|
||||
raise CreoleClientError(_(u"Template file not enabled:"
|
||||
u" {0}").format(basename(filevar[u'source'])))
|
||||
if u'source' not in filevar or filevar[u'source'] is None:
|
||||
raise CreoleClientError(_(u"Template file not set:"
|
||||
u" {0}").format(basename(filevar['source'])))
|
||||
|
||||
if u'name' not in filevar or filevar[u'name'] is None:
|
||||
raise CreoleClientError(_(u"Template target not set:"
|
||||
u" {0}").format(basename(filevar[u'source'])))
|
||||
|
||||
def _copy_to_template_dir(self, filevar, force_no_active=False):
|
||||
"""Copy template to processing temporary directory.
|
||||
|
||||
:param filevar: template file informations
|
||||
:type filevar: `dict`
|
||||
:param force_no_active: copy disabled template if `True`
|
||||
:type filevar: `bool`
|
||||
:raise FileNotFound: if source template does not exist
|
||||
|
||||
"""
|
||||
self._check_filevar(filevar, force_no_active)
|
||||
tmpl_source_name = os.path.split(filevar[u'source'])[1]
|
||||
tmpl_source_file = os.path.join(cfg.distrib_dir, tmpl_source_name)
|
||||
if not os.path.isfile(tmpl_source_file):
|
||||
msg = _(u"Template {0} unexistent").format(tmpl_source_file)
|
||||
raise FileNotFound(msg)
|
||||
else:
|
||||
log.info(_(u"Copy template: '{0}' -> '{1}'").format(tmpl_source_file, cfg.templatedir))
|
||||
shutil.copy(tmpl_source_file, cfg.templatedir)
|
||||
|
||||
def prepare_template(self, filevar, force_no_active=False):
|
||||
"""Prepare template source file
|
||||
"""
|
||||
self._copy_to_template_dir(filevar, force_no_active)
|
||||
self.patch_template(filevar, force_no_active)
|
||||
self.strip_template_comment(filevar)
|
||||
|
||||
def verify(self, filevar):
|
||||
"""
|
||||
Check that the files exist
|
||||
@param mkdir: create the directory if needed
|
||||
"""
|
||||
if not os.path.isfile(filevar[u'source']):
|
||||
raise FileNotFound(_(u"File {0} does not exist.").format(filevar[u'source']))
|
||||
destfilename = filevar[u'full_name']
|
||||
dir_target = os.path.dirname(destfilename)
|
||||
if dir_target != '' and not os.path.isdir(dir_target):
|
||||
if not filevar[u'mkdir']:
|
||||
raise FileNotFound(_(u"Folder {0} does not exist but is required by {1}").format(dir_target, destfilename))
|
||||
os.makedirs(dir_target)
|
||||
# FIXME: causes more problems than it solves (cf. #3048)
|
||||
#if not isfile(target):
|
||||
# system('cp %s %s' % (source, target))
|
||||
|
||||
def process(self, filevar, container):
|
||||
"""Process a cheetah template
|
||||
|
||||
Process a cheetah template and copy the file to destination.
|
||||
@param filevar: dictionary describing the file to process
|
||||
@type filevar: C{dict}
|
||||
@param container: dictionary describing the container
|
||||
@type container: C{dict}
|
||||
"""
|
||||
UTF = "#encoding: utf-8"
|
||||
|
||||
self._check_filevar(filevar)
|
||||
|
||||
# full path of the destination file
|
||||
destfilename = filevar[u'full_name']
|
||||
|
||||
log.info(_(u"Cheetah processing: '{0}' -> '{1}'").format(filevar[u'source'],
|
||||
destfilename))
|
||||
|
||||
# use a temporary file
|
||||
# so as not to modify the original
|
||||
tmpfile = mktemp()
|
||||
shutil.copy(filevar[u'source'], tmpfile)
|
||||
|
||||
# add the header for UTF-8 support
|
||||
# FIXME: other encodings?
|
||||
#os.system("sed -i '1i{0}' {1}".format(UTF, tmpfile)) (supprimé depuis #6224)
|
||||
|
||||
try:
|
||||
cheetah_template = CheetahTemplate(tmpfile, self.creole_variables_dict, container)
|
||||
os.unlink(tmpfile)
|
||||
# remove the UTF-8 header that was added (removed since #6224)
|
||||
data = str(cheetah_template) # .replace("{0}\n".format(UTF), '', 1)
|
||||
except CheetahNotFound, err:
|
||||
varname = err.args[0][13:-1]
|
||||
msg = _(u"Error: unknown variable used in template {0} : {1}").format(filevar[u'name'], varname)
|
||||
raise TemplateError, msg
|
||||
except UnicodeDecodeError, err:
|
||||
msg = _(u"Encoding issue detected in template {0}").format(filevar[u'name'])
|
||||
raise TemplateError, msg
|
||||
except Exception, err:
|
||||
msg = _(u"Error while instantiating template {0}: {1}").format(filevar[u'name'], err)
|
||||
raise TemplateError, msg
|
||||
|
||||
# write the target file
|
||||
if destfilename == '':
|
||||
# CreoleCat may need to write on stdout (#10065)
|
||||
sys.stdout.write(data)
|
||||
else:
|
||||
try:
|
||||
file_h = file(destfilename, 'w')
|
||||
file_h.write(data)
|
||||
file_h.close()
|
||||
except IOError, e:
|
||||
msg = _(u"Unable to write in file '{0}': '{1}'").format(destfilename, e)
|
||||
raise FileNotFound, msg
|
||||
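# Illustrative sketch (not part of the original module): the core of the
# Cheetah instantiation performed by process() above, reduced to its essence.
# CheetahTemplate is the project's wrapper around Cheetah; the variable
# values shown here are hypothetical.
#
#   creole_variables_dict = {'nom_machine': 'scribe', 'activer_cups': 'oui'}
#   container = {'name': 'root'}
#   cheetah_template = CheetahTemplate('/tmp/sshd_config.tmpl',
#                                      creole_variables_dict, container)
#   data = str(cheetah_template)   # rendering happens on str()
#
# An unknown $variable in the template raises CheetahNotFound, which the
# method above converts into a TemplateError.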
|
||||
def change_properties(self, filevar, container=None, force_full_name=False):
|
||||
chowncmd = [u'chown']
|
||||
chownarg = ''
|
||||
chmodcmd = [u'chmod']
|
||||
chmodarg = ''
|
||||
|
||||
if not force_full_name:
|
||||
destfilename = filevar[u'name']
|
||||
else:
|
||||
destfilename = filevar[u'full_name']
|
||||
|
||||
if u'owner' in filevar and filevar[u'owner']:
|
||||
chownarg = filevar[u'owner']
|
||||
else:
|
||||
chownarg = u'root'
|
||||
|
||||
if u'group' in filevar and filevar[u'group']:
|
||||
chownarg += ":" + filevar[u'group']
|
||||
else:
|
||||
chownarg += u':root'
|
||||
|
||||
if u'mode' in filevar and filevar[u'mode']:
|
||||
chmodarg = filevar[u'mode']
|
||||
else:
|
||||
chmodarg = u'0644'
|
||||
|
||||
chowncmd.extend( [chownarg, destfilename] )
|
||||
chmodcmd.extend( [chmodarg, destfilename] )
|
||||
|
||||
log.info(_(u'Changing properties: {0}').format(' '.join(chowncmd)) )
|
||||
ret, out, err = pyeole.process.creole_system_out( chowncmd, container=container, context=False )
|
||||
if ret != 0:
|
||||
log.error(_(u'Error changing properties {0}: {1}').format(ret, err) )
|
||||
|
||||
log.info(_(u'Changing properties: {0}').format(' '.join(chmodcmd)) )
|
||||
ret, out, err = pyeole.process.creole_system_out( chmodcmd, container=container, context=False )
|
||||
if ret != 0:
|
||||
log.error(_(u'Error changing properties {0}: {1}').format(ret, err) )
|
||||
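# Illustrative sketch (not part of the original module): with the hypothetical
# filevar {'name': '/etc/ssh/sshd_config', 'owner': 'root', 'group': 'root',
# 'mode': '0600'}, change_properties() ends up running, inside the target
# container:
#
#   chown root:root /etc/ssh/sshd_config
#   chmod 0600 /etc/ssh/sshd_config
#
# Missing 'owner'/'group'/'mode' keys fall back to root:root and 0644.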
|
||||
def remove_destfile(self, filevar):
|
||||
"""
|
||||
suppression du fichier de destination
|
||||
"""
|
||||
destfilename = filevar[u'full_name']
|
||||
if os.path.isfile(destfilename):
|
||||
os.unlink(destfilename)
|
||||
else:
|
||||
log.debug(_(u"File '{0}' unexistent.").format(destfilename))
|
||||
|
||||
|
||||
def _instance_file(self, filevar, container=None):
|
||||
"""Run templatisation on one file of one container
|
||||
|
||||
@param filevar: Dictionary describing the file
|
||||
@type filevar: C{dict}
|
||||
@param container: Dictionary describing a container
|
||||
@type container: C{dict}
|
||||
"""
|
||||
if not filevar.get(u'activate', False):
|
||||
try:
|
||||
# copy and patch disabled templates too (#11029)
|
||||
self.prepare_template(filevar, force_no_active=True)
|
||||
except FileNotFound:
|
||||
pass
|
||||
|
||||
if u'rm' in filevar and filevar[u'rm']:
|
||||
log.info(_(u"Removing file '{0}'"
|
||||
u" from container '{1}'").format(filevar[u'name'],
|
||||
container[u'name']))
|
||||
self.remove_destfile(filevar)
|
||||
|
||||
# The caller handles if it's an error
|
||||
raise TemplateDisabled(_(u"Instantiation of file '{0}' disabled").format(filevar[u'name']))
|
||||
|
||||
log.info(_(u"Instantiating file '{0}'"
|
||||
u" from '{1}'").format(filevar[u'name'], filevar[u'source']))
|
||||
self.prepare_template(filevar)
|
||||
self.verify(filevar)
|
||||
self.process(filevar, container)
|
||||
if filevar['name'].startswith('..') and container not in [None, 'root']:
|
||||
self.change_properties(filevar, None, True)
|
||||
else:
|
||||
self.change_properties(filevar, container)
|
||||
|
||||
|
||||
def instance_file(self, filename=None, container='root', ctx=None):
|
||||
"""Run templatisation on one file
|
||||
|
||||
@param filename: name of a file
|
||||
@type filename: C{str}
|
||||
@param container: name of a container
|
||||
@type container: C{str}
|
||||
"""
|
||||
if container == 'all':
|
||||
if ctx is None:
|
||||
groups = self.client.get_groups()
|
||||
else:
|
||||
groups = ctx.keys()
|
||||
for group in groups:
|
||||
if group in ['all', 'root']:
|
||||
continue
|
||||
if ctx is None:
|
||||
lctx = None
|
||||
else:
|
||||
lctx = ctx[group]
|
||||
self.instance_file(filename=filename, container=group, ctx=lctx)
|
||||
else:
|
||||
if ctx is None:
|
||||
ctx = self.client.get_container_infos(container)
|
||||
|
||||
filevars = [f for f in ctx[u'files'] if f[u'name'] == filename]
|
||||
for f in filevars:
|
||||
self._instance_file(f, ctx)
|
||||
|
||||
def instance_files(self, filenames=None, container=None, containers_ctx=None):
|
||||
"""Run templatisation on all files of all containers
|
||||
|
||||
@param filenames: names of files
|
||||
@type filename: C{list}
|
||||
@param container: name of a container
|
||||
@type container: C{str}
|
||||
"""
|
||||
if containers_ctx is None:
|
||||
containers_ctx = []
|
||||
if container is not None:
|
||||
containers_ctx = [self.client.get_container_infos(container)]
|
||||
else:
|
||||
for group_name in self.client.get_groups():
|
||||
containers_ctx.append(self.client.get_group_infos(group_name))
|
||||
if filenames is None:
|
||||
all_files = set(listdir(cfg.distrib_dir))
|
||||
prev_files = set(listdir(cfg.templatedir))
|
||||
all_declared_files = set()
|
||||
for ctx in containers_ctx:
|
||||
for fdict in ctx[u'files']:
|
||||
all_declared_files.add(basename(fdict['source']))
|
||||
undeclared_files = all_files - all_declared_files
|
||||
toremove_files = prev_files - all_files
|
||||
# delete old templates (#6600)
|
||||
for fname in toremove_files:
|
||||
rm_file = join(cfg.templatedir, fname)
|
||||
log.debug(_(u"Removing file '{0}'").format(rm_file))
|
||||
unlink(rm_file)
|
||||
# copy template not referenced in a dictionary (#6303)
|
||||
for fname in undeclared_files:
|
||||
fobj = {'source': join(cfg.templatedir, fname), 'name': ''}
|
||||
self.prepare_template(fobj, True)
|
||||
|
||||
for ctx in containers_ctx:
|
||||
for fdict in ctx[u'files']:
|
||||
if not filenames or fdict[u'name'] in filenames:
|
||||
try:
|
||||
self._instance_file(fdict, container=ctx)
|
||||
except TemplateDisabled, err:
|
||||
# Information on disabled template only useful
|
||||
# in debug
|
||||
log.debug(err, exc_info=True)
|
799
creole/upgrade.py
Normal file
@@ -0,0 +1,799 @@
#!/usr/bin/env python
|
||||
#-*- coding: utf-8 -*-
|
||||
"""
|
||||
|
||||
utilitaire d'importation de configuration config.eol 2.2 ou config.eol 2.3
|
||||
vers config.eol 2.4
|
||||
|
||||
usage :
|
||||
|
||||
%prog <config_file_name>
|
||||
|
||||
"""
|
||||
from ConfigParser import ConfigParser
|
||||
|
||||
from tiramisu.option import SymLinkOption, ChoiceOption
|
||||
from .eosfunc import is_empty
|
||||
from .var_loader import convert_value
|
||||
import re
|
||||
from itertools import product
|
||||
|
||||
from .i18n import _
|
||||
|
||||
# ____ logger utility ____
|
||||
# log_filename = u'/var/log/creole.log'
|
||||
# try:
|
||||
# from pyeole.log import init_logging
|
||||
# except:
|
||||
# # compatibilité pour Zéphir 2.3
|
||||
# from pyeole.log import make_logger
|
||||
# log = make_logger(u'creole3.upgrade',
|
||||
# logfile=log_filename,
|
||||
# loglevel='INFO')
|
||||
# else:
|
||||
# log = init_logging(name=u'creoleUpgrade',
|
||||
# level='info',
|
||||
# console=False,
|
||||
# syslog=None,
|
||||
# filename=log_filename)
|
||||
|
||||
import logging
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
KEYS = ['val', 'valprec', 'valdefault']
|
||||
|
||||
|
||||
def migration_23_to_tiramisu(opt, val):
|
||||
if not opt.impl_is_multi():
|
||||
if (val == [] or val == ['']) and not isinstance(opt, ChoiceOption):
|
||||
val = None
|
||||
else:
|
||||
if val == []:
|
||||
val = None
|
||||
else:
|
||||
try:
|
||||
val = convert_value(opt, val[0])
|
||||
except ValueError:
|
||||
#if the value cannot be converted, keep the incorrect
#value so that tiramisu flags it as an error (and thus
#shows it in the interface)
|
||||
val = val[0]
|
||||
else:
|
||||
if val == ['']:
|
||||
val = []
|
||||
else:
|
||||
new_val = []
|
||||
for v in val:
|
||||
if v == '':
|
||||
new_val.append(None)
|
||||
else:
|
||||
try:
|
||||
new_val.append(convert_value(opt, v))
|
||||
except ValueError:
|
||||
#if the value cannot be converted, keep the incorrect
#value so that tiramisu flags it as an error (and thus
#shows it in the interface)
|
||||
new_val.append(v)
|
||||
val = new_val
|
||||
return val
|
||||
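# Illustrative examples (assumption: 'opt' behaves like a tiramisu option):
# for a non-multi option, a 2.3 value of [''] or [] becomes None; for a multi
# option, [''] becomes [] and each non-empty element goes through
# convert_value(), empty elements becoming None.
#
#   migration_23_to_tiramisu(simple_opt, [''])        # -> None
#   migration_23_to_tiramisu(multi_opt, [''])         # -> []
#   migration_23_to_tiramisu(multi_opt, ['eth0', '']) # -> ['eth0', None]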
|
||||
class Dico(ConfigParser):
|
||||
|
||||
def get_val(self, var, default=''):
|
||||
"""
|
||||
Renvoie la valeur d'une variable
|
||||
"""
|
||||
if self.has_section(var):
|
||||
return self.get(var, 'val')
|
||||
return default
|
||||
|
||||
def copy(self, old, new, keep=True):
|
||||
"""
|
||||
Renomme ou copie une variable
|
||||
vers une autre
|
||||
"""
|
||||
if self.has_section(old):
|
||||
if not self.has_section(new):
|
||||
self.add_section(new)
|
||||
for key in KEYS:
|
||||
value = self.get(old, key)
|
||||
self.set(new, key, value)
|
||||
if keep:
|
||||
log.info(_(u"Variable {0} has been copied in {1}").format(old, new))
|
||||
else:
|
||||
self.remove_section(old)
|
||||
log.info(_(u"Variable {0} has been renamed to {1}").format(old, new))
|
||||
|
||||
def move(self, old, new):
|
||||
"""
|
||||
Renomme ou copie une variable
|
||||
vers une autre
|
||||
"""
|
||||
self.copy(old, new, keep=False)
|
||||
|
||||
def remove(self, old):
|
||||
if self.has_section(old):
|
||||
self.remove_section(old)
|
||||
log.info(_(u"Variable {0} has been removed").format(old))
|
||||
|
||||
def simple2multi(self, src, new):
|
||||
"""
|
||||
n variables simples => 1 multi
|
||||
"""
|
||||
res = []
|
||||
for var in src:
|
||||
if self.has_section(var):
|
||||
try:
|
||||
value = eval(self.get(var, 'val'))[0]
|
||||
if value != '':
|
||||
res.append(value)
|
||||
except:
|
||||
log.error(_(u"Source variable {0} invalid").format(var))
|
||||
if res != []:
|
||||
self.fill_var(new, res)
|
||||
|
||||
def fill_var(self, var, val, valprec=[], valdefault=[]):
|
||||
"""
|
||||
Crée ou met à jour une variable
|
||||
"""
|
||||
if type(val) != list:
|
||||
val = [val]
|
||||
if not self.has_section(var):
|
||||
self.add_section(var)
|
||||
log.info(_(u"Variable updated: {0} = {1}").format(var, val))
|
||||
self.set(var, 'val', str(val))
|
||||
self.set(var, 'valprec', valprec)
|
||||
self.set(var, 'valdefault', valdefault)
|
||||
|
||||
def save(self, fichier):
|
||||
"""
|
||||
Enregistre le résultat
|
||||
"""
|
||||
fic = file(fichier, 'w')
|
||||
self.write(fic)
|
||||
fic.close()
|
||||
|
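# Illustrative sketch (not part of the original module): a config.eol file is
# an INI file with one section per variable and the keys 'val', 'valprec' and
# 'valdefault', each holding a repr() of a list. Hypothetical example:
#
#   [nom_machine]
#   val = ['scribe']
#   valprec = []
#   valdefault = []
#
#   dico = Dico()
#   dico.read('/etc/eole/config.eol')
#   dico.get_val('nom_machine')            # -> "['scribe']" (still a string)
#   dico.move('nom_machine', 'hostname')   # rename, keeps val/valprec/valdefault
#   dico.save('/tmp/config.eol.new')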
||||
def upgrade(config, configfile):
|
||||
"""
|
||||
Mise à jour d'un fichier .eol
|
||||
de 2.2 vers 2.4
|
||||
ou de 2.3 vers 2.4
|
||||
|
||||
:param dico: configparser instance
|
||||
:param version: config.eol version ('2.3' ou '2.4')
|
||||
"""
|
||||
log.info(_(u"config.eol upgrade started"))
|
||||
dico = Dico()
|
||||
dico.read(configfile)
|
||||
version = get_version(dico)
|
||||
if version == '2.2':
|
||||
upgrade22to23(dico)
|
||||
upgrade23to24(dico)
|
||||
# FIXME do stuff on 2.4 variables
|
||||
# loading of the default values from the 2.4 XML dictionaries
|
||||
owner = u"upgrade"
|
||||
store_dico = export_to_store(dico, config)
|
||||
return store_dico, version
|
||||
|
||||
def export_to_store(dico, config):
|
||||
"""
|
||||
exporte depuis un dico vers un dico qui a été mis à jour par les
|
||||
valeurs par défaut creole 2.4::
|
||||
|
||||
{"libelle_etab": {"owner": "gen_config", "val": "monchapet"},
|
||||
{"owner": "gen_config", "val": ["0.0.0.0"]}
|
||||
|
||||
:param dico: configparser dict
|
||||
:returns: config parser dico
|
||||
"""
|
||||
default_owner = u'upgrade'
|
||||
store = {}
|
||||
# tweak the settings to access disabled options
|
||||
config.cfgimpl_get_settings().remove('disabled')
|
||||
old_format = False
|
||||
for section in dico.sections():
|
||||
val = eval(dico.get_val(section))
|
||||
try:
|
||||
path = config.find_first(byname=section, type_='path', check_properties=False)
|
||||
if not path.startswith('creole.') or path.startswith('creole.containers.'):
|
||||
continue
|
||||
|
||||
opt = config.unwrap_from_path(path)
|
||||
if isinstance(opt, SymLinkOption):
|
||||
continue
|
||||
val = migration_23_to_tiramisu(opt, val)
|
||||
except AttributeError:
|
||||
log.error(_(u"Unknown variable: {0}").format(section))
|
||||
old_format = True
|
||||
if val is None or val == []:
|
||||
continue
|
||||
store[section] = {"owner": default_owner, "val": val}
|
||||
if old_format:
|
||||
store[section]['old_format'] = True
|
||||
return store
|
||||
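# Illustrative sketch (not part of the original function): the returned store
# maps each 2.3 variable name to its migrated value and the 'upgrade' owner;
# entries that could not be matched to a 2.4 option keep their raw 2.3 value
# and are flagged with 'old_format'. Hypothetical content:
#
#   {'libelle_etab': {'owner': u'upgrade', 'val': u'monchapet'},
#    'some_dropped_var': {'owner': u'upgrade', 'val': ['non'],
#                         'old_format': True}}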
|
||||
def upgrade22to23(dico):
|
||||
"""
|
||||
Mise à jour d'un fichier .eol
|
||||
de 2.2 vers 2.3
|
||||
|
||||
:param dico: configparser instance
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.2', '2.3'))
|
||||
# famille General
|
||||
dico.move('proxy', 'activer_proxy_client')
|
||||
dico.move('proxy_server', 'proxy_client_adresse')
|
||||
dico.move('proxy_port', 'proxy_client_port')
|
||||
dico.simple2multi(['serveur_maj', 'serveur_maj2'], 'serveur_maj')
|
||||
# spécifique Amon
|
||||
domaine = dico.get_val('nom_domaine_academique')
|
||||
if domaine != '':
|
||||
if '.' in domaine:
|
||||
ac, dom = eval(domaine)[0].rsplit('.', 1)
|
||||
else:
|
||||
# handle the Sphynx special case where the suffix was not
# part of the academic domain (.fr by default)
|
||||
ac = eval(domaine)[0]
|
||||
dom = 'fr'
|
||||
dico.fill_var('nom_academie', ac)
|
||||
dico.fill_var('suffixe_domaine_academique', dom)
|
||||
# rien sur Zéphir 2.2
|
||||
if dico.has_section('ip_ssh_eth0'):
|
||||
# ip/netmask facultatifs sur Scribe-2.2
|
||||
if 'oui' in dico.get_val('ssh_eth0') and dico.get_val('ip_ssh_eth0') == '[]':
|
||||
dico.fill_var('ip_ssh_eth0', '0.0.0.0')
|
||||
dico.fill_var('netmask_ssh_eth0', '0.0.0.0')
|
||||
# pas de ssh_eth0 sur Horus-2.2
|
||||
if not dico.has_section('ssh_eth0'):
|
||||
# FIXME ip_ssh_eth0 semble faculatif
|
||||
dico.fill_var('ssh_eth0', 'oui')
|
||||
# familles Interface-X
|
||||
for num in range(0, 5):
|
||||
dico.copy('ssh_eth%s' % num, 'admin_eth%s' % num)
|
||||
dico.copy('ip_ssh_eth%s' % num, 'ip_admin_eth%s' % num)
|
||||
dico.copy('netmask_ssh_eth%s' % num, 'netmask_admin_eth%s' % num)
|
||||
dico.move('agregation', 'activer_agregation')
|
||||
|
||||
# famille Services
|
||||
dico.move('cups', 'activer_cups')
|
||||
dico.move('ftp_perso', 'activer_proftpd')
|
||||
dico.move('ead_web', 'activer_ead_web')
|
||||
dico.move('apache', 'activer_apache')
|
||||
dico.move('mysql', 'activer_mysql')
|
||||
dico.move('xinet_interbase', 'activer_interbase')
|
||||
if 'oui' in dico.get_val('sso'):
|
||||
dico.fill_var('activer_sso', 'local')
|
||||
else:
|
||||
dico.fill_var('activer_sso', 'non')
|
||||
|
||||
# migration DHCP
|
||||
dhcp = dico.get_val('dhcp', None)
|
||||
if dhcp is not None:
|
||||
dico.move('dhcp', 'activer_dhcp')
|
||||
if dico.get_val('adresse_network_dhcp', None) is None:
|
||||
#migration d'un Horus 2.2
|
||||
len_dhcp = len(eval(dico.get_val('ip_basse_dhcp', "[]")))
|
||||
#recuperation des variables a migrer
|
||||
adresse_network_dhcp = eval(dico.get_val("adresse_network_eth0"))
|
||||
dico.fill_var("adresse_network_dhcp", adresse_network_dhcp * len_dhcp)
|
||||
adresse_netmask_dhcp = eval(dico.get_val("adresse_netmask_eth0"))
|
||||
dico.fill_var("adresse_netmask_dhcp", adresse_netmask_dhcp * len_dhcp)
|
||||
adresse_ip_gw_dhcp = eval(dico.get_val("adresse_ip_gw", "[]"))
|
||||
if adresse_ip_gw_dhcp != []:
|
||||
dico.fill_var("adresse_ip_gw_dhcp", adresse_ip_gw_dhcp * len_dhcp)
|
||||
nom_domaine_dhcp = eval(dico.get_val("nom_domaine_local", "[]"))
|
||||
if nom_domaine_dhcp != []:
|
||||
dico.fill_var("nom_domaine_dhcp", nom_domaine_dhcp * len_dhcp)
|
||||
adresse_ip_dns_dhcp = eval(dico.get_val("adresse_ip_dns", "[]"))
|
||||
if adresse_ip_dns_dhcp != []:
|
||||
dico.fill_var("adresse_ip_dns_dhcp", [adresse_ip_dns_dhcp[0]] * len_dhcp)
|
||||
|
||||
# famille Messagerie
|
||||
dico.move('passerelle_smtp_aca', 'passerelle_smtp')
|
||||
dico.move('spamassassin', 'activer_spamassassin')
|
||||
if 'oui' in dico.get_val('courier_imap'):
|
||||
if 'oui' in dico.get_val('courier_pop'):
|
||||
dico.fill_var('activer_courier', 'pop - imap')
|
||||
else:
|
||||
dico.fill_var('activer_courier', 'imap')
|
||||
elif 'oui' in dico.get_val('courier_pop'):
|
||||
dico.fill_var('activer_courier', 'pop')
|
||||
else:
|
||||
dico.fill_var('activer_courier', 'non')
|
||||
# Zéphir
|
||||
dico.move('serveur_smtp', 'passerelle_smtp')
|
||||
dico.move('compte_smtp', 'system_mail_from')
|
||||
if '465' in dico.get_val('port_smtp'):
|
||||
dico.fill_var('tls_smtp', 'port 465')
|
||||
|
||||
# famille Client_ldap
|
||||
dico.move('base_ldap', 'ldap_base_dn')
|
||||
serveur_ldap = dico.get_val('serveur_ldap', '[]')
|
||||
if serveur_ldap != '[]':
|
||||
dico.move('serveur_ldap', 'adresse_ip_ldap')
|
||||
if eval(serveur_ldap)[0] not in ['127.0.0.1', 'localhost']:
|
||||
dico.fill_var('activer_client_ldap', 'distant')
|
||||
|
||||
# famille Eole-sso
|
||||
dico.move('adresse_ip_sso', 'eolesso_adresse')
|
||||
dico.move('port_sso', 'eolesso_port')
|
||||
# eolesso_ldap (multi)
|
||||
dico.move('ldap_sso', 'eolesso_ldap')
|
||||
dico.move('port_ldap_sso', 'eolesso_port_ldap')
|
||||
dico.move('base_ldap_sso', 'eolesso_base_ldap')
|
||||
dico.move('sso_ldap_label', 'eolesso_ldap_label')
|
||||
dico.move('sso_ldap_reader', 'eolesso_ldap_reader')
|
||||
dico.move('sso_ldap_reader_passfile', 'eolesso_ldap_reader_passfile')
|
||||
# la "suite"
|
||||
dico.move('adresse_sso_parent', 'eolesso_adresse_parent')
|
||||
dico.move('port_sso_parent', 'eolesso_port_parent')
|
||||
dico.move('sso_pam_securid', 'eolesso_pam_securid')
|
||||
dico.move('sso_cert', 'eolesso_cert')
|
||||
dico.move('sso_ca_location', 'eolesso_ca_location')
|
||||
dico.move('sso_session_timeout', 'eolesso_session_timeout')
|
||||
dico.move('sso_css', 'eolesso_css')
|
||||
|
||||
# famille Applications web
|
||||
dico.move('phpmyadmin', 'activer_phpmyadmin')
|
||||
dico.move('posh', 'activer_envole')
|
||||
dico.move('web_domain', 'web_url')
|
||||
dico.move('web_default', 'web_redirection')
|
||||
posh_path = dico.get_val('posh_path', '[]')
|
||||
if posh_path != '[]' and eval(posh_path)[0] != '':
|
||||
dico.fill_var('alias_envole', '/' + eval(posh_path)[0])
|
||||
|
||||
# famille Bacula
|
||||
if 'oui' in "%s%s%s" % (dico.get_val('active_bacula_dir'),
|
||||
dico.get_val('active_bacula_fd'),
|
||||
dico.get_val('active_bacula_sd')):
|
||||
dico.fill_var('activer_bacula', 'oui')
|
||||
dico.move('active_bacula_dir', 'activer_bacula_dir')
|
||||
dico.move('active_bacula_sd', 'activer_bacula_sd')
|
||||
# bacula_fd n'est plus géré
|
||||
else:
|
||||
dico.fill_var('activer_bacula', 'non')
|
||||
|
||||
# famille Clamav
|
||||
dico.move('enable_clamd', 'dansguardian_clam')
|
||||
|
||||
# famille Certifs-ssl
|
||||
dico.move('ssl_serveur_name', 'ssl_server_name')
|
||||
|
||||
# famille Authentification
|
||||
dico.move('active_nufw', 'activer_nufw')
|
||||
dico.move('freeradius', 'activer_freeradius')
|
||||
|
||||
# famille Logs
|
||||
if 'Oui' in dico.get_val('activate_tls'):
|
||||
dico.fill_var('rsyslog_tls', 'oui')
|
||||
|
||||
# famille Reverse proxy
|
||||
revprox = dico.get_val('revprox_domainname', '[]')
|
||||
if revprox != '[]' and eval(revprox)[0] != '':
|
||||
dico.fill_var('revprox_activate_http', 'oui')
|
||||
|
||||
# famille réseau avancé
|
||||
route = dico.get_val('route_adresse', '[]')
|
||||
if route != '[]' and eval(route)[0] != '':
|
||||
dico.fill_var('activer_route', 'oui')
|
||||
|
||||
# famille Vpn-pki
|
||||
dico.simple2multi(['url_crl1', 'url_crl2'], 'url_crl')
|
||||
|
||||
|
||||
def upgrade23to24(dico):
|
||||
"""
|
||||
Mise à jour d'un fichier .eol
|
||||
de 2.3 vers 2.4
|
||||
|
||||
:param dico: configparser instance
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.3', '2.4'))
|
||||
cache_dir = dico.get_val('cache_dir', '[]')
|
||||
if cache_dir != '[]' and eval(cache_dir)[0] == '/var/spool/squid':
|
||||
dico.fill_var('cache_dir', '')
|
||||
|
||||
system_mail_to = dico.get_val('system_mail_to', '[]')
|
||||
if system_mail_to != '[]' and eval(system_mail_to)[0] == 'postmaster':
|
||||
dico.fill_var('system_mail_to', '')
|
||||
|
||||
varname = 'alias_gw_eth0'
|
||||
var = dico.get_val(varname, '[]')
|
||||
if var != '[]' and eval(var)[0] == 'aucun':
|
||||
dico.fill_var(varname, '')
|
||||
|
||||
for i in range(0, 5):
|
||||
dico.move('adresse_ip_vlan_eth{0}'.format(i), 'vlan_ip_eth{0}'.format(i))
|
||||
dico.move('adresse_netmask_vlan_eth{0}'.format(i), 'vlan_netmask_eth{0}'.format(i))
|
||||
dico.move('adresse_network_vlan_eth{0}'.format(i), 'vlan_network_eth{0}'.format(i))
|
||||
dico.move('adresse_broadcast_vlan_eth{0}'.format(i), 'vlan_broadcast_eth{0}'.format(i))
|
||||
dico.move('adresse_gw_vlan_eth{0}'.format(i), 'vlan_gw_eth{0}'.format(i))
|
||||
dico.move('id_vlan_eth{0}'.format(i), 'vlan_id_eth{0}'.format(i))
|
||||
|
||||
varname = 'vlan_gw_eth0'
|
||||
var = dico.get_val(varname, '[]')
|
||||
if var != '[]' and eval(var)[0] == 'aucun':
|
||||
dico.fill_var(varname, '')
|
||||
|
||||
dico.move('proxy_eth0_adresse', 'proxy_eth0_ip')
|
||||
dico.move('proxy_eth0_network', 'proxy_eth0_network')
|
||||
dico.move('nom_interface1', 'nom_zone_eth1')
|
||||
dico.move('era_proxy_bypass', 'era_proxy_bypass_eth1')
|
||||
dico.move('smb_adresse_ip_wins', 'smb_wins_server')
|
||||
|
||||
dico.remove('http_port')
|
||||
dico.remove('http_port_2')
|
||||
dico.remove('test_nutmaster')
|
||||
dico.remove('test_activer_routage_ipv6')
|
||||
dico.remove('test_activer_kerberos')
|
||||
dico.remove('test_activer_clam_proxy')
|
||||
dico.remove('test_activer_proxy_eth0')
|
||||
dico.remove('revprox_poshadmin')
|
||||
dico.remove('ip_client_logs_udp')
|
||||
dico.remove('adresse_ip_conteneur_dns')
|
||||
|
||||
dico.simple2multi(['test_distant_domaine1', 'test_distant_domaine2'], 'test_distant_domaine')
|
||||
dico.remove('test_distant_domaine1')
|
||||
dico.remove('test_distant_domaine2')
|
||||
dico.simple2multi(['ssl_subjectaltname_ip', 'ssl_subjectaltname_ns'], 'ssl_subjectaltname')
|
||||
dico.remove('ssl_subjectaltname_ip')
|
||||
dico.remove('ssl_subjectaltname_ns')
|
||||
|
||||
old_serveur_maj = eval(dico.get_val('serveur_maj', '[]'))
|
||||
if old_serveur_maj != []:
|
||||
serveur_maj = []
|
||||
for maj in old_serveur_maj:
|
||||
if maj == 'eoleng.ac-dijon.fr':
|
||||
maj = 'eole.ac-dijon.fr'
|
||||
if maj == 'test-eoleng.ac-dijon.fr':
|
||||
maj = 'test-eole.ac-dijon.fr'
|
||||
serveur_maj.append(maj)
|
||||
dico.fill_var('serveur_maj', serveur_maj)
|
||||
|
||||
ssl_country_name = eval(dico.get_val('ssl_country_name', '[""]'))[0].upper()
|
||||
dico.fill_var('ssl_country_name', ssl_country_name)
|
||||
|
||||
tmp_short_name = []
|
||||
tmp_long_name = []
|
||||
tmp_ip = []
|
||||
nom_domaine_local = eval(dico.get_val('nom_domaine_local', "['']"))[0]
|
||||
def _append_tmp_name(ip, long_name, short_name="NONE"):
|
||||
splitted_labels = long_name.split('.')
|
||||
if short_name == "NONE":
|
||||
short_name = splitted_labels[0]
|
||||
# ajout si non déjà défini dans Réseau avancé
|
||||
if long_name not in tmp_long_name:
|
||||
if short_name not in tmp_short_name:
|
||||
#le nom court n'existe pas dans la liste, donc l'ajoute
|
||||
tmp_short_name.append(short_name)
|
||||
else:
|
||||
if '.'.join(splitted_labels[1:]) == nom_domaine_local:
|
||||
# le nom court est déjà présent
|
||||
# privilégie le nom court pour le nom de domaine local
|
||||
tmp_short_name[tmp_short_name.index(short_name)] = None
|
||||
tmp_short_name.append(short_name)
|
||||
else:
|
||||
# ne pas doublonner le nom court
|
||||
tmp_short_name.append(None)
|
||||
if len(splitted_labels) > 1:
|
||||
tmp_long_name.append(long_name)
|
||||
else:
|
||||
# si nom court, transforme en nom long
|
||||
tmp_long_name.append(long_name + '.' + nom_domaine_local)
|
||||
tmp_ip.append(ip)
|
||||
|
||||
if eval(dico.get_val('activer_ajout_hosts', '["non"]'))[0] == 'oui':
|
||||
# récupération et passage en minuscules des
|
||||
# nom_court_hosts et nom_long_hosts existants #11473
|
||||
ips = eval(dico.get_val('adresse_ip_hosts', '[]').lower())
|
||||
long_names = eval(dico.get_val('nom_long_hosts', '[]').lower())
|
||||
for idx, short_name in enumerate(eval(dico.get_val('nom_court_hosts', '[]').lower())):
|
||||
_append_tmp_name(ips[idx], long_names[idx], short_name)
|
||||
|
||||
# Migration des variables hosts #2795
|
||||
# noms d'hôtes forcés en minuscules #9790
|
||||
nom_host_dns = eval(dico.get_val('nom_host_dns', '[]').lower())
|
||||
if not is_empty(nom_host_dns):
|
||||
ips = eval(dico.get_val('ip_host_dns'))
|
||||
# transforme les nom_host_dns en nom_court_hosts et nom_long_hosts
|
||||
# donc force activer_ajout_hosts à oui
|
||||
dico.fill_var('activer_ajout_hosts', 'oui')
|
||||
for idx, long_name in enumerate(nom_host_dns):
|
||||
_append_tmp_name(ips[idx], long_name)
|
||||
|
||||
if not is_empty(tmp_short_name):
|
||||
dico.fill_var('adresse_ip_hosts', tmp_ip)
|
||||
dico.fill_var('nom_court_hosts', tmp_short_name)
|
||||
dico.fill_var('nom_long_hosts', tmp_long_name)
|
||||
dico.remove('nom_host_dns')
|
||||
dico.remove('ip_host_dns')
|
||||
|
||||
# Ajout du point devant chaque zone #7008
|
||||
old_nom_zone_dns_cache = eval(dico.get_val('nom_zone_dns_cache', '[]'))
|
||||
if not is_empty(old_nom_zone_dns_cache):
|
||||
nom_zone_dns_cache = []
|
||||
for old in old_nom_zone_dns_cache:
|
||||
nom_zone_dns_cache.append('.' + old)
|
||||
dico.fill_var('nom_zone_dns_cache', nom_zone_dns_cache)
|
||||
|
||||
# Modification du chemin de la corbeille Samba #7463
|
||||
smb_trash_dir = eval(dico.get_val('smb_trash_dir', '["/"]'))[0]
|
||||
if not smb_trash_dir.startswith('/'):
|
||||
dico.fill_var('smb_trash_dir', 'perso/{0}'.format(smb_trash_dir))
|
||||
|
||||
# antivirus temps réel => remis à default #19833
|
||||
if dico.get_val('smb_vscan', "['non']") == "['oui']":
|
||||
dico.remove('smb_vscan')
|
||||
|
||||
# Famille Proxy parent #7823
|
||||
if not is_empty(eval(dico.get_val('nom_cache_pere', '[]'))):
|
||||
dico.fill_var('activer_cache_pere', 'oui')
|
||||
if not is_empty(eval(dico.get_val('nom_cache_pere_zone', '[]'))):
|
||||
dico.fill_var('activer_cache_pere_zone', 'oui')
|
||||
if not is_empty(eval(dico.get_val('proxy_sibling_ip', '[]'))):
|
||||
dico.fill_var('activer_proxy_sibling', 'oui')
|
||||
|
||||
# Autorisation proxy eth0 #8167
|
||||
if not is_empty(eval(dico.get_val('proxy_eth0_ip', '[]'))):
|
||||
dico.fill_var('activer_supp_proxy_eth0', 'oui')
|
||||
|
||||
# Famille Rvp #8164
|
||||
if not is_empty(eval(dico.get_val('adresse_network_zone_rvp', '[]'))):
|
||||
dico.fill_var('acces_proxy_zone_rvp', 'oui')
|
||||
|
||||
# half_closed_clients => remise à default #19813
|
||||
if dico.get_val('half_closed_clients', "['off']") == "['on']":
|
||||
dico.remove('half_closed_clients')
|
||||
|
||||
##
|
||||
## Modification de la configuration exim
|
||||
##
|
||||
# passerelle SMTP
|
||||
log.info(_(u"Migrating SMTP parameters"))
|
||||
passerelle_smtp = dico.get_val('passerelle_smtp', '[]')
|
||||
dico.move('passerelle_smtp', 'exim_relay_smtp')
|
||||
if is_empty(passerelle_smtp):
|
||||
# No SMTP gateway
|
||||
dico.fill_var('activer_exim_relay_smtp', u'non')
|
||||
|
||||
# Type de serveur SMTP
|
||||
exim_mail_type = eval(dico.get_val('exim_mail_type', '["satellite"]'))[0]
|
||||
log.info("Migration de exim_mail_type: '{0}'".format(exim_mail_type))
|
||||
dico.remove('exim_mail_type')
|
||||
if exim_mail_type == 'satellite':
|
||||
# Nothing to do
|
||||
pass
|
||||
elif exim_mail_type == 'local':
|
||||
# Local is smarthost without relay, should not happen
|
||||
dico.fill_var('exim_smarthost', u'oui')
|
||||
elif exim_mail_type == 'smarthost':
|
||||
dico.fill_var('exim_smarthost', u'oui')
|
||||
elif exim_mail_type == 'mailhub':
|
||||
dico.fill_var('exim_relay', u'oui')
|
||||
dico.fill_var('exim_relay_manual_routes', u'oui')
|
||||
elif exim_mail_type == 'internet':
|
||||
dico.fill_var('activer_exim_relay_smtp', u'non')
|
||||
dico.fill_var('exim_relay', u'oui')
|
||||
dico.fill_var('exim_relay_manual_routes', u'oui')
|
||||
else:
|
||||
log.warn(_(u'Mail configuration not recognised, not processed'))
|
||||
|
||||
# Réécriture
|
||||
mail_rewrite_domain = eval(dico.get_val('mail_rewrite_domain', '["non"]'))[0]
|
||||
dico.remove('mail_rewrite_domain')
|
||||
if mail_rewrite_domain == 'oui':
|
||||
dico.fill_var('exim_qualify_domain', 'nom de domaine local')
|
||||
|
||||
# Modèle Era utilisé (#9082)
|
||||
mapping = {'2zones-amonecole-nginx' : u'2zones-amonecole',
|
||||
'3zones-scribe-nginx' : u'3zones-dmz',
|
||||
'3zones-scribe' : u'3zones-dmz',
|
||||
'4zones-scribe-nginx' : u'4zones',
|
||||
'4zones-scribe-nufw' : u'4zones',
|
||||
'4zones-scribe' : u'4zones',
|
||||
'5zones-scribe-nginx' : u'5zones',
|
||||
'5zones-scribe' : u'5zones',
|
||||
}
|
||||
model = eval(dico.get_val('type_amon', '[""]'))[0]
|
||||
if model in mapping:
|
||||
dico.fill_var('type_amon', mapping[model])
|
||||
|
||||
# Migration des modules ecdl
|
||||
if dico.get_val('ecdl_regles_filtrage_supplementaires', 'Pas un eCDL') != 'Pas un eCDL':
|
||||
dico.move('ecdl_ldap_machine_suffix', 'ldap_machine_suffix')
|
||||
dico.move('ecdl_ldap_group_suffix', 'ldap_group_suffix')
|
||||
dico.move('ecdl_smb_share_model', 'smb_share_model')
|
||||
dico.move('ecdl_smb_vscan', 'smb_vscan')
|
||||
dico.move('ecdl_smb_ports', 'smb_ports')
|
||||
dico.move('ecdl_smb_server_string', 'smb_server_string')
|
||||
dico.move('ecdl_smb_trash', 'smb_trash')
|
||||
dico.move('ecdl_smb_trash_dir', 'smb_trash_dir')
|
||||
dico.move('ecdl_smb_trash_purge', 'smb_trash_purge')
|
||||
dico.move('ecdl_smb_quotawarn' , 'smb_quotawarn')
|
||||
dico.move('ecdl_smb_guest', 'smb_guest')
|
||||
dico.move('ecdl_smb_wins_support', 'smb_wins_support')
|
||||
dico.move('ecdl_smb_adresse_ip_wins', 'smb_wins_server')
|
||||
dico.move('ecdl_smb_dns_proxy', 'smb_dns_proxy')
|
||||
dico.move('ecdl_smb_oplocks', 'smb_oplocks')
|
||||
dico.move('ecdl_smb_dos_attributes', 'smb_dos_attributes')
|
||||
dico.move('ecdl_smb_unixextensions', 'smb_unixextensions')
|
||||
dico.move('ecdl_smb_partage_nom', 'smb_partage_nom')
|
||||
dico.move('ecdl_smb_partage_path', 'smb_partage_path')
|
||||
dico.move('ecdl_smb_partage_visibilite', 'smb_partage_visibilite')
|
||||
dico.move('ecdl_smb_partage_ecriture', 'smb_partage_ecriture')
|
||||
dico.move('ecdl_regles_filtrage_supplementaires', 'activer_regles_filtrage_port_source')
|
||||
dico.move('ecdl_smb_os_level', 'smb_os_level')
|
||||
dico.move('ecdl_smb_domain_master', 'smb_domain_master')
|
||||
dico.move('ecdl_ca_cert', 'ldap_ca_cert')
|
||||
dico.move('meddtl_suffixe_ldap_nss_base_passwd', 'ldap_nss_base_passwd_filter')
|
||||
dico.move('meddtl_suffixe_ldap_nss_base_group', 'ldap_nss_base_group_filter')
|
||||
dico.move('ecdl_ldap_timeout', 'ldap_timeout')
|
||||
dico.move('ecdl_smb_netbios_name', 'smb_netbios_name')
|
||||
dico.move('ecdl_smb_workgroup', 'smb_workgroup')
|
||||
dico.move('ecdl_smb_usershare_max_shares', 'smb_usershare_max_shares')
|
||||
dico.move('ecdl_smb_activer_partages', 'smb_activer_partages')
|
||||
dico.remove('ecdl_smb_log_level')
|
||||
# fin de migration des modules ecdl
|
||||
|
||||
# migration des modules esbl
|
||||
if dico.get_val('activer_lister_repertoires_apache', 'Pas un eSBL') != 'Pas un eSBL':
|
||||
dico.fill_var('smb_log_level', 0)
|
||||
smb_activer_ordre_resolution_nom = dico.get_val('smb_activer_ordre_resolution_nom', 'non')
|
||||
if smb_activer_ordre_resolution_nom == 'oui':
|
||||
smb_name_resolve_order = " ".join(eval(dico.get_val('smb_procede_recherche_nom')))
|
||||
dico.fill_var('smb_name_resolve_order', smb_name_resolve_order)
|
||||
smb_ad_nom_long_controleur = dico.get_val('smb_ad_nom_long_controleur', "['']")
|
||||
if smb_ad_nom_long_controleur != "['']":
|
||||
dico.fill_var('smb_ad_server', smb_ad_nom_long_controleur)
|
||||
smb_ad_realm = dico.get_val('smb_ad_realm', "['']")
|
||||
if smb_ad_realm != "['']":
|
||||
dico.fill_var('smb_realm', smb_ad_realm)
|
||||
dico.move('activer_lister_repertoires_apache', 'apache_lister_repertoires')
|
||||
|
||||
# répartition des variables pour les répertoires ftp
|
||||
ftps = {}
|
||||
for ftp_rep, ftp_anon in zip(eval(dico.get_val('acces_ftp', '[]')),
|
||||
eval(dico.get_val('acces_ftp_anonymous', '[]'))):
|
||||
ftps[ftp_anon] = ftps.get(ftp_anon, []) + [ftp_rep]
|
||||
# si len(ftps['oui']) > 1, pas de reprise automatique
|
||||
# sinon ftps['oui'] -> ftp_anonymous_directory
|
||||
# ftps['non'] -> ftp_access_directory
|
||||
|
||||
if 'oui' in ftps and len(ftps['oui']) == 1:
|
||||
dico.fill_var('ftp_anonymous_directory', ftps['oui'][0])
|
||||
dico.fill_var('activer_ftp_anonymous_access', 'oui')
|
||||
if 'non' in ftps:
|
||||
dico.fill_var('ftp_access_directory', ftps['non'])
|
||||
dico.fill_var('activer_ftp_access', 'oui')
|
||||
ftp_maxretrievefilesize = dico.get_val('ftp_maxretrievefilesize', '')
|
||||
if ftp_maxretrievefilesize != '':
|
||||
ftp_maxretrievefilesize = re.search(r'[0-9]+', ftp_maxretrievefilesize).group()
|
||||
dico.fill_var('ftp_maxretrievefilesize', ftp_maxretrievefilesize)
|
||||
ftp_maxstorefilesize = dico.get_val('ftp_maxstorefilesize', '')
|
||||
if ftp_maxstorefilesize != '':
|
||||
ftp_maxstorefilesize = re.search(r'[0-9]+', ftp_maxstorefilesize).group()
|
||||
dico.fill_var('ftp_maxstorefilesize', ftp_maxstorefilesize)
|
||||
|
||||
dico.move('activer_pare_feu', 'activer_firewall')
|
||||
# fin de migration des modules esbl
|
||||
|
||||
# migration des modules essl
|
||||
if dico.get_val('ecdl_serveurs_ip', "Pas un eSSL") != "Pas un eSSL":
|
||||
# variables ftp_max*
|
||||
ftp_maxretrievefilesize = dico.get_val('ftp_maxretrievefilesize', '')
|
||||
if ftp_maxretrievefilesize != '':
|
||||
ftp_maxretrievefilesize = re.search(r'[0-9]+', ftp_maxretrievefilesize).group()
|
||||
dico.fill_var('ftp_maxretrievefilesize', ftp_maxretrievefilesize)
|
||||
ftp_maxstorefilesize = dico.get_val('ftp_maxstorefilesize', '')
|
||||
if ftp_maxstorefilesize != '':
|
||||
ftp_maxstorefilesize = re.search(r'[0-9]+', ftp_maxstorefilesize).group()
|
||||
dico.fill_var('ftp_maxstorefilesize', ftp_maxstorefilesize)
|
||||
# variables renommées
|
||||
dico.move('sites_distants_morea_ip', 'sites_distants_ip')
|
||||
dico.move('sites_distants_morea_netmask', 'sites_distants_netmask')
|
||||
dico.move('nagios_morea_ip', 'nagios_dist_ip')
|
||||
dico.move('nagios_morea_netmask', 'nagios_dist_netmask')
|
||||
dico.move('morea_routeur_ip', 'wan_routeur_ip')
|
||||
dico.move('morea_interface', 'wan_interface')
|
||||
dico.move('surf_lan_ip', 'sites_dist_ip')
|
||||
dico.move('surf_lan_netmask', 'sites_dist_netmask')
|
||||
dico.move('morea_route_adresse', 'wan_route_adresse')
|
||||
dico.move('morea_route_netmask', 'wan_route_netmask')
|
||||
# conversions de valeurs
|
||||
variante_type_mapping = {'standard': 'production',
|
||||
'Applis Web': 'Applis_Web',
|
||||
'eSSL Morea': 'eSSL',
|
||||
'eSSL Internet': 'eSSL_Internet',
|
||||
'eSSL SPC': 'eSSL_SPC',
|
||||
'ppp': 'PPP',
|
||||
'': 'production'}
|
||||
variante_type = eval(dico.get_val('variante_type', "['']"))[0]
|
||||
dico.fill_var('variante_type', variante_type_mapping[variante_type])
|
||||
|
||||
# migration des variables dhcp
|
||||
exxl_dhcp = dico.has_section('dhcp_lease_max')
|
||||
if dico.get_val('activer_dhcp', "['non']") == "['oui']" and exxl_dhcp:
|
||||
# récupération des valeurs de la multi
|
||||
ip_basse = eval(dico.get_val('ip_basse_dhcp', '[""]'))
|
||||
ip_haute = eval(dico.get_val('ip_haute_dhcp', '[""]'))
|
||||
restriction = eval(dico.get_val('activer_dhcp_hotes_autorises', "['']"))
|
||||
lease_default = eval(dico.get_val('dhcp_lease_default', "['']"))
|
||||
lease_max = eval(dico.get_val('dhcp_lease_max', "['']"))
|
||||
# récupération des valeurs communes simples
|
||||
network = [eval(dico.get_val('adresse_network_dhcp', "['']"))[0]]
|
||||
netmask = [eval(dico.get_val('adresse_netmask_dhcp', "['']"))[0]]
|
||||
nom_domaine_dhcp = [eval(dico.get_val('nom_domaine_dhcp', "['']"))[0]]
|
||||
gateway_dhcp = [eval(dico.get_val('adresse_ip_gw_dhcp', "['']"))[0]]
|
||||
# récupération des valeurs communes multiples
|
||||
dns_dhcp = eval(dico.get_val('adresse_ip_dns_dhcp', "['']"))
|
||||
wins = eval(dico.get_val('adresse_ip_wins_dhcp', "['']"))
|
||||
wins_primaire = wins[0]
|
||||
if len(wins) > 1:
|
||||
wins_secondaire = wins[1]
|
||||
else:
|
||||
wins_secondaire = wins_primaire
|
||||
ntp_dhcp = eval(dico.get_val('adresse_ip_ntp_dhcp', "['']"))
|
||||
# création des nouvelles listes, produit cartésien
|
||||
ranges, dns_dhcp, ntp_dhcp = zip(*list(product(zip(ip_basse, ip_haute, restriction, lease_default, lease_max), dns_dhcp, ntp_dhcp)))
|
||||
dns_dhcp = list(dns_dhcp)
|
||||
ntp_dhcp = list(ntp_dhcp)
|
||||
ip_basse, ip_haute, restriction, lease_default, lease_max = [list(l) for l in zip(*ranges)]
|
||||
nb_ranges = len(ip_basse)
|
||||
nom_domaine_dhcp = nom_domaine_dhcp*nb_ranges
|
||||
gateway_dhcp = gateway_dhcp*nb_ranges
|
||||
wins_primaire = [wins_primaire]*nb_ranges
|
||||
wins_secondaire = [wins_secondaire]*nb_ranges
|
||||
network = network*nb_ranges
|
||||
netmask = netmask*nb_ranges
|
||||
# chargement des valeurs dans le dictionnaire
|
||||
dico.fill_var('adresse_network_dhcp', network)
|
||||
dico.fill_var('adresse_netmask_dhcp',netmask)
|
||||
dico.fill_var('ip_basse_dhcp', ip_basse)
|
||||
dico.fill_var('ip_haute_dhcp', ip_haute)
|
||||
dico.fill_var('nom_domaine_dhcp', nom_domaine_dhcp)
|
||||
dico.fill_var('adresse_ip_gw_dhcp', gateway_dhcp)
|
||||
dico.fill_var('adresse_ip_dns_dhcp', dns_dhcp)
|
||||
dico.fill_var('adresse_ip_wins_primaire_dhcp', wins_primaire)
|
||||
dico.fill_var('adresse_ip_wins_secondaire_dhcp', wins_secondaire)
|
||||
dico.fill_var('adresse_ip_ntp_dhcp', ntp_dhcp)
|
||||
dico.fill_var('interdire_hotes_inconnus', restriction)
|
||||
dico.fill_var('dhcp_lease_default', lease_default)
|
||||
dico.fill_var('dhcp_lease_max', lease_max)
|
||||
|
||||
#envole
|
||||
if dico.get_val('activer_envole', "['non']") == "['oui']" and dico.get_val('force_envole', "['non']") == "['oui']":
|
||||
alias_envole = eval(dico.get_val('alias_envole'))[0]
|
||||
if alias_envole != '/':
|
||||
dico.fill_var('web_redirection', alias_envole)
|
||||
dico.remove('alias_envole')
|
||||
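# Illustrative sketch (not part of the original module): the DHCP migration
# above combines every (range, dns, ntp) triple with itertools.product and
# then unzips the result, so each DHCP range is duplicated once per DNS/NTP
# address. With hypothetical values:
#
#   from itertools import product
#   ip_basse = ['10.0.0.10']; ip_haute = ['10.0.0.50']
#   restriction = ['non']; lease_default = ['7200']; lease_max = ['86400']
#   dns_dhcp = ['10.0.0.1', '10.0.0.2']; ntp_dhcp = ['10.0.0.1']
#   ranges, dns_dhcp, ntp_dhcp = zip(*list(product(
#       zip(ip_basse, ip_haute, restriction, lease_default, lease_max),
#       dns_dhcp, ntp_dhcp)))
#   # len(ranges) == 2: one entry per DNS server for the single range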
|
||||
def get_version(dico):
|
||||
"""
|
||||
recupère la version en fonction de la présence ou non
|
||||
de la variable 'serveur_maj2'
|
||||
|
||||
:param dico: ConfigParser object
|
||||
:return version: '2.2' ou '2.3'
|
||||
"""
|
||||
# ________ version du config.eol ________
|
||||
|
||||
if dico.has_section('serveur_maj2') and not \
|
||||
dico.has_section('activer_bash_completion'):
|
||||
version = '2.2'
|
||||
else:
|
||||
version = '2.3'
|
||||
return version
|
||||
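# Illustrative sketch (not part of the original module): version detection
# relies on variables that only ever existed in one release. Hypothetical use:
#
#   dico = Dico()
#   dico.read('/etc/eole/config.eol')
#   get_version(dico)   # -> '2.2' if 'serveur_maj2' exists (and
#                       #    'activer_bash_completion' does not), else '2.3'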
|
||||
|
||||
def main(config_file):
|
||||
"""main entry point"""
|
||||
pass
|
||||
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
if len(sys.argv) != 2:
|
||||
print __doc__
|
||||
sys.exit(1)
|
||||
main(sys.argv[1])
|
735
creole/upgrade24.py
Normal file
@@ -0,0 +1,735 @@
#!/usr/bin/env python
|
||||
#-*- coding: utf-8 -*-
|
||||
"""
|
||||
|
||||
Utilitaire de mise à jour des variables
|
||||
pour les versions >= 2.4.1
|
||||
|
||||
"""
|
||||
from .upgrade import log, migration_23_to_tiramisu
|
||||
from .var_loader import convert_value
|
||||
from pyeole.i18n import i18n
|
||||
from tiramisu.setting import owners
|
||||
from tiramisu.setting import undefined
|
||||
from distutils.version import StrictVersion
|
||||
from pyeole.encode import normalize
|
||||
_ = i18n('creole')
|
||||
|
||||
class Upgrade():
|
||||
"""
|
||||
Méthodes pour la mise à niveau des variables
|
||||
"""
|
||||
def __init__(self, config):
|
||||
owner = u'upgrade'
|
||||
if owner not in dir(owners):
|
||||
owners.addowner(owner)
|
||||
self.config = config
|
||||
self.owner = getattr(owners, owner)
|
||||
self.unknown_options = config.impl_get_information(u'unknown_options')
|
||||
|
||||
def get_old_value(self, variable, old_variable, default=None):
|
||||
"""
|
||||
Retourne la valeur d'une variable "disparue"
|
||||
"""
|
||||
try:
|
||||
old_obj = self.unknown_options[old_variable]
|
||||
if old_obj.get('old_format', False):
|
||||
try:
|
||||
path = self.get_path(variable)
|
||||
except AttributeError:
|
||||
return default
|
||||
opt = self.config.unwrap_from_path(path)
|
||||
val = migration_23_to_tiramisu(opt, old_obj['val'])
|
||||
else:
|
||||
val = old_obj['val']
|
||||
return val
|
||||
except KeyError:
|
||||
return default
|
||||
|
||||
def get_value(self, variable, default=None):
|
||||
"""
|
||||
Retourne la valeur d'une variable "connue"
|
||||
"""
|
||||
try:
|
||||
path = self.get_path(variable)
|
||||
except AttributeError:
|
||||
return default
|
||||
return self.config.getattr(path,
|
||||
force_permissive=True)
|
||||
|
||||
def get_unvalid_value(self, variable, default=None):
|
||||
try:
|
||||
path = self.get_path(variable)
|
||||
except AttributeError:
|
||||
return default
|
||||
try:
|
||||
return self.config.impl_get_information('orig_value_{}'.format(path))
|
||||
except ValueError:
|
||||
return default
|
||||
|
||||
def get_noncalculated_value_for_auto(self, variable):
|
||||
"""
|
||||
Retourne la valeur contenue dans le fichier config.eol dans le cas où la variable
|
||||
est calculée (auto), forcé à la valeur par défaut, ...
|
||||
"""
|
||||
try:
|
||||
path = self.get_path(variable)
|
||||
except AttributeError:
|
||||
log.error(_('get_noncalculated_value_for_auto: unknown variable {}').format(variable))
|
||||
return None
|
||||
values = self.config.cfgimpl_get_values()
|
||||
if values._contains(path):
|
||||
idx = 0
|
||||
vals = []
|
||||
while True:
|
||||
val = values._p_.getvalue(path, values._p_.getsession(), idx)
|
||||
if val is undefined:
|
||||
break
|
||||
vals.append(val)
|
||||
idx += 1
|
||||
if len(vals) > 0:
|
||||
return vals
|
||||
else:
|
||||
return None
|
||||
return None
|
||||
|
||||
def var_exists(self, variable):
|
||||
try:
|
||||
self.get_path(variable)
|
||||
return True
|
||||
except AttributeError:
|
||||
return False
|
||||
|
||||
def get_path(self, variable):
|
||||
"""
|
||||
Retourne le chemin complet d'une variable
|
||||
"""
|
||||
return self.config.find_first(byname=variable, type_='path')
|
||||
|
||||
def modify_owner(self, path, value=None, index=None):
|
||||
"""
|
||||
Modifie le propriétaire d'une variable
|
||||
"""
|
||||
option = self.config.unwrap_from_path(path)
|
||||
if option.impl_is_master_slaves('slave'):
|
||||
if index is not None:
|
||||
self.config.cfgimpl_get_values().setowner(option,
|
||||
self.owner,
|
||||
index=index)
|
||||
elif value is not None:
|
||||
for idx in xrange(len(value)):
|
||||
self.config.cfgimpl_get_values().setowner(option,
|
||||
self.owner,
|
||||
index=idx)
|
||||
else:
|
||||
raise Exception('must have value or index for slave')
|
||||
|
||||
else:
|
||||
self.config.cfgimpl_get_values().setowner(option,
|
||||
self.owner)
|
||||
|
||||
def is_default(self, variable, default=True):
|
||||
"""
|
||||
Retourne True si la valeur n'a pas été personnalisée par l'utilisateur
|
||||
"""
|
||||
try:
|
||||
path = self.get_path(variable)
|
||||
except AttributeError:
|
||||
return default
|
||||
option = self.config.unwrap_from_path(path)
|
||||
return self.config.cfgimpl_get_values().is_default_owner(option)
|
||||
|
||||
def set_value(self, variable, value):
|
||||
"""
|
||||
Modifie la valeur d'une variable
|
||||
"""
|
||||
try:
|
||||
path = self.get_path(variable)
|
||||
except AttributeError:
|
||||
log.error(_(u"Try to set value to unknown option: {0} = {1}").format(variable, value))
|
||||
else:
|
||||
try:
|
||||
self.config._setattr(path, value,
|
||||
force_permissive=True)
|
||||
self.modify_owner(path, value)
|
||||
log.info(_(u"Variable updated: {0} = {1}").format(variable, value))
|
||||
self.config.impl_del_information('error_msg_{}'.format(path), raises=False)
|
||||
self.config.impl_del_information('orig_value_{}'.format(path), raises=False)
|
||||
option = self.config.unwrap_from_path(path)
|
||||
self.config.cfgimpl_get_settings()[option].remove('load_error')
|
||||
except ValueError:
|
||||
option = self.config.unwrap_from_path(path)
|
||||
try:
|
||||
# the value could be in Creole 2.3 format #13957
|
||||
if not option.impl_is_multi() and isinstance(value, list) and len(value) == 1:
|
||||
value = value[0]
|
||||
if value in ['', ['']]:
|
||||
err_msg = _(u"empty value")
|
||||
log.error(_(u"{0} for {1}").format(err_msg, variable))
|
||||
return
|
||||
self.config._setattr(path, convert_value(option, value),
|
||||
force_permissive=True)
|
||||
self.modify_owner(path, value)
|
||||
log.info(_(u"Variable updated: {0} = {1}").format(variable, value))
|
||||
except Exception, err:
|
||||
log.error(_(u"{0} for {1}").format(err, variable))
|
||||
self.config.cfgimpl_get_settings()[option].append('load_error')
|
||||
except Exception, err:
|
||||
option = self.config.unwrap_from_path(path)
|
||||
log.error(_("{0} for {1}").format(normalize(str(err)), variable))
|
||||
self.config.cfgimpl_get_settings()[option].append('load_error')
|
||||
|
||||
def del_value(self, variable):
|
||||
try:
|
||||
path = self.get_path(variable)
|
||||
except AttributeError:
|
||||
log.error(_('Try to delete an unknown option: {0}').format(variable))
|
||||
else:
|
||||
option = self.config.unwrap_from_path(path)
|
||||
self.config.cfgimpl_get_values().__delitem__(option)
|
||||
log.info(_(u"Variable {0} reinitialized").format(variable))
|
||||
|
||||
def append_value(self, variable, value):
|
||||
"""
|
||||
Ajoute une valeur à une variable multi
|
||||
"""
|
||||
try:
|
||||
path = self.get_path(variable)
|
||||
except AttributeError:
|
||||
log.error(_('Try to append a value to an unknown option: {0} += {1}').format(variable, value))
|
||||
else:
|
||||
multi = self.config.getattr(path,
|
||||
force_permissive=True)
|
||||
multi.append(value)
|
||||
self.modify_owner(path, index=len(multi) - 1)
|
||||
|
||||
def modify_last_value(self, variable, value):
|
||||
"""
|
||||
Modifie la dernière valeur d'une variable multi
|
||||
"""
|
||||
try:
|
||||
path = self.get_path(variable)
|
||||
except AttributeError:
|
||||
log.error(_('Try to modify last value of an unknown option: {0}[-1] = {1}').format(variable, value))
|
||||
else:
|
||||
multi = self.config.getattr(path,
|
||||
force_permissive=True)
|
||||
multi[-1] = value
|
||||
self.modify_owner(path, index=len(multi) - 1)
|
||||
|
||||
def move(self, old_variable, new_variable):
|
||||
"""
|
||||
Déplace les données d'une variable "disparue"
|
||||
vers une nouvelle variable
|
||||
"""
|
||||
if old_variable in self.unknown_options:
|
||||
value = self.unknown_options[old_variable][u'val']
|
||||
path = self.get_path(new_variable)
|
||||
option = self.config.unwrap_from_path(path)
|
||||
if value in ['', ['']]:
|
||||
err_msg = _(u"empty value")
|
||||
log.error(_(u"{0} for {1}").format(err_msg, old_variable))
|
||||
return
|
||||
if option.impl_is_multi() and isinstance(value, list):
|
||||
for val in value:
|
||||
self.append_value(new_variable, val)
|
||||
else:
|
||||
self.set_value(new_variable, value)
|
||||
del(self.unknown_options[old_variable])
|
||||
log.info(_(u"Variable {0} has been renamed to {1}").format(old_variable, new_variable))
|
||||
|
||||
def copy(self, old_variable, new_variable, only_if_modified=True):
|
||||
"""
|
||||
Copie la valeur d'une variable existante vers une autre
|
||||
Si la valeur "old" est une multi et pas la "new" => copie la 1er valeur de la liste
|
||||
Si la valeur "old" n'est pas une multi et la "new" ne l'est pas => transforme la valeur en liste
|
||||
only_if_modified: si True ne copie que les valeurs qui sont modifiées
|
||||
"""
|
||||
try:
|
||||
# if both variables exist => migrate
|
||||
old_path = self.get_path(old_variable)
|
||||
new_path = self.get_path(new_variable)
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
old_option = self.config.unwrap_from_path(old_path)
|
||||
new_option = self.config.unwrap_from_path(new_path)
|
||||
# if the new option is unmodified and the old value is modified (or only_if_modified is False)
|
||||
if self.config.cfgimpl_get_values().is_default_owner(new_option) and \
|
||||
(not only_if_modified or
|
||||
not self.config.cfgimpl_get_values().is_default_owner(old_option)):
|
||||
old_value = self.config.getattr(old_path,
|
||||
force_permissive=True)
|
||||
if old_option.impl_is_multi() and not new_option.impl_is_multi():
|
||||
if len(old_value) != 0:
|
||||
old_value = old_value[0]
|
||||
else:
|
||||
old_value = None
|
||||
if not old_option.impl_is_multi() and new_option.impl_is_multi():
|
||||
if old_value is None:
|
||||
old_value = []
|
||||
else:
|
||||
old_value = [old_value]
|
||||
self.set_value(new_variable, old_value)
|
||||
|
||||
|
||||
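# Illustrative sketch (not part of the original module): each Upgrade_X_Y_Z
# subclass below only implements run(); a caller is expected to build it with
# a tiramisu config whose 'unknown_options' information holds the 2.3 store,
# then chain the runs in release order. Hypothetical driver:
#
#   for klass in (Upgrade_2_4_1, Upgrade_2_4_2, Upgrade_2_5_0,
#                 Upgrade_2_5_1, Upgrade_2_5_2):
#       klass(config).run()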
|
||||
class Upgrade_2_4_1(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.4.0 vers 2.4.1
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.4.0', '2.4.1'))
|
||||
|
||||
# renommage des variables "era_proxy_bypass"
|
||||
for i in range(1, 5):
|
||||
self.move('era_proxy_bypass_eth{0}'.format(i), 'proxy_bypass_network_eth{0}'.format(i))
|
||||
|
||||
# fusion des variables "proxy_bypass" et "wpad_exclude"
|
||||
if 'adresse_ip_wpad_exclude' in self.unknown_options:
|
||||
#the first argument is used to fetch the option properties (choiceoption, multi, ...)
#so the variable of the first interface is passed
|
||||
old_interfaces = self.get_old_value('proxy_bypass_network_eth1', 'interface_wpad_exclude')
|
||||
netmasks = self.get_old_value('proxy_bypass_netmask_eth1', 'adresse_netmask_wpad_exclude')
|
||||
for idx, value in enumerate(self.get_old_value('proxy_bypass_network_eth1', 'adresse_ip_wpad_exclude')):
|
||||
interface = old_interfaces[idx]
|
||||
if interface == 'Toutes':
|
||||
interfaces = range(1, 5)
|
||||
elif int(interface) in range(1, 5):
|
||||
interfaces = [interface]
|
||||
else:
|
||||
log.error(_(u"Invalid value : {0} in old variable {1}").format(interface, 'interface_wpad_exclude'))
|
||||
continue
|
||||
for i in interfaces:
|
||||
self.append_value('proxy_bypass_network_eth{0}'.format(i), value)
|
||||
self.modify_last_value('proxy_bypass_netmask_eth{0}'.format(i), netmasks[idx])
|
||||
del(self.unknown_options['adresse_ip_wpad_exclude'])
|
||||
del(self.unknown_options['adresse_netmask_wpad_exclude'])
|
||||
del(self.unknown_options['interface_wpad_exclude'])
|
||||
|
||||
# passage à oui des variables "proxy_bypass_ethX" si nécessaire
|
||||
for i in range(1, 5):
|
||||
if len(self.get_value('proxy_bypass_network_eth{0}'.format(i), [])) > 0:
|
||||
self.set_value('proxy_bypass_eth{0}'.format(i), u'oui')
|
||||
|
||||
# transfert des variables nom_domaine_wpad_exclude
|
||||
if 'nom_domaine_wpad_exclude' in self.unknown_options:
|
||||
old_interfaces = self.get_old_value('proxy_bypass_domain_eth1', 'nom_interface_wpad_exclude')
|
||||
for idx, value in enumerate(self.get_old_value('proxy_bypass_domain_eth1', 'nom_domaine_wpad_exclude')):
|
||||
interface = old_interfaces[idx]
|
||||
if interface == 'Toutes':
|
||||
interfaces = range(1, 5)
|
||||
elif int(interface) in range(1, 5):
|
||||
interfaces = [interface]
|
||||
else:
|
||||
log.error(_(u"Invalid value : {0} in old variable {1}").format(interface, 'nom_interface_wpad_exclude'))
|
||||
continue
|
||||
for i in interfaces:
|
||||
self.append_value('proxy_bypass_domain_eth{0}'.format(i), value)
|
||||
del(self.unknown_options['nom_domaine_wpad_exclude'])
|
||||
del(self.unknown_options['nom_interface_wpad_exclude'])
|
||||
|
||||
# nom_serveur_scribe_dmz/ip_serveur_scribe_dmz => mandatory (#11713)
|
||||
if self.get_value('install_scribe_dmz') == u'oui':
|
||||
if self.get_value('nom_serveur_scribe_dmz') == None or self.get_value('ip_serveur_scribe_dmz') == None:
|
||||
self.set_value('install_scribe_dmz', u'non')
|
||||
|
||||
|
||||
class Upgrade_2_4_2(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.4.1 vers 2.4.2
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.4.1', '2.4.2'))
|
||||
# migration des variables eolesso vers client LDAP #10821
|
||||
self.copy('eolesso_port_ldap', 'ldap_port')
|
||||
self.copy('eolesso_ldap_reader', 'ldap_reader')
|
||||
self.copy('eolesso_ldap_reader_passfile', 'ldap_reader_passfile')
|
||||
self.copy('eolesso_ldap_match_attribute', 'ldap_match_attribute')
|
||||
self.copy('eolesso_ldap_filter_user', 'ldap_filter_user')
|
||||
self.copy('eolesso_ldap_filter_group', 'ldap_filter_group')
|
||||
self.copy('eolesso_ldap_dntree_user', 'ldap_dntree_user')
|
||||
self.copy('eolesso_ldap_dntree_group', 'ldap_dntree_group')
|
||||
self.copy('eolesso_ldap_fill_displayname', 'ldap_fill_displayname')
|
||||
self.copy('eolesso_ldap_fill_mail', 'ldap_fill_mail')
|
||||
self.copy('eolesso_ldap_fill_fonction', 'ldap_fill_fonction')
|
||||
self.copy('eolesso_ldap_fill_categorie', 'ldap_fill_categorie')
|
||||
self.copy('eolesso_ldap_fill_rne', 'ldap_fill_rne')
|
||||
self.copy('eolesso_ldap_fill_fredurne', 'ldap_fill_fredurne')
|
||||
self.copy('eolesso_ldap_fill_displaygroup', 'ldap_fill_displaygroup')
|
||||
|
||||
# migration des variables courier #10987
|
||||
courier_val = self.get_old_value('activer_recuperation_courriel', 'activer_courier')
|
||||
if courier_val is not None:
|
||||
if courier_val == 'non':
|
||||
self.set_value('activer_recuperation_courriel', 'non')
|
||||
elif not 'imap' in courier_val:
|
||||
self.set_value('activer_courier_imap', 'non')
|
||||
if 'pop' in courier_val:
|
||||
self.set_value('activer_courier_pop', 'oui')
|
||||
|
||||
|
||||
class Upgrade_2_5_0(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.4.X vers 2.5.0
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.4.X', '2.5.0'))
|
||||
|
||||
# migration des variables nut #11608
|
||||
monitor = self.get_value('nut_monitor_user')
|
||||
if monitor != []:
|
||||
self.set_value('nut_monitor', 'oui')
|
||||
|
||||
# migration des variables postgresql pour Zéphir #11974
|
||||
old_pg_shared_buffers = self.get_value('pg_shared_buffers')
|
||||
if old_pg_shared_buffers is not None:
|
||||
if int(old_pg_shared_buffers) == 3072:
|
||||
self.del_value('pg_shared_buffers')
|
||||
else:
|
||||
self.set_value('pg_shared_buffers_unit', u'kB')
|
||||
self.del_value('pg_effective_cache_size')
|
||||
|
||||
|
||||
class Upgrade_2_5_1(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.5.0 vers 2.5.1
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.5.0', '2.5.1'))
|
||||
|
||||
# migration des variables zone_forward (#11922)
|
||||
zone_forward = self.get_value('nom_zone_forward', [])
|
||||
if zone_forward != []:
|
||||
self.set_value('activer_zone_forward', 'oui')
|
||||
|
||||
# passage de bacula à bareos (#12425)
|
||||
for var in ['activer_bareos_dir', 'activer_bareos_sd',
|
||||
'bareos_dir_name', 'bareos_full_retention',
|
||||
'bareos_full_retention_unit', 'bareos_diff_retention',
|
||||
'bareos_diff_retention_unit', 'bareos_inc_retention',
|
||||
'bareos_inc_retention_unit', 'bareos_max_run_time',
|
||||
'bareos_compression', 'bareos_dir_password',
|
||||
'bareos_fd_password', 'bareos_sd_local',
|
||||
'bareos_sd_adresse', 'bareos_sd_password',
|
||||
'bareos_sd_name', 'bareos_sd_remote_dir_name',
|
||||
'bareos_sd_remote_ip', 'bareos_sd_remote_password']:
|
||||
self.move(var.replace('bareos', 'bacula'), var)
|
||||
|
||||
if self.get_value('activer_bareos_dir') == u'oui':
|
||||
#sauvegarde déjà programmé en sqlite3, ne gère pas la migration vers mysql
|
||||
self.set_value('bareos_db_type', 'sqlite3')
|
||||
|
||||
if self.get_value('ldap_ca_cert') == '/etc/certs/CA2008.pem':
|
||||
self.set_value('ldap_ca_cert', '/etc/certs/certificat.pem')
|
||||
|
||||
|
||||
class Upgrade_2_5_2(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.5.1 vers 2.5.2
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.5.1', '2.5.2'))
|
||||
|
||||
# haute dispo présente
|
||||
if self.var_exists('activer_haute_dispo'):
|
||||
# migration HD sphynx #14881
|
||||
if self.var_exists('activer_resource_arv'):
|
||||
activer_haute_dispo = self.get_value('activer_haute_dispo')
|
||||
if activer_haute_dispo == 'maitre':
|
||||
service_resource_name = self.get_noncalculated_value_for_auto('service_resource_name')
|
||||
service_resource_startdelay = self.get_noncalculated_value_for_auto('service_resource_startdelay')
|
||||
need_update = False
|
||||
startdelay_index = 1
|
||||
need_disabled_arv = False
|
||||
if service_resource_startdelay is not None:
|
||||
if service_resource_name is not None:
|
||||
need_update = 'arv_rsc' in service_resource_name
|
||||
if need_update:
|
||||
startdelay_index = service_resource_name.index('arv_rsc')
|
||||
need_disabled_arv = not need_update
|
||||
else:
|
||||
need_update = True
|
||||
self.del_value('service_resource_name')
|
||||
self.del_value('service_resource_script')
|
||||
self.del_value('service_resource_interval')
|
||||
self.del_value('service_resource_timeout')
|
||||
self.del_value('service_resource_startdelay')
|
||||
if need_update and service_resource_startdelay[startdelay_index] != 15:
|
||||
self.set_value('service_resource_arv_startdelay', service_resource_startdelay[startdelay_index])
|
||||
if need_disabled_arv:
|
||||
self.set_value('activer_resource_arv', u'non')
|
||||
#
|
||||
vip_resource_adresseip = self.get_noncalculated_value_for_auto('vip_resource_adresseip')
|
||||
self.del_value('vip_resource_name')
|
||||
self.del_value('vip_resource_if')
|
||||
self.del_value('vip_resource_adresseip')
|
||||
self.del_value('vip_resource_location')
|
||||
if vip_resource_adresseip is not None:
|
||||
if len(vip_resource_adresseip) > 0:
|
||||
self.set_value('vip_externe', vip_resource_adresseip[0])
|
||||
if len(vip_resource_adresseip) > 1:
|
||||
self.set_value('vip_interne', vip_resource_adresseip[1])
|
||||
# migration HD non Sphynx #14951
|
||||
else:
|
||||
vip_resource_if = self.get_noncalculated_value_for_auto('vip_resource_if')
|
||||
vip_netmask = []
|
||||
for vip_if in vip_resource_if:
|
||||
netmask_var = 'adresse_netmask_{0}'.format(vip_if.lower())
|
||||
vip_netmask.append(self.get_value(netmask_var))
|
||||
if len(vip_netmask) > 0:
|
||||
self.set_value('vip_resource_netmask', vip_netmask)
|
||||
service_resource_name = self.get_noncalculated_value_for_auto('service_resource_name')
|
||||
if len(service_resource_name) > 0:
|
||||
self.set_value('activer_service_resource', 'oui')
|
||||
|
||||
|
||||
class Upgrade_2_6_0(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.5.X vers 2.6.0
|
||||
"""
|
||||
|
||||
def get_eth_no(self, eth):
|
||||
"""
|
||||
Retourne le numéro X du nom de l'interface ethX
|
||||
"""
|
||||
try:
|
||||
return eth.split("eth")[1]
|
||||
except:
|
||||
log.error(_(u"Interface {0} name has not an 'ethX' format").format(eth))
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.5.X', '2.6.0'))
|
||||
|
||||
# migration des variables faisant référence au nom des interfaces ethX
|
||||
eth_vars = ['route_int', 'fw_rule_int', 'dhcrelay_server_interface', 'freerad_listen_int',
|
||||
'sw_force_ip_src', 'corosync_dial_if', 'dhcrelay_interfaces']
|
||||
for eth_var in eth_vars:
|
||||
eth_name = self.get_unvalid_value(eth_var)
|
||||
if isinstance(eth_name, list):
|
||||
eth_no = []
|
||||
for eth in eth_name:
|
||||
if eth == 'all':
|
||||
eth_no.append(eth)
|
||||
else:
|
||||
eth_no.append(self.get_eth_no(eth))
|
||||
if eth_no != [] and eth_no != eth_name:
|
||||
self.set_value(eth_var, eth_no)
|
||||
elif isinstance(eth_name, dict):
|
||||
eth_no = []
|
||||
for eth_key, eth_value in eth_name.items():
|
||||
if eth_value == 'all':
|
||||
eth_no.append(eth_value)
|
||||
else:
|
||||
eth_no.append(self.get_eth_no(eth_value))
|
||||
if eth_no != [] and eth_no != eth_name:
|
||||
self.set_value(eth_var, eth_no)
|
||||
elif eth_name is not None:
|
||||
eth_no = self.get_eth_no(eth_name)
|
||||
self.set_value(eth_var, eth_no)
|
||||
elif eth_var == 'dhcrelay_server_interface' and self.get_value('adresse_ip_dhcp_dhcrelay') is not None:
|
||||
# migration de l'ancienne valeur par défaut de dhcrelay_server_interface #18329
|
||||
self.set_value(eth_var, u'3')
|
||||
# haute dispo présente
|
||||
if self.var_exists('activer_haute_dispo'):
|
||||
# migration HD non sphynx
|
||||
if not self.var_exists('activer_resource_arv'):
|
||||
eth_name = self.get_noncalculated_value_for_auto('vip_resource_if')
|
||||
eth_no = []
|
||||
for eth in eth_name:
|
||||
eth_no.append(self.get_eth_no(eth))
|
||||
self.set_value('vip_resource_if', eth_no)
|
||||
|
||||
|
||||
class Upgrade_2_6_1(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.6.0 vers 2.6.1
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.6.0', '2.6.1'))
|
||||
|
||||
# migration des variables NTLM/SMB : multi -> non multi (#18277)
|
||||
if self.var_exists('nom_serveur_smb'):
|
||||
for varname in ('nom_serveur_smb', 'nom_domaine_smb', 'ip_serveur_smb'):
|
||||
value = self.get_unvalid_value(varname)
|
||||
if isinstance(value, list) and len(value) > 1:
|
||||
self.set_value(varname, value[0])
|
||||
|
||||
# nom_carte_ethX => multi-valuées (#18609)
|
||||
for numint in range(0, 5):
|
||||
varname = 'nom_carte_eth{}'.format(numint)
|
||||
value = self.get_unvalid_value(varname)
|
||||
if value != None:
|
||||
self.set_value(varname, [value])
|
||||
|
||||
# migration variable 'module_type' pour le module esbl ('ESBL') -> ('eSBL') (#21677)
|
||||
if self.get_value('eole_module') == u'esbl':
|
||||
self.set_value('module_type', u'eSBL')
|
||||
|
||||
|
||||
class Upgrade_2_6_2(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.6.1 vers 2.6.2
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.6.1', '2.6.2'))
|
||||
|
||||
adresse_network_dhcp = self.get_value('adresse_network_dhcp')
|
||||
if adresse_network_dhcp:
|
||||
plages = []
|
||||
for idx in xrange(len(adresse_network_dhcp)):
|
||||
plages.append(u'plage{}'.format(idx))
|
||||
self.set_value('nom_plage_dhcp', plages)
|
||||
if self.var_exists('acces_distant_backend_ead'):
|
||||
self.set_value('acces_distant_backend_ead', 'oui')
|
||||
for interface in [str(n) for n in range(5)]:
|
||||
variable = 'frontend_ead_distant_eth' + interface
|
||||
if self.var_exists(variable):
|
||||
self.set_value(variable, 'oui')
|
||||
variable = 'ip_frontend_ead_distant_eth' + interface
|
||||
if self.var_exists(variable):
|
||||
self.set_value(variable, ['0.0.0.0'])
|
||||
variable = 'netmask_frontend_ead_distant_eth' + interface
|
||||
if self.var_exists(variable):
|
||||
self.set_value(variable, ['0.0.0.0'])
|
||||
# Upgrade Seth
|
||||
# AD firewall - mix old multi variables ad_clients_ip and
|
||||
# ad_servers_ip in ad_peer_ip
|
||||
ad_servers_ip = self.get_old_value('ad_peer_ip', 'ad_servers_ip')
|
||||
ad_clients_ip = self.get_old_value('ad_peer_ip', 'ad_clients_ip')
|
||||
if ad_servers_ip or ad_clients_ip:
|
||||
self.set_value('ad_filter_network', 'oui')
|
||||
if ad_servers_ip:
|
||||
ad_servers_netmask = self.get_old_value('ad_peer_netmask', 'ad_servers_netmask')
|
||||
for ip, netmask in zip(ad_servers_ip, [nm[1] for nm in sorted(ad_servers_netmask.items())]):
|
||||
self.append_value('ad_peer_ip', ip)
|
||||
self.modify_last_value('ad_peer_netmask', netmask)
|
||||
del(self.unknown_options['ad_servers_ip'])
|
||||
del(self.unknown_options['ad_servers_netmask'])
|
||||
if ad_clients_ip:
|
||||
ad_clients_netmask = self.get_old_value('ad_peer_netmask', 'ad_clients_netmask')
|
||||
for ip, netmask in zip(ad_clients_ip, [nm[1] for nm in sorted(ad_clients_netmask.items())]):
|
||||
self.append_value('ad_peer_ip', ip)
|
||||
self.modify_last_value('ad_peer_netmask', netmask)
|
||||
del(self.unknown_options['ad_clients_ip'])
|
||||
del(self.unknown_options['ad_clients_netmask'])
|
||||
# Force SID
|
||||
force_sid = self.get_value('ad_domain_sid')
|
||||
if force_sid:
|
||||
self.set_value('ad_force_domain_sid', 'oui')
|
||||
# Squid modified variables : minutes -> seconds
|
||||
for squidvar in ['forward_timeout', 'connect_timeout', 'read_timeout', 'request_timeout', 'persistent_request_timeout']:
|
||||
squidval = self.get_value(squidvar)
|
||||
if squidval is not None and not self.is_default(squidvar):
|
||||
self.set_value(squidvar, squidval*60)
|
||||
# Exim relay: force to "activate" only when upgrading from Scribe 2.6.1
|
||||
if self.var_exists('synchro_aaf'):
|
||||
self.set_value('exim_relay', 'oui')
|
||||
if self.get_value('activer_dhcp') == 'oui' and self.is_default('exim_relay_dhcp'):
|
||||
self.set_value('exim_relay_dhcp', 'oui')
|
||||
# A self-signed certificate modified by the user must be switched to the manual type
|
||||
if self.get_value('cert_type') == u'autosigné':
|
||||
cert_is_modified = False
|
||||
# set 'manuel' temporarily to get access to the certificate variables
|
||||
self.set_value('cert_type', u'manuel')
|
||||
for cert in ['server_cert', 'server_key', 'server_pem']:
|
||||
if not self.is_default(cert):
|
||||
cert_is_modified = True
|
||||
break
|
||||
if not cert_is_modified:
|
||||
self.set_value('cert_type', u'autosigné')
|
||||
# Keep the self-signed certificate paths explicitly when the type stays manual
|
||||
if self.get_value('cert_type') == u'manuel':
|
||||
for cert, filename in [('server_cert', u'/etc/ssl/certs/eole.crt'), ('server_pem', u'/etc/ssl/certs/eole.pem')]:
|
||||
if self.is_default(cert):
|
||||
self.set_value(cert, filename)
|
||||
# gaspacho agent needs to go through port 8080 as in 2.6.1 and earlier
|
||||
if self.var_exists('gaspacho_https'):
|
||||
self.set_value('gaspacho_https', 'non')
|
||||
|
||||
|
||||
def upgrade2(major_version, old_release, current_release, config):
|
||||
"""
|
||||
major_version: version des scripts de migration (ex : 2.4)
|
||||
old_release: version du config.eol à migrer (ex : 2.4.0)
|
||||
current_release: version du serveur (ex : 2.5.1)
|
||||
config: objet de configuration Tiramisu
|
||||
"""
|
||||
def _get_max_release():
|
||||
"""
|
||||
Calcul du dernier numéro de release disponible pour la version majeure
|
||||
"""
|
||||
ends = 0
|
||||
for func in globals():
|
||||
if func.startswith(func_start):
|
||||
ends = max(ends, int(func.split('_')[-1]))
|
||||
return ends
|
||||
|
||||
old_version = '.'.join(old_release.split('.')[0:2])
|
||||
current_version = '.'.join(current_release.split('.')[0:2])
|
||||
func_start = 'Upgrade_' + "_".join(major_version.split('.'))
|
||||
if StrictVersion(current_version) == StrictVersion(old_version):
|
||||
# upgrade au sein d'une même version
|
||||
# ex : 2.5.1 -> 2.5.4 en 2.5
|
||||
starts = int(old_release.split('.')[-1])
|
||||
ends = int(current_release.split('.')[-1])
|
||||
elif StrictVersion(major_version) == StrictVersion(old_version):
|
||||
# upgrade "de base" vers une version supérieure
|
||||
# ex : 2.4.2 -> 2.6.1 en 2.4
|
||||
starts = int(old_release.split('.')[-1])
|
||||
ends = _get_max_release()
|
||||
elif StrictVersion(major_version) == StrictVersion(current_version):
|
||||
# upgrade "final" vers une version supérieure
|
||||
# ex : 2.4.2 -> 2.6.1 en 2.6
|
||||
starts = -1
|
||||
ends = int(current_release.split('.')[-1])
|
||||
else:
|
||||
# upgrade "intermédiaire" vers une version supérieure
|
||||
# ex : 2.4.2 -> 2.6.1 en 2.5
|
||||
starts = -1
|
||||
ends = _get_max_release()
|
||||
|
||||
for i in xrange(starts + 1, ends + 1):
|
||||
func = func_start + '_' + str(i)
|
||||
if func in globals():
|
||||
upgrade = globals()[func](config)
|
||||
upgrade.run()
|
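A worked example of the dispatch above (illustrative sketch, not part of the commit; `config` stands for the Tiramisu configuration object and the Upgrade_* classes are the ones defined in this file):

# upgrading a 2.4.2 config.eol on a 2.6.1 server, one script set at a time:
upgrade2('2.4', '2.4.2', '2.6.1', config)  # "base" case: runs Upgrade_2_4_3 .. Upgrade_2_4_<max>
upgrade2('2.5', '2.4.2', '2.6.1', config)  # "intermediate" case: runs Upgrade_2_5_0 .. Upgrade_2_5_2
upgrade2('2.6', '2.4.2', '2.6.1', config)  # "final" case: runs Upgrade_2_6_0 then Upgrade_2_6_1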
197
creole/utils.py
Normal file
|
@ -0,0 +1,197 @@
|
|||
# -*- coding: UTF-8 -*-
|
||||
"""
|
||||
utilitaires créole
|
||||
"""
|
||||
|
||||
import sys
|
||||
from .error import NoneError, OutOfRange
|
||||
from .config import charset
|
||||
try:
|
||||
from pyeole.ansiprint import *
|
||||
except:
|
||||
pass
|
||||
import time, hashlib, random, unicodedata
|
||||
|
||||
# définition des classes d'adresse IP existantes
|
||||
classes = {
|
||||
u'128.0.0.0' : u'1'
|
||||
, u'192.0.0.0' : u'2'
|
||||
, u'224.0.0.0' : u'3'
|
||||
, u'240.0.0.0' : u'4'
|
||||
, u'248.0.0.0' : u'5'
|
||||
, u'252.0.0.0' : u'6'
|
||||
, u'254.0.0.0' : u'7'
|
||||
, u'255.0.0.0' : u'8'
|
||||
, u'255.128.0.0' : u'9'
|
||||
, u'255.192.0.0' : u'10'
|
||||
, u'255.224.0.0' : u'11'
|
||||
, u'255.240.0.0' : u'12'
|
||||
, u'255.248.0.0' : u'13'
|
||||
, u'255.252.0.0' : u'14'
|
||||
, u'255.254.0.0' : u'15'
|
||||
, u'255.255.0.0' : u'16'
|
||||
, u'255.255.128.0' : u'17'
|
||||
, u'255.255.192.0' : u'18'
|
||||
, u'255.255.224.0' : u'19'
|
||||
, u'255.255.240.0' : u'20'
|
||||
, u'255.255.248.0' : u'21'
|
||||
, u'255.255.252.0' : u'22'
|
||||
, u'255.255.254.0' : u'23'
|
||||
, u'255.255.255.0' : u'24'
|
||||
, u'255.255.255.128' : u'25'
|
||||
, u'255.255.255.192' : u'26'
|
||||
, u'255.255.255.224' : u'27'
|
||||
, u'255.255.255.240' : u'28'
|
||||
, u'255.255.255.248' : u'29'
|
||||
, u'255.255.255.252' : u'30'
|
||||
, u'255.255.255.254' : u'31'
|
||||
, u'255.255.255.255' : u'32'
|
||||
}
|
||||
|
||||
def string_to_bool(string):
|
||||
"""
|
||||
Transforme les chaines 'True' ou 'False' en valeurs booléennes
|
||||
"""
|
||||
if string == "":
|
||||
raise ValueError('empty string')
|
||||
result = eval(string)
|
||||
if result not in [True, False]:
|
||||
raise TypeError("string must be like 'True' or 'False'")
|
||||
else: return result
|
||||
|
||||
|
||||
def get_text_node(node):
|
||||
"""
|
||||
@param node: node minidom contenant du texte
|
||||
Utilitaire minidom permettant de récupérer le texte d'un node texte
|
||||
"""
|
||||
texte = ""
|
||||
nodelist = node.childNodes
|
||||
for textnode in nodelist:
|
||||
if textnode.nodeType == textnode.TEXT_NODE:
|
||||
texte = texte + textnode.data
|
||||
return texte
|
||||
|
||||
|
||||
# utilitaires pour la
|
||||
# ligne de commande
|
||||
|
||||
def raw(text):
|
||||
"""
|
||||
Question en ligne de commande : permet de repérer si l'utilisateur a renvoyé quelque chose
|
||||
|
||||
@param text: le libellé de message
|
||||
@return: la variable demandée
|
||||
"""
|
||||
var = raw_input(text + " : ")
|
||||
if var:
|
||||
return var
|
||||
else:
|
||||
raise NoneError
|
||||
|
||||
|
||||
def stringify(string):
|
||||
"""
|
||||
Encodage des chaînes avec le charset local
|
||||
"""
|
||||
try:
|
||||
return string.encode(charset)
|
||||
except:
|
||||
return string
|
||||
|
||||
def encode_list(_list):
|
||||
""" encode une liste en utf-8 si les éléments sont de type dico ou str ou liste, unicode"""
|
||||
encoded_list = []
|
||||
for element in _list:
|
||||
if type(element) == str:
|
||||
encoded_list.append(encode_str(element))
|
||||
elif type(element) == dict:
|
||||
encoded_list.append(encode_dico(element))
|
||||
elif type(element) == list:
|
||||
encoded_list.append(encode_list(element))
|
||||
elif type(element) == unicode:
|
||||
encoded_list.append(encode_str(element))
|
||||
else:
|
||||
encoded_list.append(element)
|
||||
return encoded_list
|
||||
|
||||
def encode_str(string):
|
||||
""" encode une string ou un unicode en utf8 """
|
||||
try:
|
||||
string = string.encode(charset)
|
||||
except:
|
||||
pass
|
||||
return string
|
||||
|
||||
def encode_dico(dico):
|
||||
""" encode un dico en utf8 dans le cas ou les valeurs soient de type dico, liste, str, unicode """
|
||||
for key in dico.keys():
|
||||
if type(dico[key]) == str:
|
||||
dico[key] = encode_str(dico[key])
|
||||
elif type(dico[key]) == unicode:
|
||||
dico[key] = encode_str(dico[key])
|
||||
elif type(dico[key]) == dict:
|
||||
dico[key] = encode_dico(dico[key])
|
||||
elif type(dico[key]) == list:
|
||||
dico[key] = encode_list(dico[key])
|
||||
return dico
|
||||
|
||||
|
||||
def select_list(selection):
|
||||
"""
|
||||
Utilitaire de construction d'une sélection en ligne de commande
|
||||
@param selection : liste
|
||||
@return : l'identifiant sélectionné (entier)
|
||||
"""
|
||||
# affichage de la liste (ordonnée)
|
||||
for i in selection:
|
||||
print(selection.index(i) , ':', stringify(i))
|
||||
# print selection.index(i) , ':', i[0]
|
||||
|
||||
# recuperation du numero
|
||||
try:
|
||||
number = int(raw(stringify(_("Choose a number in the list"))))
|
||||
except:
|
||||
raise OutOfRange
|
||||
if number not in range(len(selection)):
|
||||
raise OutOfRange
|
||||
return number
|
||||
|
||||
def gen_random(length=None):
|
||||
"""
|
||||
length: longueur de la chaine aléatoire attendu
|
||||
"""
|
||||
try:
|
||||
random_id = str(time.time()).split('.')[0]
|
||||
random_str = hashlib.sha224('{}/{}'.format(random_id, str(random.randrange(2**100))).encode('utf-8')).hexdigest()
|
||||
return random_str[:length]
|
||||
except:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
|
||||
def normalize_family(family_name, check_name=True):
|
||||
"""
|
||||
il ne faut pas d'espace, d'accent, de majuscule, de tiret, ...
|
||||
dans le nom des familles
|
||||
"""
|
||||
if sys.version_info[0] < 3:
|
||||
f = unicode(family_name)
|
||||
else:
|
||||
f = family_name
|
||||
f = f.replace('-', '_')
|
||||
#f = f.replace(u'é', 'e')
|
||||
#f = f.replace(u'è', 'e')
|
||||
nfkd_form = unicodedata.normalize('NFKD', f)
|
||||
f = u"".join([c for c in nfkd_form if not unicodedata.combining(c)])
|
||||
f = f.replace(' ', '_')
|
||||
f = f.lower()
|
||||
try:
|
||||
int(f[0])
|
||||
except ValueError:
|
||||
pass
|
||||
else:
|
||||
raise ValueError(u'Le nom de la famille ne doit pas commencer par un chiffre : {0}'.format(f))
|
||||
if check_name and f.lower() in ['containers']:
|
||||
raise ValueError(u'nom de la famille interdit {0}'.format(f))
|
||||
return f
|
||||
|
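For instance (illustrative sketch, not part of the commit), under Python 2 the normalisation above gives:

>>> normalize_family(u'Réseau-Avancé Eole')
u'reseau_avance_eole'

(hyphens and spaces become underscores, accents are stripped through the NFKD decomposition and the result is lowercased).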
28
creole/valid/__init__.py
Normal file
|
@ -0,0 +1,28 @@
|
|||
# -*- coding:utf-8 -*-
|
||||
|
||||
"""
|
||||
callbacks de validation personnalisés pour tiramisu
|
||||
|
||||
**utilisation**
|
||||
|
||||
faire des callbacks standards en cas de validation
|
||||
sur la configuration entière.
|
||||
la possibilité de validation personnalisable doit
|
||||
être utilisée *uniquement* pour des validations locales
|
||||
|
||||
**important**
|
||||
|
||||
la fonction ne doit pas lever d'exception, elle doit
|
||||
aboutir.
|
||||
|
||||
api
|
||||
:param value: premier paramètre, valeur de l'option
|
||||
les autres paramètres doivent être des
|
||||
paramètres **nommés**
|
||||
:return: True ou False suivant que l'option a été validée ou non
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
"""
|
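A minimal sketch of a callback honouring this contract (illustrative only, not part of the commit; the function name and the `max_len` keyword are invented for the example):

def valid_short_enough(value, max_len=255):
    # `value` is the option value; any extra parameter must be passed by name
    try:
        return len(value) <= int(max_len)
    except Exception:
        # the contract above forbids raising: always end on a boolean
        return False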
12
creole/valid/string.py
Normal file
|
@ -0,0 +1,12 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
from formencode.validators import UnicodeString
|
||||
from formencode.api import Invalid
|
||||
|
||||
def valid_string(value, min=None, max=None, not_empty=True):
|
||||
try:
|
||||
UnicodeString(min=min, max=max, not_empty=not_empty
|
||||
).to_python(value)
|
||||
return True
|
||||
except Invalid:
|
||||
return False
|
1750
creole/var_loader.py
Normal file
File diff suppressed because it is too large
67
creole/wpkg_secrets.py
Normal file
|
@ -0,0 +1,67 @@
|
|||
#! /usr/bin/env python
|
||||
# -*- coding: UTF-8 -*-
|
||||
|
||||
|
||||
import base64
|
||||
|
||||
KEY_LENGTH = 40
|
||||
KEYS = [
|
||||
0x50,
|
||||
0xF7,
|
||||
0x82,
|
||||
0x69,
|
||||
0xEA,
|
||||
0x2D,
|
||||
0xDD,
|
||||
0x2D,
|
||||
0x6A,
|
||||
0xB4,
|
||||
0x33,
|
||||
0x8F,
|
||||
0xD5,
|
||||
0xC7,
|
||||
0x90,
|
||||
0x9C,
|
||||
0x22,
|
||||
0x95,
|
||||
0x61,
|
||||
0xE5,
|
||||
0x65,
|
||||
0xF6,
|
||||
0xB0,
|
||||
0x4B,
|
||||
0x94,
|
||||
0x47,
|
||||
0xB0,
|
||||
0xBD,
|
||||
0x73,
|
||||
0x58,
|
||||
0x56,
|
||||
0x87,
|
||||
0x79,
|
||||
0x7B,
|
||||
0xE6,
|
||||
0xB0,
|
||||
0xD2,
|
||||
0x20,
|
||||
0x28,
|
||||
0xE1
|
||||
]
|
||||
|
||||
def bitwise(s):
|
||||
res = ''
|
||||
idx = 0
|
||||
for i in range(len(s)):
|
||||
res += chr(ord(s[i]) ^ KEYS[idx])
|
||||
idx+=1
|
||||
if idx > (KEY_LENGTH - 1):
|
||||
idx = 0
|
||||
return res
|
||||
|
||||
def wcrypt(s):
|
||||
s = bitwise(s)
|
||||
return base64.encodestring(s)[:-1] # encodestring renvoie la chaine avec un '\n', on le vire
|
||||
|
||||
def wdecrypt(s):
|
||||
s = base64.decodestring(s)
|
||||
return bitwise(s)
|
161
creole/xml_compare.py
Normal file
|
@ -0,0 +1,161 @@
|
|||
try:
|
||||
import doctest
|
||||
doctest.OutputChecker
|
||||
except (AttributeError, ImportError): # Python < 2.4
|
||||
import util.doctest24 as doctest
|
||||
try:
|
||||
import xml.etree.ElementTree as ET
|
||||
except ImportError:
|
||||
import elementtree.ElementTree as ET
|
||||
from xml.parsers.expat import ExpatError as XMLParseError
|
||||
|
||||
RealOutputChecker = doctest.OutputChecker
|
||||
|
||||
|
||||
def debug(*msg):
|
||||
import sys
|
||||
print >> sys.stderr, ' '.join(map(str, msg))
|
||||
|
||||
|
||||
class HTMLOutputChecker(RealOutputChecker):
|
||||
|
||||
def check_output(self, want, got, optionflags):
|
||||
normal = RealOutputChecker.check_output(self, want, got, optionflags)
|
||||
if normal or not got:
|
||||
return normal
|
||||
try:
|
||||
want_xml = make_xml(want)
|
||||
except XMLParseError:
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
got_xml = make_xml(got)
|
||||
except XMLParseError:
|
||||
pass
|
||||
else:
|
||||
if xml_compare(want_xml, got_xml):
|
||||
return True
|
||||
return False
|
||||
|
||||
def output_difference(self, example, got, optionflags):
|
||||
actual = RealOutputChecker.output_difference(
|
||||
self, example, got, optionflags)
|
||||
want_xml = got_xml = None
|
||||
try:
|
||||
want_xml = make_xml(example.want)
|
||||
want_norm = make_string(want_xml)
|
||||
except XMLParseError as e:
|
||||
if example.want.startswith('<'):
|
||||
want_norm = '(bad XML: %s)' % e
|
||||
# '<xml>%s</xml>' % example.want
|
||||
else:
|
||||
return actual
|
||||
try:
|
||||
got_xml = make_xml(got)
|
||||
got_norm = make_string(got_xml)
|
||||
except XMLParseError as e:
|
||||
if example.want.startswith('<'):
|
||||
got_norm = '(bad XML: %s)' % e
|
||||
else:
|
||||
return actual
|
||||
s = '%s\nXML Wanted: %s\nXML Got : %s\n' % (
|
||||
actual, want_norm, got_norm)
|
||||
if got_xml and want_xml:
|
||||
result = []
|
||||
xml_compare(want_xml, got_xml, result.append)
|
||||
s += 'Difference report:\n%s\n' % '\n'.join(result)
|
||||
return s
|
||||
|
||||
|
||||
def xml_sort(children):
|
||||
tcl1 = {}
|
||||
#idx = 0
|
||||
|
||||
for child in children:
|
||||
if 'name' in child.attrib:
|
||||
key = child.attrib['name']
|
||||
else:
|
||||
key = child.tag
|
||||
if key not in tcl1:
|
||||
tcl1[key] = []
|
||||
tcl1[key].append(child)
|
||||
cl1_keys = list(tcl1.keys())
|
||||
cl1_keys.sort()
|
||||
cl1 = []
|
||||
for key in cl1_keys:
|
||||
cl1.extend(tcl1[key])
|
||||
return cl1
|
||||
|
||||
def xml_compare(x1, x2):
|
||||
if x1.tag != x2.tag:
|
||||
print ('Tags do not match: %s and %s' % (x1.tag, x2.tag))
|
||||
return False
|
||||
for name, value in x1.attrib.items():
|
||||
if x2.attrib.get(name) != value:
|
||||
print ('Attributes do not match: %s=%r, %s=%r'
|
||||
% (name, value, name, x2.attrib.get(name)))
|
||||
return False
|
||||
for name in x2.attrib:
|
||||
if name not in x1.attrib:
|
||||
print ('x2 has an attribute x1 is missing: %s'
|
||||
% name)
|
||||
return False
|
||||
if not text_compare(x1.text, x2.text):
|
||||
print ('text: %r != %r' % (x1.text, x2.text))
|
||||
return False
|
||||
if not text_compare(x1.tail, x2.tail):
|
||||
print ('tail: %r != %r' % (x1.tail, x2.tail))
|
||||
return False
|
||||
|
||||
cl1 = xml_sort(x1.getchildren())
|
||||
cl2 = xml_sort(x2.getchildren())
|
||||
|
||||
if len(cl1) != len(cl2):
|
||||
cl1_tags = []
|
||||
for c in cl1:
|
||||
cl1_tags.append(c.tag)
|
||||
cl2_tags = []
|
||||
for c in cl2:
|
||||
cl2_tags.append(c.tag)
|
||||
print ('children length differs, %i != %i (%s != %s)'
|
||||
% (len(cl1), len(cl2), cl1_tags, cl2_tags))
|
||||
return False
|
||||
i = 0
|
||||
for c1, c2 in zip(cl1, cl2):
|
||||
i += 1
|
||||
if not xml_compare(c1, c2):
|
||||
if 'name' in c1.attrib:
|
||||
name = c1.attrib['name']
|
||||
else:
|
||||
name = i
|
||||
print ('in tag "%s" with name "%s"'
|
||||
% (c1.tag, name))
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def text_compare(t1, t2):
|
||||
if not t1 and not t2:
|
||||
return True
|
||||
if t1 == '*' or t2 == '*':
|
||||
return True
|
||||
return (t1 or '').strip() == (t2 or '').strip()
|
||||
|
||||
|
||||
def make_xml(s):
|
||||
return ET.XML('<xml>%s</xml>' % s)
|
||||
|
||||
|
||||
def make_string(xml):
|
||||
if isinstance(xml, (str, unicode)):
|
||||
xml = make_xml(xml)
|
||||
s = ET.tostring(xml)
|
||||
if s == '<xml />':
|
||||
return ''
|
||||
assert s.startswith('<xml>') and s.endswith('</xml>'), repr(s)
|
||||
return s[5:-6]
|
||||
|
||||
|
||||
def install():
|
||||
doctest.OutputChecker = HTMLOutputChecker
|
||||
|
107
creole/xmlreflector.py
Normal file
|
@ -0,0 +1,107 @@
|
|||
# coding: utf-8
|
||||
from os.path import join, isfile, basename, isdir, dirname
|
||||
from os import listdir
|
||||
from base64 import decodestring
|
||||
from io import BytesIO
|
||||
from collections import OrderedDict
|
||||
import sys
|
||||
|
||||
from lxml.etree import DTD, parse, tostring, XMLParser # pylint: disable=E0611
|
||||
|
||||
from .i18n import _
|
||||
from .utils import normalize_family
|
||||
from .error import CreoleDictConsistencyError
|
||||
from .config import VIRTBASE, VIRTROOT, VIRTMASTER, templatedir
|
||||
|
||||
HIGH_COMPATIBILITY = True
|
||||
|
||||
class XMLReflector(object):
|
||||
"""Helper class for loading the Creole XML file,
|
||||
parsing it, validating against the Creole DTD,
|
||||
writing the xml result on the disk
|
||||
"""
|
||||
def __init__(self):
|
||||
self.dtd = None
|
||||
|
||||
def parse_dtd(self, dtdfilename):
|
||||
"""Loads the Creole DTD
|
||||
|
||||
:raises IOError: if the DTD is not found
|
||||
|
||||
:param dtdfilename: the full filename of the Creole DTD
|
||||
"""
|
||||
if not isfile(dtdfilename):
|
||||
raise IOError(_("no such DTD file: {}").format(dtdfilename))
|
||||
with open(dtdfilename, 'r') as dtdfd:
|
||||
self.dtd = DTD(dtdfd)
|
||||
|
||||
def parse_xmlfile(self, xmlfile, from_zephir=None, zephir2=False):
|
||||
"""Parses and validates some Creole XML against the Creole DTD
|
||||
|
||||
:returns: the root element tree object
|
||||
"""
|
||||
if from_zephir:
|
||||
if zephir2:
|
||||
document = parse(BytesIO(xmlfile), XMLParser(remove_blank_text=True))
|
||||
else:
|
||||
document = parse(BytesIO(decodestring(xmlfile)), XMLParser(remove_blank_text=True))
|
||||
else:
|
||||
document = parse(xmlfile)
|
||||
assert self.dtd.validate(document), _("not a valid xml file: {}").format(xmlfile)
|
||||
return document.getroot()
|
||||
|
||||
def load_xml_from_folders(self, xmlfolders, from_zephir):
|
||||
"""Loads all the XML files located in the xmlfolders' list
|
||||
|
||||
:param xmlfolders: list of full folder's name
|
||||
"""
|
||||
documents = []
|
||||
if from_zephir:
|
||||
for idx, xmlfile in enumerate(xmlfolders):
|
||||
documents.append(('generate_{}'.format(idx), self.parse_xmlfile(xmlfile, from_zephir=from_zephir)))
|
||||
else:
|
||||
if not isinstance(xmlfolders, list):
|
||||
xmlfolders = [xmlfolders]
|
||||
for xmlfolder in xmlfolders:
|
||||
if isinstance(xmlfolder, list) or isinstance(xmlfolder, tuple):
|
||||
# directory group : collect files from each
|
||||
# directory and sort them before loading
|
||||
group_files = []
|
||||
for idx, subdir in enumerate(xmlfolder):
|
||||
if isdir(subdir):
|
||||
for filename in listdir(subdir):
|
||||
group_files.append((filename, idx, subdir))
|
||||
else:
|
||||
group_files.append((basename(subdir), idx, dirname(subdir)))
|
||||
def sort_group(file1, file2):
|
||||
if file1[0] == file2[0]:
|
||||
# sort by initial xmlfolder order if same name
|
||||
return file1[1].__cmp__(file2[1])
|
||||
# sort by filename
|
||||
elif file1[0] > file2[0]:
|
||||
return 1
|
||||
else:
|
||||
return -1
|
||||
group_files.sort(sort_group)
|
||||
filenames = [join(f[2], f[0]) for f in group_files]
|
||||
elif isdir(xmlfolder):
|
||||
filenames = []
|
||||
for filename in listdir(xmlfolder):
|
||||
filenames.append(join(xmlfolder, filename))
|
||||
filenames.sort()
|
||||
else:
|
||||
filenames = [xmlfolder]
|
||||
for xmlfile in filenames:
|
||||
if xmlfile.endswith('.xml'):
|
||||
#xmlfile_path = join(xmlfolder, xmlfile)
|
||||
documents.append((xmlfile, self.parse_xmlfile(xmlfile)))
|
||||
return documents
|
||||
|
||||
def save_xmlfile(self, xmlfilename, xml): # pylint: disable=R0201
|
||||
"""Write a bunch of XML on the disk
|
||||
"""
|
||||
with open(xmlfilename, 'w') as xmlfh:
|
||||
if sys.version_info[0] < 3:
|
||||
xmlfh.write(tostring(xml, pretty_print=True, encoding="UTF-8", xml_declaration=True))
|
||||
else:
|
||||
xmlfh.write(tostring(xml, pretty_print=True, encoding="UTF-8", xml_declaration=True).decode('utf8'))
|
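A minimal usage sketch of this class (illustrative only, not part of the commit; the DTD and dictionary paths are examples, not guaranteed locations):

reflector = XMLReflector()
reflector.parse_dtd('/usr/share/creole/creole.dtd')  # example path
documents = reflector.load_xml_from_folders(['/etc/eole/dicos'], from_zephir=None)  # example folder
for xmlfile, root in documents:
    print(xmlfile, root.tag)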
235
data/creole.dtd
Normal file
|
@ -0,0 +1,235 @@
|
|||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
|
||||
<!-- ===================================================================== -->
|
||||
|
||||
<!-- Definition de la DTD du fichier creole -->
|
||||
|
||||
<!-- ===================================================================== -->
|
||||
|
||||
<!--
|
||||
# Conception :
|
||||
# Eole (http://eole.orion.education.fr)
|
||||
|
||||
# Copyright (C) 2005-2018
|
||||
|
||||
# distribue sous la licence GPL-2
|
||||
|
||||
# En attendant une traduction officielle de la GPL, la notice de
|
||||
# copyright demeure en anglais.
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
|
||||
# Se reporter a la documentation envoyee avec le programme pour la notice.
|
||||
|
||||
-->
|
||||
<!--================ -->
|
||||
<!-- root element -->
|
||||
<!-- =============== -->
|
||||
|
||||
<!ELEMENT creole (containers | files | family_action | variables | constraints | help)*>
|
||||
|
||||
<!-- ============== -->
|
||||
<!-- files element -->
|
||||
<!-- ============== -->
|
||||
|
||||
<!ELEMENT family_action (action)>
|
||||
<!ATTLIST family_action name CDATA #REQUIRED>
|
||||
<!ATTLIST family_action description CDATA #IMPLIED>
|
||||
<!ATTLIST family_action color CDATA #IMPLIED>
|
||||
<!ATTLIST family_action image CDATA #IMPLIED>
|
||||
<!ELEMENT action ((input* | profile* | ewtapp* | tag* | saltaction*)*)>
|
||||
<!ATTLIST action type (form|custom|external|reader|apache) "custom">
|
||||
<!ATTLIST action title CDATA #REQUIRED>
|
||||
<!ATTLIST action description CDATA #REQUIRED>
|
||||
<!ATTLIST action rewrite CDATA #IMPLIED>
|
||||
<!ATTLIST action image CDATA #IMPLIED>
|
||||
<!ATTLIST action actionlist CDATA #IMPLIED>
|
||||
<!-- for apache action -->
|
||||
<!ATTLIST action apache_path CDATA #IMPLIED>
|
||||
<!ATTLIST action apache_path_type (FilenameOption|SymLinkOption) "FilenameOption">
|
||||
<!-- for external action -->
|
||||
<!ATTLIST action url CDATA #IMPLIED>
|
||||
<!ATTLIST action url_type (URLOption|SymLinkOption) "URLOption">
|
||||
<!-- for form action -->
|
||||
<!ATTLIST action save (True|False) "False">
|
||||
<!ELEMENT files ((service* | service_access* | service_restriction* | package* | file*)*)>
|
||||
|
||||
<!ELEMENT containers ((container* | all*)*)>
|
||||
|
||||
<!ELEMENT container ((service* | service_access* | service_restriction* | interface* | package* | file* | disknod* | host* | fstab*)*) >
|
||||
<!ATTLIST container name CDATA #REQUIRED >
|
||||
<!ATTLIST container id CDATA #IMPLIED >
|
||||
<!ATTLIST container group CDATA #IMPLIED >
|
||||
|
||||
<!ELEMENT all ((service* | interface* | package* | file* | disknod* | host* | fstab*)*) >
|
||||
|
||||
<!ELEMENT service (#PCDATA)>
|
||||
<!ATTLIST service servicelist CDATA #IMPLIED >
|
||||
<!ATTLIST service instance_mode (when_container|when_no_container|always) "always">
|
||||
<!ATTLIST service method (systemd|upstart|apache|network) "systemd">
|
||||
<!ATTLIST service redefine (True|False) "False">
|
||||
|
||||
<!ELEMENT input (#PCDATA)>
|
||||
<!ELEMENT profile (#PCDATA)>
|
||||
<!ELEMENT ewtapp (#PCDATA)>
|
||||
<!ELEMENT tag (#PCDATA)>
|
||||
<!ELEMENT saltaction (#PCDATA)>
|
||||
|
||||
<!ELEMENT service_access ((port | tcpwrapper)*)>
|
||||
<!ATTLIST service_access service CDATA #REQUIRED >
|
||||
|
||||
<!ELEMENT port (#PCDATA)> <!--port_type-->
|
||||
<!ATTLIST port port_type (PortOption|SymLinkOption) "PortOption">
|
||||
<!ATTLIST port service_accesslist CDATA #IMPLIED >
|
||||
<!ATTLIST port protocol (tcp|udp) "tcp">
|
||||
|
||||
<!ELEMENT tcpwrapper (#PCDATA)> <!--tcpwrapper_type-->
|
||||
<!ATTLIST tcpwrapper tcpwrapper_type (UnicodeOption|SymLinkOption) "UnicodeOption">
|
||||
<!ATTLIST tcpwrapper service_accesslist CDATA #IMPLIED >
|
||||
|
||||
<!ELEMENT service_restriction (ip*)>
|
||||
<!ATTLIST service_restriction service CDATA #REQUIRED >
|
||||
|
||||
<!ELEMENT ip (#PCDATA)> <!--ip_type-->
|
||||
<!ATTLIST ip service_restrictionlist CDATA #IMPLIED >
|
||||
<!ATTLIST ip ip_type (NetworkOption|SymLinkOption) "NetworkOption">
|
||||
<!ATTLIST ip interface_type (UnicodeOption|SymLinkOption) "UnicodeOption">
|
||||
<!ATTLIST ip interface CDATA #REQUIRED> <!--interface_type-->
|
||||
<!ATTLIST ip netmask_type (NetmaskOption|SymLinkOption) "NetmaskOption">
|
||||
<!ATTLIST ip netmask CDATA "255.255.255.255"> <!--netmask_type-->
|
||||
|
||||
<!ELEMENT interface (#PCDATA)>
|
||||
<!ATTLIST interface interfacelist CDATA #IMPLIED >
|
||||
<!ATTLIST interface linkto CDATA #REQUIRED >
|
||||
<!ATTLIST interface ip CDATA #REQUIRED> <!--SymLinkOption-->
|
||||
<!ATTLIST interface ip_type (SymLinkOption) "SymLinkOption">
|
||||
<!ATTLIST interface mask CDATA #REQUIRED> <!--SymLinkOption-->
|
||||
<!ATTLIST interface mask_type (SymLinkOption) "SymLinkOption">
|
||||
<!ATTLIST interface bcast CDATA #IMPLIED> <!--SymLinkOption-->
|
||||
<!ATTLIST interface bcast_type (SymLinkOption) "SymLinkOption">
|
||||
<!ATTLIST interface gateway CDATA #IMPLIED> <!--SymLinkOption-->
|
||||
<!ATTLIST interface gateway_type (SymLinkOption) "SymLinkOption">
|
||||
<!ATTLIST interface method (bridge|macvlan) "macvlan" >
|
||||
<!ATTLIST interface redefine (True|False) "False">
|
||||
|
||||
<!ELEMENT host EMPTY >
|
||||
<!ATTLIST host hostlist CDATA #IMPLIED >
|
||||
<!ATTLIST host name CDATA #REQUIRED > <!--SymLinkOption-->
|
||||
<!ATTLIST host name_type (SymLinkOption) "SymLinkOption">
|
||||
<!ATTLIST host ip CDATA #REQUIRED > <!--SymLinkOption-->
|
||||
<!ATTLIST host ip_type (SymLinkOption) "SymLinkOption">
|
||||
<!ATTLIST host crossed (True|False) "True" >
|
||||
<!ATTLIST host instance_mode (when_container|when_no_container|always) "always">
|
||||
<!ATTLIST host comment CDATA #IMPLIED >
|
||||
|
||||
<!ELEMENT fstab EMPTY >
|
||||
<!ATTLIST fstab name CDATA #REQUIRED> <!--name_type-->
|
||||
<!ATTLIST fstab name_type (FilenameOption|SymLinkOption) "FilenameOption">
|
||||
<!ATTLIST fstab type (bind|normal) "bind">
|
||||
<!ATTLIST fstab fstype (auto|ext3|ext4|nfs|smb) "auto">
|
||||
<!ATTLIST fstab mount_point CDATA #IMPLIED> <!--mount_point_type-->
|
||||
<!ATTLIST fstab mount_point_type (FilenameOption|SymLinkOption) "FilenameOption">
|
||||
<!ATTLIST fstab options CDATA #IMPLIED>
|
||||
<!ATTLIST fstab checks CDATA #IMPLIED>
|
||||
<!ATTLIST fstab fstablist CDATA #IMPLIED>
|
||||
<!ATTLIST fstab instance_mode (when_container|when_no_container|always) "when_container">
|
||||
|
||||
<!ELEMENT package (#PCDATA)>
|
||||
<!ATTLIST package instance_mode (when_container|when_no_container|always) "always">
|
||||
|
||||
<!ELEMENT disknod (#PCDATA)>
|
||||
|
||||
<!ELEMENT file EMPTY>
|
||||
<!ATTLIST file name CDATA #REQUIRED >
|
||||
<!ATTLIST file source CDATA #IMPLIED>
|
||||
<!ATTLIST file mode CDATA #IMPLIED >
|
||||
<!ATTLIST file owner CDATA #IMPLIED >
|
||||
<!ATTLIST file group CDATA #IMPLIED >
|
||||
<!ATTLIST file filelist CDATA #IMPLIED >
|
||||
<!ATTLIST file mkdir (True|False) "False">
|
||||
<!ATTLIST file instance_mode (when_container|when_no_container|always) "always">
|
||||
<!ATTLIST file rm (True|False) "False">
|
||||
<!ATTLIST file del_comment CDATA #IMPLIED >
|
||||
<!ATTLIST file redefine (True|False) "False">
|
||||
|
||||
<!ELEMENT variables (family*, separators*)>
|
||||
<!ELEMENT family (#PCDATA | variable)*>
|
||||
<!ATTLIST family name CDATA #REQUIRED>
|
||||
<!ATTLIST family description CDATA #IMPLIED>
|
||||
<!ATTLIST family mode (basic|normal|expert) "basic">
|
||||
<!ATTLIST family icon CDATA #IMPLIED>
|
||||
<!ATTLIST family hidden (True|False) "False">
|
||||
|
||||
<!ELEMENT variable (#PCDATA | value)*>
|
||||
<!ATTLIST variable name CDATA #REQUIRED>
|
||||
<!ATTLIST variable type CDATA #IMPLIED>
|
||||
<!ATTLIST variable description CDATA #IMPLIED>
|
||||
<!ATTLIST variable hidden (True|False) "False">
|
||||
<!ATTLIST variable disabled (True|False) "False">
|
||||
<!ATTLIST variable multi (True|False) "False">
|
||||
<!ATTLIST variable redefine (True|False) "False">
|
||||
<!ATTLIST variable exists (True|False) "True">
|
||||
<!ATTLIST variable mandatory (True|False) "False">
|
||||
<!ATTLIST variable auto_freeze (True|False) "False">
|
||||
<!ATTLIST variable auto_save (True|False) "False">
|
||||
<!ATTLIST variable mode (basic|normal|expert) "normal">
|
||||
<!ATTLIST variable remove_check (True|False) "False">
|
||||
<!ATTLIST variable remove_condition (True|False) "False">
|
||||
|
||||
<!ELEMENT separators (separator*)>
|
||||
|
||||
<!ELEMENT separator (#PCDATA)>
|
||||
<!ATTLIST separator name CDATA #REQUIRED>
|
||||
<!ATTLIST separator never_hidden CDATA #IMPLIED>
|
||||
|
||||
<!ELEMENT value (#PCDATA)>
|
||||
|
||||
<!ELEMENT constraints ((fill* | check* | condition* | auto* | group*)*)>
|
||||
<!ELEMENT fill (param*)>
|
||||
<!ATTLIST fill name CDATA #REQUIRED>
|
||||
<!ATTLIST fill target CDATA #REQUIRED>
|
||||
|
||||
<!ELEMENT check (param*)>
|
||||
<!ATTLIST check name CDATA #REQUIRED>
|
||||
<!ATTLIST check target CDATA #REQUIRED>
|
||||
<!ATTLIST check level (error|warning) "error">
|
||||
<!ATTLIST check probe (True|False) "False">
|
||||
|
||||
<!ELEMENT auto ((param)*)>
|
||||
<!ATTLIST auto name CDATA #REQUIRED>
|
||||
<!ATTLIST auto target CDATA #REQUIRED>
|
||||
|
||||
<!ELEMENT condition ((target | param)+ )>
|
||||
<!ATTLIST condition name CDATA #REQUIRED>
|
||||
<!ATTLIST condition source CDATA #REQUIRED>
|
||||
<!ATTLIST condition fallback (True|False) "False">
|
||||
|
||||
<!ELEMENT group (slave+)>
|
||||
<!ATTLIST group master CDATA #REQUIRED>
|
||||
|
||||
<!ELEMENT param (#PCDATA)>
|
||||
<!ATTLIST param type (string|eole|number|container|context|python) "string">
|
||||
<!ATTLIST param name CDATA #IMPLIED>
|
||||
<!ATTLIST param hidden (True|False) "True">
|
||||
<!ATTLIST param optional (True|False) "False">
|
||||
|
||||
<!ELEMENT target (#PCDATA)>
|
||||
<!ATTLIST target type (family|filelist|servicelist|interfacelist|variable|service_accesslist|service_restrictionlist|hostlist|fstablist|actionlist) "variable">
|
||||
<!ATTLIST target optional (True|False) "False">
|
||||
|
||||
<!ELEMENT slave (#PCDATA)>
|
||||
|
||||
<!ELEMENT help ((variable* | family*)*)>
|
||||
|
64
data/diag.py
Executable file
|
@ -0,0 +1,64 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
###########################################################################
|
||||
# Eole NG - 2009
|
||||
# Copyright Pole de Competence Eole (Ministere Education - Academie Dijon)
|
||||
# http://eole.orion.education.fr - eole@ac-dijon.fr
|
||||
#
|
||||
# Licence CeCill
|
||||
# cf: http://www.cecill.info/licences.fr.html
|
||||
###########################################################################
|
||||
|
||||
import sys
|
||||
import socket
|
||||
from os.path import isfile
|
||||
from os import system, stat
|
||||
from pyeole.httprequest import HTTPRequest
|
||||
from creole.config import configeol
|
||||
from creole.client import CreoleClient
|
||||
|
||||
client = CreoleClient()
|
||||
|
||||
# adresse IP et port du serveur d'enregistrement
|
||||
server = "http://194.167.18.21/apps/AutoDiag/index.n/diagnose"
|
||||
md5file = "/etc/eole/.server.MD5"
|
||||
module = "%s-%s" % (client.get_creole('eole_module'), client.get_creole('eole_version'))
|
||||
|
||||
def get_md5():
|
||||
""" calcul de l'identifiant md5 """
|
||||
if not isfile(md5file) or stat(md5file).st_size == 0:
|
||||
system("md5sum %s | awk '{print $1}' > %s" % (configeol, md5file))
|
||||
fp = file(md5file)
|
||||
return (fp.read().split()[0])
|
||||
|
||||
def get_proxy():
|
||||
""" récupération du proxy à utiliser """
|
||||
if client.get_creole('activer_proxy_client') == 'oui':
|
||||
return "http://{0}:{1}".format(
|
||||
client.get_creole('proxy_client_adresse'),
|
||||
client.get_creole('proxy_client_port'))
|
||||
return ''
|
||||
|
||||
if __name__ == "__main__":
|
||||
id5 = get_md5()
|
||||
rne = client.get_creole('numero_etab')
|
||||
data = {"ID5":id5, "module":module, "rne":rne, "dep":rne[0:3]}
|
||||
socket.setdefaulttimeout(5)
|
||||
proxy = get_proxy()
|
||||
if proxy != '':
|
||||
# essai avec proxy
|
||||
try:
|
||||
req = HTTPRequest(proxy={'http':proxy})
|
||||
req.request(server, post_datas=data)
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
sys.exit(0)
|
||||
# essai sans proxy
|
||||
try:
|
||||
req = HTTPRequest()
|
||||
req.request(server, post_datas=data)
|
||||
except:
|
||||
sys.exit(1)
|
||||
else:
|
||||
sys.exit(0)
|
2
data/funcs/__init__.py
Normal file
|
@ -0,0 +1,2 @@
|
|||
"""Module de fonctions supplémentaires accessibles à creole. Tous les fichiers python
|
||||
contenus dans ce répertoire sont lus par le module eosfunc de creole"""
|
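For example (illustrative sketch, not part of the commit; the module and function names are invented), any function defined in a module dropped into this directory becomes available to creole through eosfunc:

# data/funcs/exemple.py -- hypothetical module
def calc_exemple(*args, **kwargs):
    # extra computation function picked up by creole's eosfunc module
    return u'valeur calculee'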
69
data/gen_certif.py
Executable file
|
@ -0,0 +1,69 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: UTF-8 -*-
|
||||
"""
|
||||
script de generation d'un certificat ssl
|
||||
prend un nom de fichier facultatif en argument (destination du certificat)
|
||||
|
||||
usage::
|
||||
|
||||
soit
|
||||
%prog (-fc) [nom_certif]
|
||||
soit
|
||||
%prog (-f)
|
||||
|
||||
si [nom_certif] non renseigne, regenere tous les certificats par defaut ainsi que la ca locale.
|
||||
Sinon, ne genere que [nom_certif]
|
||||
|
||||
-f :force la regeneration du (ou des) certificat(s) s'il(s) existe(nt)
|
||||
-c : dans le cas de la generation d'un seul certificat, on copie la clef
|
||||
|
||||
"""
|
||||
import sys, os
|
||||
from optparse import OptionParser
|
||||
|
||||
from creole import cert
|
||||
from pyeole.encode import normalize
|
||||
|
||||
def parse_command_line():
|
||||
parser = OptionParser(__doc__)
|
||||
parser.add_option("-c",
|
||||
action="store_true", dest="copy", default=False,
|
||||
help="copie de la clef")
|
||||
|
||||
parser.add_option("-f",
|
||||
action="store_true", dest="regen", default=False,
|
||||
help="force la regeneration de la clef")
|
||||
|
||||
options, args = parser.parse_args()
|
||||
if len(args) > 1:
|
||||
parser.error("Il faut au maximum un certificat")
|
||||
return options, args
|
||||
|
||||
options, args = parse_command_line()
|
||||
|
||||
regen = options.regen
|
||||
copy = options.copy
|
||||
|
||||
if len(args) == 1:
|
||||
certfile = args[0]
|
||||
else:
|
||||
certfile = None
|
||||
|
||||
try:
|
||||
cert.rehash_if_needed()
|
||||
if certfile != None:
|
||||
certfile = os.path.abspath(certfile)
|
||||
dest_dir = os.path.dirname(certfile)
|
||||
if not os.path.isdir(dest_dir):
|
||||
print "Répertoire de destination inexistant (%s)" % dest_dir
|
||||
sys.exit(1)
|
||||
print "Generation du certificat machine"
|
||||
cert.gen_certif(certfile, regen=regen, copy_key=copy)
|
||||
else:
|
||||
# génération de tous les certificats (CA, eole, scribe...)
|
||||
cert.gen_certs(regen=regen)
|
||||
sys.exit(0)
|
||||
except Exception, err:
|
||||
print "Erreur : "
|
||||
print u'{0}'.format(normalize(err))
|
||||
sys.exit(1)
|
26
data/testpatches.py
Executable file
|
@ -0,0 +1,26 @@
|
|||
#! /usr/bin/env python
|
||||
# -*- coding: UTF-8 -*-
|
||||
"""
|
||||
Test des patches pour diagnose
|
||||
réutilisation du code de zephir-client
|
||||
"""
|
||||
import sys
|
||||
from glob import glob
|
||||
from os.path import basename
|
||||
from creole import utils
|
||||
from creole.config import patch_dir
|
||||
from zephir.monitor.agents import patches
|
||||
from os.path import join
|
||||
|
||||
patchs = glob(join(patch_dir, '*.patch'))
|
||||
patchs.extend(glob(join(patch_dir, 'variante', '*.patch')))
|
||||
err = []
|
||||
for patch in patchs:
|
||||
verif = patches.verify_patch(patch).values()
|
||||
if len(verif) > 0 and len(verif[0]) > 0:
|
||||
err.append(basename(patch))
|
||||
if len(err) != 0:
|
||||
utils.print_red('Erreur')
|
||||
print "fichiers : %s" % (", ".join(err),)
|
||||
else:
|
||||
utils.print_green('Ok')
|
10
deprecated/FonctionsEoleNg
Executable file
|
@ -0,0 +1,10 @@
|
|||
#!/bin/sh
|
||||
|
||||
echo "La bibliothèque shell FonctionsEoleNg ne doit plus être utilisée." >&2
|
||||
if [ -n "${0}" ]
|
||||
then
|
||||
echo "Merci de corriger le code de '${0}'" >&2
|
||||
fi
|
||||
echo ''
|
||||
echo "Voir la documentation http://dev-eole.ac-dijon.fr/projects/eole/wiki/PrepareEOLE24" >&2
|
||||
exit 255
|
100
doc/api/epydoc.css
Normal file
|
@ -0,0 +1,100 @@
|
|||
|
||||
/* Body color */
|
||||
body { background: #ffffff; color: #000000; }
|
||||
|
||||
/* Tables */
|
||||
table.summary, table.details, table.index
|
||||
{ background: #e8f0f8; color: #000000; }
|
||||
tr.summary, tr.details, tr.index
|
||||
{ background: #70b0f0; color: #000000;
|
||||
text-align: left; font-size: 120%; }
|
||||
tr.group { background: #c0e0f8; color: #000000;
|
||||
text-align: left; font-size: 120%;
|
||||
font-style: italic; }
|
||||
|
||||
/* Documentation page titles */
|
||||
h2.module { margin-top: 0.2em; }
|
||||
h2.class { margin-top: 0.2em; }
|
||||
|
||||
/* Headings */
|
||||
h1.heading { font-size: +140%; font-style: italic;
|
||||
font-weight: bold; }
|
||||
h2.heading { font-size: +125%; font-style: italic;
|
||||
font-weight: bold; }
|
||||
h3.heading { font-size: +110%; font-style: italic;
|
||||
font-weight: normal; }
|
||||
|
||||
/* Base tree */
|
||||
pre.base-tree { font-size: 80%; margin: 0; }
|
||||
|
||||
/* Details Sections */
|
||||
table.func-details { background: #e8f0f8; color: #000000;
|
||||
border: 2px groove #c0d0d0;
|
||||
padding: 0 1em 0 1em; margin: 0.4em 0 0 0; }
|
||||
h3.func-detail { background: transparent; color: #000000;
|
||||
margin: 0 0 1em 0; }
|
||||
|
||||
table.var-details { background: #e8f0f8; color: #000000;
|
||||
border: 2px groove #c0d0d0;
|
||||
padding: 0 1em 0 1em; margin: 0.4em 0 0 0; }
|
||||
h3.var-details { background: transparent; color: #000000;
|
||||
margin: 0 0 1em 0; }
|
||||
|
||||
/* Function signatures */
|
||||
.sig { background: transparent; color: #000000;
|
||||
font-weight: bold; }
|
||||
.sig-name { background: transparent; color: #006080; }
|
||||
.sig-arg, .sig-kwarg, .sig-vararg
|
||||
{ background: transparent; color: #008060; }
|
||||
.sig-default { background: transparent; color: #602000; }
|
||||
.summary-sig { background: transparent; color: #000000; }
|
||||
.summary-sig-name { background: transparent; color: #204080; }
|
||||
.summary-sig-arg, .summary-sig-kwarg, .summary-sig-vararg
|
||||
{ background: transparent; color: #008060; }
|
||||
|
||||
/* Doctest blocks */
|
||||
.py-src { background: transparent; color: #000000; }
|
||||
.py-prompt { background: transparent; color: #005050;
|
||||
font-weight: bold;}
|
||||
.py-string { background: transparent; color: #006030; }
|
||||
.py-comment { background: transparent; color: #003060; }
|
||||
.py-keyword { background: transparent; color: #600000; }
|
||||
.py-output { background: transparent; color: #404040; }
|
||||
pre.doctestblock { background: #f4faff; color: #000000;
|
||||
padding: .5em; margin: 1em;
|
||||
border: 1px solid #708890; }
|
||||
table pre.doctestblock
|
||||
{ background: #dce4ec; color: #000000;
|
||||
padding: .5em; margin: 1em;
|
||||
border: 1px solid #708890; }
|
||||
|
||||
/* Variable values */
|
||||
pre.variable { background: #dce4ec; color: #000000;
|
||||
padding: .5em; margin: 0;
|
||||
border: 1px solid #708890; }
|
||||
.variable-linewrap { background: transparent; color: #604000; }
|
||||
.variable-ellipsis { background: transparent; color: #604000; }
|
||||
.variable-quote { background: transparent; color: #604000; }
|
||||
.re { background: transparent; color: #000000; }
|
||||
.re-char { background: transparent; color: #006030; }
|
||||
.re-op { background: transparent; color: #600000; }
|
||||
.re-group { background: transparent; color: #003060; }
|
||||
.re-ref { background: transparent; color: #404040; }
|
||||
|
||||
/* Navigation bar */
|
||||
table.navbar { background: #a0c0ff; color: #0000ff;
|
||||
border: 2px groove #c0d0d0; }
|
||||
th.navbar { background: #a0c0ff; color: #0000ff; }
|
||||
th.navselect { background: #70b0ff; color: #000000; }
|
||||
.nomargin { margin: 0; }
|
||||
|
||||
/* Links */
|
||||
a:link { background: transparent; color: #0000ff; }
|
||||
a:visited { background: transparent; color: #204080; }
|
||||
a.navbar:link { background: transparent; color: #0000ff;
|
||||
text-decoration: none; }
|
||||
a.navbar:visited { background: transparent; color: #204080;
|
||||
text-decoration: none; }
|
||||
|
||||
/* Lists */
|
||||
ul { margin-top: 0; }
|
60
doc/certifs.txt
Normal file
|
@ -0,0 +1,60 @@
|
|||
génération des certificats
|
||||
==========================
|
||||
|
||||
mode opératoire
|
||||
|
||||
|
||||
au premier lancement de ``gen_certif.py``
|
||||
------------------------------------------
|
||||
|
||||
- vérifie l'existence d'une CA ou non
|
||||
- génère la CA
|
||||
- génère les certificats par défaut (clef privée, requète de certificat)
|
||||
- signature des certificats
|
||||
|
||||
aux lancements ultérieurs
|
||||
-------------------------
|
||||
|
||||
|
||||
- vérifie l'existence d'une CA ou non
|
||||
- génère le certificat passé en argument
|
||||
|
||||
::
|
||||
|
||||
gen_certif.py (-f) [nom_certif]
|
||||
|
||||
si [nom_certif] non renseigné, regénère tous les certificats par défaut
|
||||
ainsi que la CA locale. Sinon, ne génère que [nom_certif]
|
||||
-f :force la regénération du (ou des) certificat(s) s'il(s) existe(nt)
|
||||
|
||||
|
||||
``regen``
|
||||
|
||||
attribut permettant de forcer (ou pas) la regénération
|
||||
si ``regen==True`` alors les certificats sont regénérés même s'ils existent
|
||||
si ``regen==False`` alors les certificats ne sont générés que s'ils
|
||||
n'existent pas.
|
||||
|
||||
api
|
||||
----
|
||||
|
||||
- génération d'un seul certificat :
|
||||
|
||||
``cert.gen_certif(certfile,regen=regen, copy_key=copy)``
|
||||
|
||||
|
||||
- génération de tous les certificats :
|
||||
|
||||
``cert.gen_certs(regen=regen)``
|
||||
|
||||
|
||||
|
||||
::
|
||||
|
||||
gen_certs()
|
||||
|-> gen_ca()
|
||||
|-> certif_loader()
|
||||
|-> gen_certif()
|
||||
|-> finalise_certs()
|
||||
|
||||
|
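A minimal usage sketch of this API (illustrative only, not part of the commit; the certificate path is just an example):

::

    from creole import cert

    # regenerate every default certificate and the local CA, even if they already exist
    cert.gen_certs(regen=True)

    # generate a single certificate only if it is missing, copying its key as well (option -c)
    cert.gen_certif('/etc/ssl/certs/exemple.crt', regen=False, copy_key=True)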
2
doc/clean.sh
Executable file
|
@ -0,0 +1,2 @@
|
|||
rm -f *.html
|
||||
rm -f api/*.html
|
15
doc/commande.txt
Normal file
|
@ -0,0 +1,15 @@
|
|||
|
||||
|
||||
process
|
||||
-------
|
||||
|
||||
- point d'entrée : `process.py` méthode *run()*
|
||||
- lecture des fichiers dictionnaires *xml*
|
||||
- lecture du fichier */etc/eole/config.eol* pour remplir l'objet
|
||||
dictionnaire
|
||||
|
||||
|
||||
mapping avec la ligne de commande
|
||||
---------------------------------
|
||||
|
||||
.. TODO
|
377
doc/default.css
Normal file
|
@ -0,0 +1,377 @@
|
|||
/*
|
||||
:Author: David Goodger
|
||||
:Contact: goodger at users.sourceforge.net
|
||||
:date: $Date: 2004/11/11 23:11:44 $
|
||||
:version: $Revision: 1.1 $
|
||||
:copyright: This stylesheet has been placed in the public domain.
|
||||
|
||||
Default cascading style sheet for the HTML output of Docutils.
|
||||
*/
|
||||
|
||||
/* "! important" is used here to override other ``margin-top`` and
|
||||
``margin-bottom`` styles that are later in the stylesheet or
|
||||
more specific. See <http://www.w3.org/TR/CSS1#the-cascade>. */
|
||||
|
||||
html, body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: Georgia, arial, sans-serif;
|
||||
padding: 3em;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 130%;
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-size: 110%;
|
||||
}
|
||||
|
||||
blockquote {
|
||||
width: 70%;
|
||||
margin: 2em auto;
|
||||
padding: 1em;
|
||||
background-color: #FFEEEE;
|
||||
border: 1px solid #EEDDDD;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.title {
|
||||
font-size: 180%;
|
||||
}
|
||||
|
||||
.subtitle {
|
||||
font-size: 100%;
|
||||
}
|
||||
|
||||
.first {
|
||||
margin-top: 0 ! important }
|
||||
|
||||
.last {
|
||||
margin-bottom: 0 ! important }
|
||||
|
||||
.hidden {
|
||||
display: none }
|
||||
|
||||
a.toc-backref {
|
||||
text-decoration: none ;
|
||||
color: black }
|
||||
|
||||
blockquote.epigraph {
|
||||
margin: 2em 5em ; }
|
||||
|
||||
dd {
|
||||
margin-bottom: 0.5em }
|
||||
|
||||
/* Uncomment (& remove this text!) to get bold-faced definition list terms
|
||||
dt {
|
||||
font-weight: bold }
|
||||
*/
|
||||
|
||||
div.abstract {
|
||||
margin: 2em 5em }
|
||||
|
||||
div.abstract p.topic-title {
|
||||
font-weight: bold ;
|
||||
text-align: center }
|
||||
|
||||
div.admonition, div.attention, div.caution, div.danger, div.error,
|
||||
div.hint, div.important, div.note, div.tip, div.warning {
|
||||
margin: 2em ;
|
||||
border: medium outset ;
|
||||
padding: 1em }
|
||||
|
||||
div.admonition p.admonition-title, div.hint p.admonition-title,
|
||||
div.important p.admonition-title, div.note p.admonition-title,
|
||||
div.tip p.admonition-title {
|
||||
font-weight: bold ;
|
||||
font-family: sans-serif }
|
||||
|
||||
div.attention p.admonition-title, div.caution p.admonition-title,
|
||||
div.danger p.admonition-title, div.error p.admonition-title,
|
||||
div.warning p.admonition-title {
|
||||
color: red ;
|
||||
font-weight: bold ;
|
||||
font-family: sans-serif }
|
||||
|
||||
div.compound .compound-first, div.compound .compound-middle {
|
||||
margin-bottom: 0.5em }
|
||||
|
||||
div.compound .compound-last, div.compound .compound-middle {
|
||||
margin-top: 0.5em }
|
||||
|
||||
div.dedication {
|
||||
margin: 2em 5em ;
|
||||
text-align: center ;
|
||||
font-style: italic }
|
||||
|
||||
div.dedication p.topic-title {
|
||||
font-weight: bold ;
|
||||
font-style: normal }
|
||||
|
||||
div.figure {
|
||||
margin-left: 2em }
|
||||
|
||||
div.footer, div.header {
|
||||
font-size: smaller }
|
||||
|
||||
div.line-block {
|
||||
display: block ;
|
||||
margin-top: 1em ;
|
||||
margin-bottom: 1em }
|
||||
|
||||
div.line-block div.line-block {
|
||||
margin-top: 0 ;
|
||||
margin-bottom: 0 ;
|
||||
margin-left: 1.5em }
|
||||
|
||||
div.sidebar {
|
||||
margin-left: 1em ;
|
||||
border: medium outset ;
|
||||
padding: 0em 1em ;
|
||||
background-color: #ffffee ;
|
||||
width: 40% ;
|
||||
float: right ;
|
||||
clear: right }
|
||||
|
||||
div.sidebar p.rubric {
|
||||
font-family: sans-serif ;
|
||||
font-size: medium }
|
||||
|
||||
div.system-messages {
|
||||
margin: 5em }
|
||||
|
||||
div.system-messages h1 {
|
||||
color: red }
|
||||
|
||||
div.system-message {
|
||||
border: medium outset ;
|
||||
padding: 1em }
|
||||
|
||||
div.system-message p.system-message-title {
|
||||
color: red ;
|
||||
font-weight: bold }
|
||||
|
||||
div.topic {
|
||||
margin: 2em }
|
||||
|
||||
h1.title {
|
||||
text-align: center }
|
||||
|
||||
h2.subtitle {
|
||||
text-align: center }
|
||||
|
||||
hr {
|
||||
width: 75% }
|
||||
|
||||
ol.simple, ul.simple {
|
||||
margin-bottom: 1em }
|
||||
|
||||
ol.arabic {
|
||||
list-style: decimal }
|
||||
|
||||
ol.loweralpha {
|
||||
list-style: lower-alpha }
|
||||
|
||||
ol.upperalpha {
|
||||
list-style: upper-alpha }
|
||||
|
||||
ol.lowerroman {
|
||||
list-style: lower-roman }
|
||||
|
||||
ol.upperroman {
|
||||
list-style: upper-roman }
|
||||
|
||||
p.attribution {
|
||||
text-align: right ;
|
||||
margin-left: 50% }
|
||||
|
||||
p.caption {
|
||||
font-style: italic }
|
||||
|
||||
p.credits {
|
||||
font-style: italic ;
|
||||
font-size: smaller }
|
||||
|
||||
p.label {
|
||||
white-space: nowrap }
|
||||
|
||||
p.rubric {
|
||||
font-weight: bold ;
|
||||
font-size: larger ;
|
||||
color: maroon ;
|
||||
text-align: center }
|
||||
|
||||
p.sidebar-title {
|
||||
font-family: sans-serif ;
|
||||
font-weight: bold ;
|
||||
font-size: larger }
|
||||
|
||||
p.sidebar-subtitle {
|
||||
font-family: sans-serif ;
|
||||
font-weight: bold }
|
||||
|
||||
p.topic-title {
|
||||
font-weight: bold }
|
||||
|
||||
pre.address {
|
||||
margin-bottom: 0 ;
|
||||
margin-top: 0 ;
|
||||
font-family: serif ;
|
||||
font-size: 100% }
|
||||
|
||||
pre.line-block {
|
||||
font-family: serif ;
|
||||
font-size: 100% }
|
||||
|
||||
.literal {
|
||||
color: #333;
|
||||
background-color: #EEE;
|
||||
}
|
||||
|
||||
pre.literal-block, pre.doctest-block {
|
||||
margin-left: 2em ;
|
||||
margin-right: 2em ;
|
||||
padding: 1em;
|
||||
color: #333;
|
||||
background-color: #EEE;}
|
||||
|
||||
span.classifier {
|
||||
font-family: sans-serif ;
|
||||
font-style: oblique }
|
||||
|
||||
span.classifier-delimiter {
|
||||
font-family: sans-serif ;
|
||||
font-weight: bold }
|
||||
|
||||
span.interpreted {
|
||||
font-family: sans-serif }
|
||||
|
||||
span.option {
|
||||
white-space: nowrap }
|
||||
|
||||
span.option-argument {
|
||||
font-style: italic }
|
||||
|
||||
span.pre {
|
||||
white-space: pre }
|
||||
|
||||
span.problematic {
|
||||
color: red }
|
||||
|
||||
table {
|
||||
margin-top: 0.5em ;
|
||||
margin-bottom: 0.5em }
|
||||
|
||||
table.citation {
|
||||
border-left: solid thin gray }
|
||||
|
||||
table.docinfo {
|
||||
margin: 2em 4em }
|
||||
|
||||
table.footnote {
|
||||
border-left: solid thin black }
|
||||
|
||||
td, th {
|
||||
padding-left: 0.5em ;
|
||||
padding-right: 0.5em ;
|
||||
vertical-align: top }
|
||||
|
||||
th.docinfo-name, th.field-name {
|
||||
font-weight: bold ;
|
||||
text-align: left ;
|
||||
white-space: nowrap }
|
||||
|
||||
h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
|
||||
font-size: 100% }
|
||||
|
||||
tt {
|
||||
background-color: #eeeeee
|
||||
}
|
||||
|
||||
ul.auto-toc {
|
||||
list-style-type: none }
|
||||
|
||||
.code-block {
|
||||
font-family: Courier New, Courier, monospace;
|
||||
font-size: 14px;
|
||||
margin: 0 2em;
|
||||
padding: 1em;
|
||||
color: #000;
|
||||
background-color: #EEE;
|
||||
border: 1px solid #DDD;
|
||||
}
|
||||
|
||||
/* Python markup *********************************************/
|
||||
/*Python keyword*/
|
||||
.p_word {
|
||||
color: #036;
|
||||
}
|
||||
/*Python identifier*/
|
||||
.p_identifier {
|
||||
color: #36C;
|
||||
}
|
||||
/*Python number*/
|
||||
.p_number {
|
||||
color: #36C;
|
||||
}
|
||||
/*other text*/
|
||||
.p_default {
|
||||
color: #036;
|
||||
}
|
||||
/*Python operator*/
|
||||
.p_operator {
|
||||
color: #036;
|
||||
}
|
||||
/*Python comment*/
|
||||
.p_commentline {
|
||||
color: #036;
|
||||
}
|
||||
/*function name*/
|
||||
.p_defname {
|
||||
color: #F63;
|
||||
font-weight: bold;
|
||||
}
|
||||
/*class name*/
|
||||
.p_classname {
|
||||
color: #F00;
|
||||
font-weight: bold;
|
||||
}
|
||||
/*string literals*/
|
||||
.p_character {
|
||||
color: green;
|
||||
}
|
||||
/*string literals*/
|
||||
.p_string {
|
||||
color: green;
|
||||
}
|
||||
/*triple-quoted strings*/
|
||||
.p_triple {
|
||||
color: green;
|
||||
}
|
||||
|
||||
/* HTML markup *********************************************/
|
||||
/*an html tag*/
|
||||
.h_tag {
|
||||
color: #36C;
|
||||
}
|
||||
/*text in a tag*/
|
||||
.h_default {
|
||||
color: #036;
|
||||
}
|
||||
/*attribute name*/
|
||||
.h_attribute {
|
||||
color: #6C3;
|
||||
}
|
||||
/*a double-quoted attribute value*/
|
||||
.h_doublestring {
|
||||
color: green;
|
||||
}
|
||||
/*attribute equals sign, for example*/
|
||||
.h_other {
|
||||
color: #036;
|
||||
}
1
doc/docutils.sh
Executable file
@@ -0,0 +1 @@
buildhtml.py --embed --stylesheet default.css --output-encoding iso-8859-1 --prune .svn --prune api/ --prune pydoctor --prune data .
3
doc/epydoc.sh
Executable file
@@ -0,0 +1,3 @@
cd ../creole
epydoc --html --no-private --output ../doc/api .
57
doc/template.txt
Normal file
@@ -0,0 +1,57 @@
Templates créole
================

comportement des templates
--------------------------

Template_

.. _Template: api/creole.template.Template-class.html

validations
-----------

Template.verify_

.. _Template.verify: api/creole.template.Template-class.html#verify


fichiers cibles
    fichiers modèle qui vont être instanciés au final (fichier destination)

- le fichier source (templatisé) *doit* exister ainsi que le
  fichier de destination (le fichier de configuration effectif)
  portant le même nom :

- le fichier cible, c'est-à-dire le fichier de configuration
  instancié, doit être présent

>>> import creole
>>> from creole.template import Template

>>> try:
...     t = Template('nexistepas.txt', templatedir= '/tmp')
...     t.verify()
... except creole.error.FileNotFound, e:
...     print e
...
le fichier /tmp/nexistepas.txt n'existe pas
>>>

.. note:: les deux vérifications (template source et fichier
   destination) sont faites en même temps

- le répertoire source *doit* exister

>>> try:
...     t = Template('/etc/group', templatedir= '/reperoire/qui/n/existe/pas')
...     t.verify()
... except creole.error.FileNotFound, e:
...     print e
...
le fichier /reperoire/qui/n/existe/pas/group n'existe pas
>>>
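Building only on the ``Template``/``verify()`` calls and the ``creole.error.FileNotFound`` exception shown in the doctests above, a small helper that checks several templates in one pass might look like this (sketch only, Python 2 syntax to match the doctests; the template names are placeholders)::

    from creole.template import Template
    from creole.error import FileNotFound

    def verify_all(names, templatedir='/tmp'):
        # return the templates whose source or target file is missing
        missing = []
        for name in names:
            try:
                Template(name, templatedir=templatedir).verify()
            except FileNotFound, err:
                missing.append((name, str(err)))
        return missing

    # print verify_all(['sshd_config', 'ntp.conf'])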
41
doc/utils.txt
Normal file
@@ -0,0 +1,41 @@
utilitaires techniques créole
=============================

utilitaire de tests
-------------------

creolecat_

.. _creolecat: api/creole.creolecat-module.html


un utilitaire de tests est mis à disposition pour ceux qui
souhaitent tester leurs fichiers de template sans pour autant lancer
une instanciation :

usage::

    creolecat.py -x <path>/eole.xml -o <path>/test.txt testtemplate.tmpl

testtemplate est le fichier à instancier

lancer l'option --help pour plus de détails

utilitaire de conversion
------------------------

conversion dans l'ancien langage de templating (notations *[%*)

pour ceux qui avaient déjà commencé leurs activités de templating pour
créole 2 (donc avec une autre notation), un utilitaire de conversion
est mis à disposition.
Il est dans la lib python creole et s'appelle creole2cheetah_

.. _creole2cheetah: api/creole.creole2cheetah-module.html

usage::

    cd creole
    [creole] ./creole2cheetah.py [nom du fichier source] > [nom du fichier destination]
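For example, the creolecat.py invocation documented above can also be driven from a test script through subprocess (sketch; the paths are placeholders)::

    import subprocess

    # instantiate testtemplate.tmpl against the eole.xml dictionary,
    # without running a full instanciation of the server
    subprocess.check_call(['creolecat.py',
                           '-x', '/etc/eole/eole.xml',
                           '-o', '/tmp/test.txt',
                           'testtemplate.tmpl'])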
201
doc/variables.txt
Normal file
@@ -0,0 +1,201 @@
Variables créole
================

typeole_

.. _typeole: api/creole.typeole-module.html

variable créole
    instance d'un objet type eole ; à un nom de variable peuvent
    correspondre plusieurs valeurs

>>> from creole import typeole
>>> var = typeole.EoleVar('mavariable')
>>> var.val
[]
>>> var.set_value('valeur')
>>> var.set_value('defaut', default=True)
>>> var.val
['valeur']
>>> var.valdefault
['defaut']
>>> var.description = 'variable de test'
>>> var.description
'variable de test'
>>>

il est possible de créer une variable Eole à l'aide
d'une factory :

>>> var2 = typeole.type_factory('string', 'test_string', valeole=["eole"], valdefault=["def"])
>>> var2.get_value()
['def']
>>>

des vérifications de type sont faites au moment du *set_value()*

collecte des variables créole
-----------------------------

collecte
    Récupération de variables qui serviront à la constitution du dictionnaire Eole

Les données du dictionnaire sont collectées à partir de différents fichiers dans un premier format XML.

sur une machine cible, une collecte des variables eole est faite avec parse_dico_::

    from creole.parsedico import parse_dico
    parse_dico()

.. ce test n'est pas lancé car il peut y avoir un dico sur le poste
   de travail

.. _parse_dico: api/creole.parsedico-module.html

Le dictionnaire créole est vide. Pour le remplir, il faut
récupérer des données depuis un fichier xml initial::

    my_dict = EoleDict()
    my_dict.read(join('/etc/eole/', 'eole.xml'))

.. TODO: read_string(self, xml_string)

Utilisation du dictionnaire
---------------------------

dictionnaire
    fichier au format xml contenant :

    - une liste de fichiers
    - une liste de variables

famille
    Il s'agit d'un regroupement de variables utilisé pour la saisie : on parle alors de famille de variables

groupe
    Il s'agit de variables de type `liste` dont les éléments sont liés aux éléments correspondants des autres :
    eth[2] aura un lien avec netmask[2] et network[2].

Plutôt que d'utiliser `parsedico`, construisons un dictionnaire creole EoleDict_ :

>>> from creole import cfgparser
>>> from creole import typeole
>>>
>>> dico = cfgparser.EoleDict()
>>> dico.variables['ip_eth'] = typeole.type_factory('string', 'ip_eth', val=['ip0', 'ip1', 'ip2'])
>>> dico.variables['nom_etab'] = typeole.type_factory('string', 'nom_etab', val=['etab'])
>>> dico.variables['vrai'] = typeole.type_factory('boolean', 'vrai', val=[True])
>>> dico.variables['faux'] = typeole.type_factory('string', 'faux', val=['faux'])
>>> dico.variables['toto'] = typeole.type_factory('string', 'toto', val=['toto'])

voici comment accéder aux variables créole

>>> assert dico.get_value('ip_eth') == ['ip0', 'ip1', 'ip2']
>>> assert dico.get_value('nom_etab') == ['etab']

.. _EoleDict : api/creole.cfgparser.EoleDict-class.html


variables de template
---------------------

lorsqu'on utilise un appel de bas niveau de traitement de template,
c'est-à-dire l'appel direct à la
méthode process_ d'un template, il faut vérifier qu'une variable
est bien instanciée avec le bon contexte de dictionnaire :

.. _process: api/creole.template.Template-class.html

>>> from creole.cfgparser import EoleDict
>>> from creole import typeole
>>> from creole.template import Template
>>> dico = EoleDict()
>>> dico.variables['toto'] = typeole.type_factory('string',
...     'toto', val=['toto'], context=dico)
>>> t = Template('data/dst/test.tmpl', templatedir= 'data/src')
>>> t.verify()
>>> t.process(dico)
>>> f = open('data/dst/test.tmpl')
>>> res = f.read()
>>> f.close()
>>> assert 'titi' not in res
>>> dico.set_value('toto', 'titi')
>>> t.process(dico)
>>> f = open('data/dst/test.tmpl')
>>> res = f.read()
>>> f.close()
>>> assert 'titi' in res

le contexte `dico` est passé à la variable `toto`::

    dico.variables['toto'] = typeole.type_factory('string',
                                                  'toto', val=['toto'], context=dico)

variables automatiques
----------------------

variable automatique
    variable présente dans le dictionnaire xml mais pas dans le fichier *.ini* de configuration.
    la valeur de cette variable (son appel à *.get_value()*) est soumise à une fonction de traitement
    spécifiée dans le xml, qui calcule la variable au lieu de formater sa valeur.

Une variable automatique simple n'est pas traitée différemment d'une variable dont la valeur est présente dans le dictionnaire et qui est soumise à une condition de vérification de sa valeur. Simplement, aucune vérification n'est effectuée et la valeur est calculée.

déclaration de la variable::

    <variable name='server_mem' type='string' description='memoire du serveur' auto='True' />

déclaration de la fonction de remplissage::

    <fill name='server_mem' target='server_mem' />

deux fonctions strictement automatiques sont implémentées : `server_mem` et `kernel_version`

variable semi-automatique
    variable remplie automatiquement dans le cas d'une condition sur une autre variable ;
    si cette condition n'est pas remplie, c'est l'utilisateur qui la remplit (ou une autre fonction).
    la condition est traitée à deux niveaux, dans la fonction de remplissage et au niveau de l'affichage.
    On remplit donc deux fonctions pour ce conditionnement (une fonction fill avec la variable
    conditionnante en paramètre et une fonction condition qui conditionne l'affichage de la variable).
    exemple : récupération des adresses eth dans le cas où l'on n'a pas de dhcp.

déclaration de la variable semi-auto::

    <variable name='eth0' type='string' auto='True'/>

déclaration de la variable qui définit la condition::

    <variable name='dhcp' type='boolean' description='Activation du dhcp' >
        <value>non</value>
    </variable>
    <check name='valid_enum' target='dhcp'>
        <param>['oui','non']</param>
    </check>

déclaration de la fonction de contrôle d'automatisme, la variable eth0 est remplie automatiquement par la fonction
*auto_eth* si le paramètre dhcp est égal à la condition *oui*::

    <fill name='auto_eth' target='eth0'>
        <param>eth0</param>
        <param name='condition'>oui</param>
        <param type='eole' name='parametre'>dhcp</param>
    </fill>

déclaration de la fonction de contrôle d'éditabilité::

    <condition name='hidden_if_in' source='dhcp'>
        <param>oui</param>
        <target type='variable'>eth0</target>
    </condition>

pour l'instant sont disponibles auto_eth, auto_netmask, auto_broadcast et auto_network.
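To make the semi-automatic mechanism described above more concrete, here is an illustrative Python model of what a fill such as *auto_eth* conditioned on *dhcp* amounts to; it is only a sketch of the behaviour, not the actual creole implementation::

    def fill_semi_auto(user_value, parametre, condition, compute):
        # the value is computed only when the conditioning parameter
        # matches the condition; otherwise the user-supplied value is kept
        if parametre == condition:
            return compute()
        return user_value

    # eth0 is computed automatically only when dhcp == 'oui'
    eth0 = fill_semi_auto(user_value='192.168.0.10',
                          parametre='non',          # current value of dhcp
                          condition='oui',
                          compute=lambda: '10.0.0.1')
    assert eth0 == '192.168.0.10'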
8
doc/xml.txt
Normal file
@@ -0,0 +1,8 @@
le fichier de configuration créole
==================================

format xml
----------

Pour plus de documentation sur le format xml du dictionnaire créole,
se référer à la documentation de l'éditeur xml créole (*exc*)
187
en.man8/Maj-Auto.8
Normal file
@@ -0,0 +1,187 @@
.\"
|
||||
.\" Manual page for Maj-Auto command
|
||||
.\"
|
||||
.TH Maj-Auto 8 "2016 September" "Maj-Auto 2.6.0" "Creole command - EOLE"
|
||||
|
||||
.SH NAME
|
||||
Maj-Auto \- Automatic update for EOLE servers
|
||||
|
||||
.SH SYNOPSIS
|
||||
.SY Maj-Auto
|
||||
.OP \-l {debug,info,warning,error,critical}
|
||||
.OP \-v
|
||||
.OP \-d
|
||||
.OP \-h
|
||||
.OP \-n
|
||||
.OP \-f
|
||||
.OP \-F
|
||||
.OP \-s
|
||||
.OP \-C\ |\ \-D [{eole,envole}\ [{eole,envole}\ ...]]
|
||||
.OP \-r
|
||||
.OP \-R
|
||||
.OP \-\-download
|
||||
.OP \-S \fIEOLE_MIRROR\fR
|
||||
.OP \-U \fIUBUNTU_MIRROR\fR
|
||||
.OP \-V \fIENVOLE_MIRROR\fR
|
||||
.OP \-c
|
||||
.OP \-W
|
||||
.OP \-i
|
||||
.YS
|
||||
.SH DESCRIPTION
|
||||
.B Maj-Auto
|
||||
command allows you to manually initiate the update of all packages changed since the release of the latest stable version.
|
||||
.br
|
||||
You benefit from security updates and critical bugfixes, but not the latest improvements.
|
||||
.br
|
||||
To take advantage of feature additions to the current version of the server, use the \fBMaj-Release\fR command.
|
||||
.br
|
||||
For this, the apt-get sources file must be set up properly and the network must be operational (\fBdiagnose\fR command).
|
||||
.br
|
||||
Mirror address is :
|
||||
\fIhttp://eole.ac-dijon.fr/eole\fB/eole\fR : EOLE repository
|
||||
.RS 48
|
||||
.br
|
||||
\fB/ubuntu\fR : Ubuntu repository
|
||||
.br
|
||||
\fB/envole\fR : Envole repository
|
||||
.SH OPTIONS
|
||||
The following options are supported:
|
||||
.TP
|
||||
\fB\-h\fR, \fB\-\-help\fR
|
||||
show this help message and exit
|
||||
.TP
|
||||
\fB\-n\fR, \fB\-\-dry\-run\fR
|
||||
run in dry\-run mode (force to True when using Query\-Auto).
|
||||
.TP
|
||||
\fB\-f\fR, \fB\-\-force\fR
|
||||
bypass Zephir authorizations.
|
||||
.TP
|
||||
\fB\-F\fR, \fB\-\-force_update\fR
|
||||
update your server without any confirmation.
|
||||
.TP
|
||||
\fB\-s\fR, \fB\-\-simulate\fR
|
||||
ask apt\-get to simulate packages installation
|
||||
.TP
|
||||
\fB\-C\fR [{eole,envole} [{eole,envole} ...]], \fB\-\-candidat\fR [{eole,envole} [{eole,envole} ...]]
|
||||
use testing packages.
|
||||
.TP
|
||||
\fB\-D\fR [{eole,envole} [{eole,envole} ...]], \fB\-\-devel\fR [{eole,envole} [{eole,envole} ...]]
|
||||
use development packages.
|
||||
.TP
|
||||
\fB\-r\fR, \fB\-\-reconfigure\fR
|
||||
run reconfigure on successful upgrade.
|
||||
.TP
|
||||
\fB\-R\fR, \fB\-\-reboot\fR
|
||||
run reconfigure on successful upgrade and reboot if
|
||||
necessary (implies \fB\-r\fR).
|
||||
.TP
|
||||
\fB\-\-download\fR
|
||||
only download packages in cache.
|
||||
.TP
|
||||
\fB\-S\fR EOLE_MIRROR, \fB\-\-eole\-mirror\fR EOLE_MIRROR
|
||||
EOLE repository server.
|
||||
.TP
|
||||
\fB\-U\fR UBUNTU_MIRROR, \fB\-\-ubuntu\-mirror\fR UBUNTU_MIRROR
|
||||
Ubuntu repository server.
|
||||
.TP
|
||||
\fB\-V\fR \fIENVOLE_MIRROR\fR, \fB\-\-envole\-mirror\fR \fIENVOLE_MIRROR\fR
|
||||
Envole repository server.
|
||||
.TP
|
||||
\fB\-c\fR, \fB\-\-cdrom\fR
|
||||
use CDROM as source.
|
||||
.TP
|
||||
\fB\-W\fR
|
||||
specific output for EAD.
|
||||
.TP
|
||||
\fB\-i\fR, \fB\-\-ignore\fR
|
||||
ignore local configuration if creoled not responding.
|
||||
.SS "logging:"
|
||||
.TP
|
||||
\fB\-l\fR {debug,info,warning,error,critical}, \fB\-\-log\-level\fR {debug,info,warning,error,critical}
|
||||
Log level
|
||||
.TP
|
||||
\fB\-v\fR, \fB\-\-verbose\fR
|
||||
Verbose mode, equivalent to -l info
|
||||
.TP
|
||||
\fB\-d\fR, \fB\-\-debug\fR
|
||||
Debug mode, equivalent to -l debug
|
||||
.SH EXAMPLES
|
||||
.TP
|
||||
Use testing packages for EOLE repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -C eole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use testing packages for Envole repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -C envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use testing packages for EOLE and Envole repositories and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -C eole envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use testing packages for all repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -C\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for EOLE repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -D eole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for Envole repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -D envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for EOLE and Envole repositories and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -D eole envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for all repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -D\fP
|
||||
.fi
|
||||
.RE
|
||||
.SH "SEE ALSO"
|
||||
.B Maj-Cd
|
||||
(8),
|
||||
.B diagnose
|
||||
(8),
|
||||
.B Query-Auto
|
||||
(8),
|
||||
.B apt-eole
|
||||
(8),
|
||||
.B apt-get
|
||||
(8),
|
||||
.B creole
|
||||
(8).
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
To report a bug, check the following address: \fIhttps://dev-eole.ac-dijon.fr/projects/eole/wiki/Accueil\fR
|
||||
|
||||
.SH "AUTHORS"
|
||||
.PP
|
||||
.B EOLE team
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
.\" Maj-Auto.8 2.5.2
|
52
en.man8/Maj-Release.8
Normal file
@@ -0,0 +1,52 @@
.\"
|
||||
.\" Manual page for Maj-Release command
|
||||
.\"
|
||||
.TH Maj-Release 8 "2015 December" "Maj-Release 2.5.0" "Creole command - EOLE"
|
||||
|
||||
.SH NAME
|
||||
Maj-Release \- Automatic release update for EOLE servers
|
||||
|
||||
.SH DESCRIPTION
|
||||
.B Maj-Release
|
||||
command allows you to manually initiate the release update to the latest stable release.
|
||||
.br
|
||||
It is not an upgrade to a new version.
|
||||
.br
|
||||
You benefit from the latest improvements for the current server version as well as updates and security bugfixes.
|
||||
.br
|
||||
For this, the apt-get sources file must be set up properly and the network must be operational (\fBdiagnose\fP command).
|
||||
.br
|
||||
Mirror address is :
|
||||
\fIhttp://eole.ac-dijon.fr/eole\fB/eole\fR : EOLE repository
|
||||
.RS 48
|
||||
.br
|
||||
\fB/ubuntu\fR : Ubuntu repository
|
||||
.br
|
||||
\fB/envole\fR : Envole repository
|
||||
.SH "SEE ALSO"
|
||||
.B Maj-Auto
|
||||
(8),
|
||||
.B Maj-Cd
|
||||
(8),
|
||||
.B diagnose
|
||||
(8),
|
||||
.B Query-Auto
|
||||
(8),
|
||||
.B Upgrade-Auto
|
||||
(8),
|
||||
.B apt-eole
|
||||
(8),
|
||||
.B apt-get
|
||||
(8),
|
||||
.B creole
|
||||
(8).
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
To report a bug, check the following address: \fIhttps://dev-eole.ac-dijon.fr/projects/eole/wiki/Accueil\fR
|
||||
|
||||
.SH "AUTHORS"
|
||||
.PP
|
||||
.B EOLE team
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
.\" Maj-Release.8 2.5.0
|
185
en.man8/Query-Auto.8
Normal file
@@ -0,0 +1,185 @@
.\"
|
||||
.\" Manual page for Query-Auto command
|
||||
.\"
|
||||
.TH Query-Auto 8 "2015 September" "Query-Auto 2.6.0" "Creole command - EOLE"
|
||||
|
||||
.SH NAME
|
||||
Query-Auto \- Automatic update for EOLE servers
|
||||
|
||||
.SH SYNOPSIS
|
||||
.SY Query-Auto
|
||||
.OP \-l {debug,info,warning,error,critical}
|
||||
.OP \-v
|
||||
.OP \-d
|
||||
.OP \-h
|
||||
.OP \-n
|
||||
.OP \-f
|
||||
.OP \-F
|
||||
.OP \-s
|
||||
.OP \-C\ |\ \-D [{eole,envole}\ [{eole,envole}\ ...]]
|
||||
.OP \-r
|
||||
.OP \-R
|
||||
.OP \-\-download
|
||||
.OP \-S \fIEOLE_MIRROR\fR
|
||||
.OP \-U \fIUBUNTU_MIRROR\fR
|
||||
.OP \-V \fIENVOLE_MIRROR\fR
|
||||
.OP \-c
|
||||
.OP \-W
|
||||
.OP \-i
|
||||
.YS
|
||||
.SH DESCRIPTION
|
||||
.B Query-Auto
|
||||
command allows you to manually initiate the update of all packages changed since the release of the latest stable version.
|
||||
.br
|
||||
You benefit from the latest improvements, updates and security bugfixes.
|
||||
.br
|
||||
For this, the apt-get sources file must be set up properly and the network must be operational (\fBdiagnose\fP command).
|
||||
.br
|
||||
Mirror address is :
|
||||
\fIhttp://eole.ac-dijon.fr/eole\fB/eole\fR : EOLE repository
|
||||
.RS 48
|
||||
.br
|
||||
\fB/ubuntu\fR : Ubuntu repository
|
||||
.br
|
||||
\fB/envole\fR : Envole repository
|
||||
.SH OPTIONS
|
||||
The following options are supported:
|
||||
.TP
|
||||
\fB\-h\fR, \fB\-\-help\fR
|
||||
show this help message and exit
|
||||
.TP
|
||||
\fB\-n\fR, \fB\-\-dry\-run\fR
|
||||
run in dry\-run mode (force to True when using Query\-Auto).
|
||||
.TP
|
||||
\fB\-f\fR, \fB\-\-force\fR
|
||||
bypass Zephir authorizations.
|
||||
.TP
|
||||
\fB\-F\fR, \fB\-\-force_update\fR
|
||||
update your server without any confirmation.
|
||||
.TP
|
||||
\fB\-s\fR, \fB\-\-simulate\fR
|
||||
ask apt\-get to simulate packages installation
|
||||
.TP
|
||||
\fB\-C\fR [{eole,envole} [{eole,envole} ...]], \fB\-\-candidat\fR [{eole,envole} [{eole,envole} ...]]
|
||||
use testing packages.
|
||||
.TP
|
||||
\fB\-D\fR [{eole,envole} [{eole,envole} ...]], \fB\-\-devel\fR [{eole,envole} [{eole,envole} ...]]
|
||||
use development packages.
|
||||
.TP
|
||||
\fB\-r\fR, \fB\-\-reconfigure\fR
|
||||
run reconfigure on successful upgrade.
|
||||
.TP
|
||||
\fB\-R\fR, \fB\-\-reboot\fR
|
||||
run reconfigure on successful upgrade and reboot if
|
||||
necessary (implies \fB\-r\fR).
|
||||
.TP
|
||||
\fB\-\-download\fR
|
||||
only download packages in cache.
|
||||
.TP
|
||||
\fB\-S\fR EOLE_MIRROR, \fB\-\-eole\-mirror\fR EOLE_MIRROR
|
||||
EOLE repository server.
|
||||
.TP
|
||||
\fB\-U\fR UBUNTU_MIRROR, \fB\-\-ubuntu\-mirror\fR UBUNTU_MIRROR
|
||||
Ubuntu repository server.
|
||||
.TP
|
||||
\fB\-V\fR \fIENVOLE_MIRROR\fR, \fB\-\-envole\-mirror\fR \fIENVOLE_MIRROR\fR
|
||||
Envole repository server.
|
||||
.TP
|
||||
\fB\-c\fR, \fB\-\-cdrom\fR
|
||||
use CDROM as source.
|
||||
.TP
|
||||
\fB\-W\fR
|
||||
specific output for EAD.
|
||||
.TP
|
||||
\fB\-i\fR, \fB\-\-ignore\fR
|
||||
ignore local configuration if creoled not responding.
|
||||
.SS "logging:"
|
||||
.TP
|
||||
\fB\-l\fR {debug,info,warning,error,critical}, \fB\-\-log\-level\fR {debug,info,warning,error,critical}
|
||||
Log level
|
||||
.TP
|
||||
\fB\-v\fR, \fB\-\-verbose\fR
|
||||
Verbose mode, equivalent to -l info
|
||||
.TP
|
||||
\fB\-d\fR, \fB\-\-debug\fR
|
||||
Debug mode, equivalent to -l debug
|
||||
.SH EXAMPLES
|
||||
.TP
|
||||
Use testing packages for EOLE repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -C eole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use testing packages for Envole repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -C envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use testing packages for EOLE and Envole repositories and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -C eole envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use testing packages for all repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -C\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for EOLE repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -D eole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for Envole repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -D envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for EOLE and Envole repositories and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -D eole envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for all repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -D\fP
|
||||
.fi
|
||||
.RE
|
||||
.SH "SEE ALSO"
|
||||
.B Maj-Cd
|
||||
(8),
|
||||
.B diagnose
|
||||
(8),
|
||||
.B Query-Auto
|
||||
(8),
|
||||
.B apt-eole
|
||||
(8),
|
||||
.B apt-get
|
||||
(8),
|
||||
.B creole
|
||||
(8).
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
To report a bug, check the following address: \fIhttps://dev-eole.ac-dijon.fr/projects/eole/wiki/Accueil\fR
|
||||
|
||||
.SH "AUTHORS"
|
||||
.PP
|
||||
.B EOLE team
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
.\" Query-Auto.8 2.5.2
|
48
en.man8/Upgrade-Auto.8
Normal file
@@ -0,0 +1,48 @@
.\"
|
||||
.\" Manual page for Maj-Release command
|
||||
.\"
|
||||
.TH Upgrade-Auto 8 "2015 December" "Version 2.4.2" "Creole command - EOLE"
|
||||
|
||||
.SH NAME
|
||||
Upgrade-Auto \- EOLE distribution upgrade tool
|
||||
|
||||
.SH DESCRIPTION
|
||||
.B Upgrade-Auto
|
||||
command allows you to manually initiate a module upgrade to the latest stable version.
|
||||
.br
|
||||
You benefit from the latest improvements as well as updates and security bugfixes.
|
||||
.br
|
||||
For this, the apt-get sources file must be set up properly and the network must be operational (\fBdiagnose\fP command).
|
||||
.br
|
||||
Mirror address is :
|
||||
\fIhttp://eole.ac-dijon.fr/eole\fB/eole\fR : EOLE repository
|
||||
.RS 48
|
||||
.br
|
||||
\fB/ubuntu\fR : Ubuntu repository
|
||||
.br
|
||||
\fB/envole\fR : Envole repository
|
||||
.SH "SEE ALSO"
|
||||
.B Maj-Auto
|
||||
(8),
|
||||
.B Maj-Cd
|
||||
(8),
|
||||
.B diagnose
|
||||
(8),
|
||||
.B Query-Auto
|
||||
(8),
|
||||
.B apt-eole
|
||||
(8),
|
||||
.B apt-get
|
||||
(8),
|
||||
.B creole
|
||||
(8).
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
To report a bug, check the following address: \fIhttps://dev-eole.ac-dijon.fr/projects/eole/wiki/Accueil\fR
|
||||
|
||||
.SH "AUTHORS"
|
||||
.PP
|
||||
.B EOLE team
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
.\" Upgrade-Auto.8 2.4.2
|
73
en.man8/maj-auto.8
Normal file
@@ -0,0 +1,73 @@
.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.40.4.
|
||||
.TH MAJ-AUTO "1" "October 2014" "Maj-Auto 2.4.1" "User Commands"
|
||||
.SH NAME
|
||||
Maj-Auto \- manual page for Maj-Auto 2.4.1
|
||||
.SH DESCRIPTION
|
||||
usage: Maj\-Auto|Query\-Auto [\-h] [\-c CONTAINER]
|
||||
.IP
|
||||
[\-l {debug,info,warning,error,critical}] [\-v] [\-d]
|
||||
[\-n] [\-f] [\-C | \fB\-D]\fR [\-r] [\-R] [\-\-download]
|
||||
[\-S EOLE_MIRROR] [\-U UBUNTU_MIRROR] [\-W]
|
||||
.PP
|
||||
Manage EOLE server automatic update
|
||||
.SS "optional arguments:"
|
||||
.TP
|
||||
\fB\-h\fR, \fB\-\-help\fR
|
||||
show this help message and exit
|
||||
.TP
|
||||
\fB\-n\fR, \fB\-\-dry\-run\fR
|
||||
run in dry\-run mode (force to True when using QueryAuto).
|
||||
.TP
|
||||
\fB\-f\fR, \fB\-\-force\fR
|
||||
bypass Zephir authorizations.
|
||||
.TP
|
||||
\fB\-C\fR, \fB\-\-candidat\fR
|
||||
use testing packages.
|
||||
.TP
|
||||
\fB\-D\fR, \fB\-\-devel\fR
|
||||
use development packages.
|
||||
.TP
|
||||
\fB\-r\fR, \fB\-\-reconfigure\fR
|
||||
run reconfigure on successful upgrade.
|
||||
.TP
|
||||
\fB\-R\fR, \fB\-\-reboot\fR
|
||||
run reconfigure on successful upgrade and reboot if
|
||||
necessary (implies \fB\-r\fR).
|
||||
.TP
|
||||
\fB\-\-download\fR
|
||||
only download packages in cache.
|
||||
.TP
|
||||
\fB\-S\fR EOLE_MIRROR, \fB\-\-eole\-mirror\fR EOLE_MIRROR
|
||||
EOLE repository server.
|
||||
.TP
|
||||
\fB\-U\fR UBUNTU_MIRROR, \fB\-\-ubuntu\-mirror\fR UBUNTU_MIRROR
|
||||
Ubuntu repository server.
|
||||
.TP
|
||||
\fB\-W\fR
|
||||
specific output for EAD.
|
||||
.SS "container:"
|
||||
.TP
|
||||
\fB\-c\fR CONTAINER, \fB\-\-container\fR CONTAINER
|
||||
Name of LXC container
|
||||
.SS "logging:"
|
||||
.TP
|
||||
\fB\-l\fR {debug,info,warning,error,critical}, \fB\-\-log\-level\fR {debug,info,warning,error,critical}
|
||||
Log level
|
||||
.TP
|
||||
\fB\-v\fR, \fB\-\-verbose\fR
|
||||
Verbose mode
|
||||
.TP
|
||||
\fB\-d\fR, \fB\-\-debug\fR
|
||||
Debug mode
|
||||
.SH "SEE ALSO"
|
||||
The full documentation for
|
||||
.B Maj-Auto
|
||||
is maintained as a Texinfo manual. If the
|
||||
.B info
|
||||
and
|
||||
.B Maj-Auto
|
||||
programs are properly installed at your site, the command
|
||||
.IP
|
||||
.B info Maj-Auto
|
||||
.PP
|
||||
should give you access to the complete manual.
|
1
en.man8/upgrade-auto.8
Symbolic link
@@ -0,0 +1 @@
Upgrade-Auto.8
200
eole.mk
Normal file
@@ -0,0 +1,200 @@
#
|
||||
# NE PAS EDITER CE FICHIER
|
||||
#
|
||||
# Utiliser <appli>.mk à inclure à la fin de Makefile
|
||||
|
||||
#################
|
||||
# Sanity checks #
|
||||
#################
|
||||
|
||||
ifeq (, $(DESTDIR))
|
||||
$(warning $$(DESTDIR) is empty, installation will be done in /)
|
||||
endif
|
||||
|
||||
ifeq (, $(filter-out XXX-XXX, $(strip $(SOURCE))))
|
||||
$(error $$(SOURCE) variable has incorrect value '$(SOURCE)')
|
||||
endif
|
||||
|
||||
#########################
|
||||
# Variables definitions #
|
||||
#########################
|
||||
|
||||
INSTALL := install
|
||||
INSTALL_DATA := install -m 644
|
||||
INSTALL_PROGRAM := install -m 755
|
||||
INSTALL_DIRECTORY := install -m 755 -d
|
||||
INSTALL_RECURSIVE := cp -dr --no-preserve=ownership
|
||||
|
||||
# Base
|
||||
eole_DIR := $(DESTDIR)/usr/share/eole
|
||||
|
||||
ifeq ($(strip $(EOLE_VERSION)), 2.3)
|
||||
diagnose_PROG_DIR := $(eole_DIR)/diagnose/module
|
||||
else
|
||||
diagnose_PROG_DIR := $(eole_DIR)/diagnose/
|
||||
endif
|
||||
|
||||
# Creole
|
||||
creole_DIR := $(eole_DIR)/creole
|
||||
dicos_DATA_DIR := $(creole_DIR)/dicos
|
||||
tmpl_DATA_DIR := $(creole_DIR)/distrib
|
||||
pretemplate_PROG_DIR := $(eole_DIR)/pretemplate
|
||||
posttemplate_PROG_DIR := $(eole_DIR)/posttemplate
|
||||
postservice_PROG_DIR := $(eole_DIR)/postservice
|
||||
firewall_DATA_DIR := $(eole_DIR)/firewall
|
||||
bareos_restore_DATA_DIR := $(eole_DIR)/bareos/restore
|
||||
bareos_fichier_DATA_DIR := $(DESTDIR)/etc/bareos/bareosfichiers.d
|
||||
schedule_scripts_PROG_DIR := $(eole_DIR)/schedule/scripts
|
||||
extra_REC_DIR := $(creole_DIR)/extra
|
||||
|
||||
# Zéphir
|
||||
zephir_DATA_DIR := $(DESTDIR)/usr/share/zephir
|
||||
zephir_configs_DATA_DIR := $(zephir_DATA_DIR)/monitor/configs
|
||||
zephir_srv_DATA_DIR := $(zephir_configs_DATA_DIR)/services
|
||||
|
||||
# SSO
|
||||
sso_DATA_DIR := $(DESTDIR)/usr/share/sso
|
||||
sso_filtres_DATA_DIR := $(sso_DATA_DIR)/app_filters
|
||||
sso_user-info_DATA_DIR := $(sso_DATA_DIR)/user_infos
|
||||
|
||||
# EAD
|
||||
ead_DATA_DIR := $(DESTDIR)/usr/share/ead2/backend/config
|
||||
ead_actions_DATA_DIR := $(ead_DATA_DIR)/actions
|
||||
ead_perms_DATA_DIR := $(ead_DATA_DIR)/perms
|
||||
ead_roles_DATA_DIR := $(ead_DATA_DIR)/roles
|
||||
|
||||
# Program libraries goes under /usr/lib/<PROGRAM>/
|
||||
lib_$(SOURCE)_DATA_DIR := $(DESTDIR)/usr/lib/$(SOURCE)
|
||||
|
||||
# Scripts Eole
|
||||
scripts_PROG_DIR := $(eole_DIR)/sbin
|
||||
lib_eole_DATA_DIR := $(DESTDIR)/usr/lib/eole
|
||||
|
||||
# LDAP
|
||||
ldap_passwords_DATA_DIR := $(eole_DIR)/annuaire/password_files
|
||||
|
||||
# LXC
|
||||
lxc_DATA_DIR := $(eole_DIR)/lxc
|
||||
lxc_fstab_DATA_DIR := $(lxc_DATA_DIR)/fstab
|
||||
lxc_hosts_DATA_DIR := $(lxc_DATA_DIR)/hosts
|
||||
|
||||
# SQL
|
||||
sql_DATA_DIR := $(eole_DIR)/mysql/$(SOURCE)
|
||||
sql_gen_DATA_DIR := $(sql_DATA_DIR)/gen
|
||||
sql_updates_DATA_DIR := $(sql_DATA_DIR)/updates
|
||||
|
||||
sql_conf_gen_DATA_DIR := $(eole_DIR)/applications/gen
|
||||
sql_conf_passwords_DATA_DIR := $(eole_DIR)/applications/passwords
|
||||
sql_conf_updates_DATA_DIR := $(eole_DIR)/applications/updates/$(SOURCE)
|
||||
|
||||
# Certifs
|
||||
certs_DATA_DIR := $(eole_DIR)/certs
|
||||
|
||||
# Logrotate
|
||||
logrotate_DATA_DIR := $(DESTDIR)/etc/logrotate.d
|
||||
|
||||
|
||||
# Python modules
|
||||
ifneq ($(DESTDIR),)
|
||||
PYTHON_OPTS := --root $(DESTDIR)
|
||||
endif
|
||||
|
||||
# Translation
|
||||
TRANSLATION_SRC := translation
|
||||
TRANSLATION_DEST := $(DESTDIR)/usr/share/locale
|
||||
PO_FILES = $(wildcard $(TRANSLATION_SRC)/*/*.po)
|
||||
MO_FOLDERS = $(addprefix $(TRANSLATION_DEST), $(addsuffix LC_MESSAGES,$(subst $(TRANSLATION_SRC),,$(dir $(PO_FILES)))))
|
||||
|
||||
#############################################
|
||||
# Common directories and files installation #
|
||||
#############################################
|
||||
|
||||
all:
|
||||
|
||||
$(MO_FOLDERS):
|
||||
$(INSTALL_DIRECTORY) $@
|
||||
|
||||
$(PO_FILES): $(MO_FOLDERS)
|
||||
msgfmt -o $(TRANSLATION_DEST)$(subst $(TRANSLATION_SRC),,$(addsuffix LC_MESSAGES,$(dir $@)))/$(notdir $(@:.po=.mo)) $@
|
||||
|
||||
install-lang: $(PO_FILES)
|
||||
|
||||
install:: install-dirs install-files install-lang
|
||||
|
||||
# $1 = command to run
|
||||
# $2 = source directory
|
||||
# $3 = destination directory
|
||||
define fc_install_file
|
||||
if [ -d $2 ]; then \
|
||||
for file in `ls -1 $2/`; do \
|
||||
$1 $2/$$file $3 || true; \
|
||||
done; \
|
||||
fi
|
||||
endef
|
||||
|
||||
##
|
||||
## Directory creation
|
||||
##
|
||||
|
||||
# use % to catch local name in $*
|
||||
# data, program and recursive directory require a corresponding
|
||||
# directory in local sources
|
||||
%_DATA_DIR %_PROG_DIR %REC_DIR:
|
||||
test ! -d $(subst _,/,$*) || $(INSTALL_DIRECTORY) $($@)
|
||||
|
||||
# Create the directory referenced by the variable without a local one.
|
||||
%_DIR:
|
||||
$(INSTALL_DIRECTORY) $($@)
|
||||
|
||||
##
|
||||
## Install files present directly under data, program and recursive directories
|
||||
##
|
||||
|
||||
# $* : name of variable
|
||||
# $($*): value of variable
|
||||
%-instdata:
|
||||
$(call fc_install_file, $(INSTALL_DATA), $(subst _,/,$(subst _DATA_DIR,,$*)), $($*))
|
||||
|
||||
%-instprog:
|
||||
$(call fc_install_file, $(INSTALL_PROGRAM), $(subst _,/,$(subst _PROG_DIR,,$*)), $($*))
|
||||
|
||||
%-instrec:
|
||||
$(call fc_install_file, $(INSTALL_RECURSIVE), $(subst _,/,$(subst _REC_DIR,,$*)), $($*))
|
||||
|
||||
|
||||
# Use second expansion as variables may be created in included
|
||||
# Makefiles
|
||||
.SECONDEXPANSION:
|
||||
|
||||
# List of all directories
|
||||
installdirs_LIST = $(foreach V, $(filter %_DIR, $(.VARIABLES)), \
|
||||
$(if $(filter file, $(origin $(V))), \
|
||||
$(V)))
|
||||
# List of data directories
|
||||
installdata_LIST = $(filter %_DATA_DIR, $(installdirs_LIST))
|
||||
# List of program directories
|
||||
installprog_LIST = $(filter %_PROG_DIR, $(installdirs_LIST))
|
||||
# List of recursive directories
|
||||
installrec_LIST = $(filter %_REC_DIR, $(installdirs_LIST))
|
||||
|
||||
# Expand directories to create as dependency
|
||||
# Use double-colon to permit the user to define additional install-dirs
|
||||
install-dirs:: $$(installdirs_LIST)
|
||||
|
||||
# Expand files to install as dependency
|
||||
# Use double-colon to permit the user to define additional install-files
|
||||
install-files:: install-data-files install-prog-files install-rec-dirs
|
||||
|
||||
install-data-files: $$(patsubst %,%-instdata,$$(installdata_LIST))
|
||||
|
||||
install-prog-files: $$(patsubst %,%-instprog,$$(installprog_LIST))
|
||||
|
||||
install-rec-dirs: $$(patsubst %,%-instrec,$$(installrec_LIST))
|
||||
|
||||
# Installation of python modules
|
||||
ifeq ($(shell test -f setup.py && echo 0), 0)
|
||||
install-files::
|
||||
python3 setup.py install --no-compile --install-layout=deb $(PYTHON_OPTS)
|
||||
endif
|
||||
|
||||
.PHONY: install install-dirs install-files install-data-files install-prog-files install-rec-dirs
|
64
extra/schedule/00_schedule.xml
Normal file
@@ -0,0 +1,64 @@
<?xml version="1.0" encoding="utf-8"?>
|
||||
|
||||
<creole>
|
||||
<family_action name="Tâches planifiées"
|
||||
description="Gestion des tâches planifiées"
|
||||
color="#8cd98c"
|
||||
image="icons/appointment-new.svg">
|
||||
<action type="form"
|
||||
title="Tâches planifiées"
|
||||
save="True"
|
||||
description="Paramétrer les tâches planifiées (heure, jour)"
|
||||
image="icons/x-office-calendar.svg">
|
||||
<input>Programmer</input>
|
||||
<profile>ead_admin</profile>
|
||||
<ewtapp>ead</ewtapp>
|
||||
<tag>maj</tag>
|
||||
<tag>schedule</tag>
|
||||
</action>
|
||||
</family_action>
|
||||
<variables>
|
||||
<family name="schedule" description="Heure et jour d'exécution des tâches planifiées">
|
||||
<variable description="Heure" name='hour' type='number' auto_save='True'/>
|
||||
<variable description="Minute" name='minute' type='number' auto_save='True'/>
|
||||
<variable description="Jour des tâches hebdomadaires (1 : lundi)" name='weekday' type='number' auto_save='True'/>
|
||||
<variable description="Jour des tâches mensuelles la première semaine du mois (1 : lundi)" name='monthday' type='number' auto_save='True'/>
|
||||
</family>
|
||||
</variables>
|
||||
<constraints>
|
||||
<check name='valid_enum' target='schedule.schedule.weekday'>
|
||||
<param>[1, 2, 3, 4, 5, 6, 7]</param>
|
||||
</check>
|
||||
<check name='valid_enum' target='schedule.schedule.monthday'>
|
||||
<param>[1, 2, 3, 4, 5, 6, 7]</param>
|
||||
</check>
|
||||
<check name='valid_enum' target='schedule.schedule.hour'>
|
||||
<param>[1, 2, 3, 4, 5]</param>
|
||||
</check>
|
||||
<check name='valid_enum' target='schedule.schedule.minute'>
|
||||
<param type='python'>range(0, 60)</param>
|
||||
</check>
|
||||
<fill name="random_int" target='schedule.schedule.hour'>
|
||||
<param type='number'>1</param>
|
||||
<param type='number'>5</param>
|
||||
</fill>
|
||||
<fill name="random_int" target='schedule.schedule.minute'>
|
||||
<param type='number'>0</param>
|
||||
<param type='number'>59</param>
|
||||
</fill>
|
||||
<fill name="random_int" target='schedule.schedule.weekday'>
|
||||
<param type='number'>1</param>
|
||||
<param type='number'>7</param>
|
||||
</fill>
|
||||
<fill name="random_int" target='schedule.schedule.monthday'>
|
||||
<param type='number'>1</param>
|
||||
<param type='number'>7</param>
|
||||
<param name='exclude' type='eole'>schedule.schedule.weekday</param>
|
||||
</fill>
|
||||
<check name='valid_differ' target='schedule.schedule.monthday'>
|
||||
<param type='eole'>schedule.schedule.weekday</param>
|
||||
</check>
|
||||
</constraints>
|
||||
<help/>
|
||||
</creole>
|
||||
|
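As an aid to reading the constraints above, here is a small Python model (not creole code) of what the random_int fills and the valid_differ check express: hour drawn in 1..5, minute in 0..59, weekday in 1..7, and monthday drawn in 1..7 while excluding the weekday value::

    import random

    hour = random.randint(1, 5)
    minute = random.randint(0, 59)
    weekday = random.randint(1, 7)
    # monthday uses the same range but must differ from weekday
    monthday = random.choice([d for d in range(1, 8) if d != weekday])
    assert monthday != weekday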
12
extra/schedule/01_majauto.xml
Normal file
@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="utf-8"?>
|
||||
|
||||
<creole>
|
||||
<variables>
|
||||
<family name='majauto'
|
||||
description="Mise à jour automatique">
|
||||
<variable name="description" type="string" hidden="True"><value>Mise à jour du serveur</value></variable>
|
||||
<variable name="day" type="schedule" description="Périodicité d'exécution"><value>weekly</value></variable>
|
||||
<variable name="mode" type="schedulemod" hidden="True"><value>post</value></variable>
|
||||
</family>
|
||||
</variables>
|
||||
</creole>
|
13
extra/schedule/02_rebootauto.xml
Normal file
@@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
|
||||
|
||||
<creole>
|
||||
<variables>
|
||||
<family name='z_rebootauto'
|
||||
description="Redémarrage automatique"
|
||||
hidden="True">
|
||||
<variable name="description" type="string" hidden="True"><value>Redémarrage du serveur</value></variable>
|
||||
<variable name="day" type="schedule" description="Périodicité d'exécution"><value>none</value></variable>
|
||||
<variable name="mode" type="schedulemod" hidden="True"><value>post</value></variable>
|
||||
</family>
|
||||
</variables>
|
||||
</creole>
|
13
extra/schedule/03_shutdownauto.xml
Normal file
@@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
|
||||
|
||||
<creole>
|
||||
<variables>
|
||||
<family name="z_shutdownauto"
|
||||
description="Arrêt automatique"
|
||||
hidden="True">
|
||||
<variable name="description" type="string" hidden="True"><value>Arrêt du serveur</value></variable>
|
||||
<variable name="day" type="schedule" description="Périodicité d'exécution"><value>none</value></variable>
|
||||
<variable name="mode" type="schedulemod" hidden="True"><value>post</value></variable>
|
||||
</family>
|
||||
</variables>
|
||||
</creole>
|
13
extra/schedule/04_reconfigureauto.xml
Normal file
@@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
|
||||
|
||||
<creole>
|
||||
<variables>
|
||||
<family name="y_reconfigureauto"
|
||||
description="Reconfigure automatique"
|
||||
hidden="True">
|
||||
<variable name="description" type="string" hidden="True"><value>Reconfigure du serveur</value></variable>
|
||||
<variable name="day" type="schedule" description="Périodicité d'exécution"><value>none</value></variable>
|
||||
<variable name="mode" type="schedulemod" hidden="True"><value>post</value></variable>
|
||||
</family>
|
||||
</variables>
|
||||
</creole>
|
3
extra/schedule/sls/eole/cron.sls
Normal file
@@ -0,0 +1,3 @@
cron:
  eole.file:
    - name: /etc/cron.d/schedule
3
extra/schedule/sls/eole/init.sls
Normal file
@@ -0,0 +1,3 @@
include:
  - schedule.cron
  - schedule.manage
3
extra/schedule/sls/eole/manage.sls
Normal file
@@ -0,0 +1,3 @@
schedule:
  cmd.run:
    - name: /usr/share/eole/sbin/manage_schedule --apply
109
fr.man8/CreoleGet.8
Normal file
@@ -0,0 +1,109 @@
.\"
|
||||
.\" Page de manuel de la commande CreoleGet.
|
||||
.\"
|
||||
.TH CreoleGet 8 "04 Avril 2017" "Version 2.6.1" "Commande Creole - EOLE"
|
||||
|
||||
.SH NOM
|
||||
CreoleGet \- Récupération de la valeur d'une variable Creole
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B CreoleGet
|
||||
[
|
||||
.B OPTIONS
|
||||
]
|
||||
.B VARIABLE
|
||||
[
|
||||
.B DEFAULT
|
||||
]
|
||||
|
||||
.br
|
||||
.B CreoleGet
|
||||
[
|
||||
.B OPTIONS
|
||||
]
|
||||
.B --groups
|
||||
|
||||
.br
|
||||
.B CreoleGet
|
||||
[
|
||||
.B OPTIONS
|
||||
]
|
||||
.B --list
|
||||
|
||||
.br
|
||||
.B CreoleGet
|
||||
[
|
||||
.B OPTIONS
|
||||
]
|
||||
.B --reload
|
||||
|
||||
.br
|
||||
.B CreoleGet
|
||||
[
|
||||
.B OPTIONS
|
||||
]
|
||||
.B --reload-eol
|
||||
|
||||
.br
|
||||
.B CreoleGet
|
||||
.B \-h
|
||||
|
||||
.SH DESCRIPTION
|
||||
.B CreoleGet
|
||||
est un utilitaire très pratique pour récupérer la valeur d'une
|
||||
variable Creole
|
||||
|
||||
.SH ARGUMENTS
|
||||
.TP
|
||||
\fBVARIABLE\fP
|
||||
nom de la variable à lire
|
||||
.TP
|
||||
\fBDEFAULT\fP
|
||||
valeur à renvoyer en cas d'erreur (variable inconnue ou désactivée)
|
||||
|
||||
.SH OPTIONS
|
||||
Les options suivantes sont supportées:
|
||||
.TP
|
||||
\fB-d\fP
|
||||
active le mode de débogage
|
||||
.TP
|
||||
\fB-l\fP
|
||||
paramétrage du niveau de log (debug|info|warning|error|critical)
|
||||
.TP
|
||||
\fB-v\fP
|
||||
active le mode verbeux
|
||||
.TP
|
||||
\fB-h\fP
|
||||
Affiche l'aide
|
||||
|
||||
.SH ACTIONS
|
||||
.TP
|
||||
\fB--groups\fP
|
||||
liste les groupes de conteneurs
|
||||
|
||||
.TP
|
||||
\fB--list\fP
|
||||
liste l'ensemble des variables creole
|
||||
|
||||
.TP
|
||||
\fB--reload\fP
|
||||
recharge toute la configuration creole (dictionnaires et valeurs)
|
||||
|
||||
.TP
|
||||
\fB--reload-eol\fP
|
||||
recharge les valeurs de configuration creole
|
||||
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
Pour signaler un bogue, consultez l'adresse suivante \fIhttp://dev-eole.ac-dijon.fr/projects/creole\fR
|
||||
|
||||
.SH "AUTEURS"
|
||||
.PP
|
||||
.B Equipe EOLE
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
|
||||
.SH "VOIR AUSSI"
|
||||
.B creole
|
||||
(8).
|
||||
|
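For scripts that are not written in shell, the lookup described in this manual page can be wrapped around the CreoleGet command (sketch; the variable name in the example is a placeholder)::

    import subprocess

    def creole_get(variable, default=None):
        # read a creole variable through the CreoleGet command,
        # optionally falling back on a default value
        cmd = ['CreoleGet', variable]
        if default is not None:
            cmd.append(default)
        return subprocess.check_output(cmd).strip()

    # print(creole_get('nom_etab', 'inconnu'))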
57
fr.man8/CreoleLint.8
Normal file
@@ -0,0 +1,57 @@
.\"
|
||||
.\" Page de manuel de la commande CreoleLint.
|
||||
.\"
|
||||
.TH CreoleLint 8 "11 octobre 2013" "Version 2.4" "Commande Creole - EOLE"
|
||||
|
||||
.SH NOM
|
||||
CreoleLint \- Outil de validation des dictionnaires et templates Creole
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B CreoleLint
|
||||
[
|
||||
.B \-t TMPL DIR
|
||||
] [
|
||||
.B \-l info|warning|error
|
||||
] [
|
||||
.B \-n LINT_NAME
|
||||
] [
|
||||
.B \-d
|
||||
] [
|
||||
.B \-h
|
||||
]
|
||||
|
||||
.SH DESCRIPTION
|
||||
.B CreoleLint
|
||||
est un utilitaire très pratique pour valider la syntaxe du dictionnaire et des templates. L'outil effectue une série de tests dans le but de détecter les erreurs les plus fréquentes.
|
||||
.SH OPTIONS
|
||||
Les options suivantes sont supportées:
|
||||
.TP
|
||||
\fB-t\fP
|
||||
répertoire des templates
|
||||
.TP
|
||||
\fB-l\fP
|
||||
niveau des messages (info, warning ou error)
|
||||
.TP
|
||||
\fB-n\fP
|
||||
n'exécuter qu'un lint
|
||||
.TP
|
||||
\fB-d\fP
|
||||
dico-only, ne lance le lint que sur les dictionnaires (et pas sur les templates, donc)
|
||||
.TP
|
||||
\fB-h\fP
|
||||
Affiche l'aide
|
||||
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
Pour signaler un bogue, consultez l'adresse suivante \fIhttp://dev-eole.ac-dijon.fr/projects/creole\fR
|
||||
|
||||
.SH "AUTEURS"
|
||||
.PP
|
||||
.B Equipe EOLE
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
|
||||
.SH "VOIR AUSSI"
|
||||
.B creole
|
||||
(8).
|
||||
.\" Maj-Cd.8 1.0
|
Some files were not shown because too many files have changed in this diff.