for creole's zephir2 branch
This commit is contained in:
commit
841643e76e
|
@ -0,0 +1,4 @@
|
|||
# Backup and swap files
*~
*#
*.swp
|
|
@ -0,0 +1,24 @@
|
|||
################################
# Makefile for creole
################################

SOURCE=creole
EOLE_VERSION=2.7
EOLE_RELEASE=2.7.0

################################
# Beginning of do-not-edit zone
################################

include eole.mk
include apps.mk

################################
# End of do-not-edit zone
################################

# Application-specific Makefile rules, included only when the
# optional $(SOURCE).mk file exists.
ifneq (, $(strip $(wildcard $(SOURCE).mk)))
include $(SOURCE).mk
endif
|
|
@ -0,0 +1,64 @@
|
|||
#
# DO NOT EDIT THIS FILE
#
# See Makefile


##########################
# Envole web application #
##########################
ifneq (, $(filter oui web, $(PKGAPPS)))
#
# Sanity check: VERSION must be set to a real value, not the X.X placeholder
#
ifeq (, $(filter-out X.X, $(strip $(VERSION))))
$(error $$(VERSION) variable has incorrect value '$(VERSION)')
endif

# Where to store web application files
WEB_PATH := $(DESTDIR)/var/www/html

# Envole shared data
sharenvole_PROG_DIR := $(DESTDIR)/usr/share/envole/$(SOURCE)

src_$(SOURCE)-$(VERSION)_REC_DIR := $(WEB_PATH)/$(SOURCE)
src_plugins-$(VERSION)_REC_DIR := $(WEB_PATH)/$(SOURCE)/plugin
src_lang-$(VERSION)_REC_DIR := $(WEB_PATH)/$(SOURCE)/lang

endif

##########################
# EOLE flask application #
##########################
ifneq (, $(filter flask, $(PKGAPPS)))
#
# Sanity check: FLASK_MODULE must be set to a real value, not XXX
#
ifeq (, $(filter-out XXX, $(strip $(FLASK_MODULE))))
$(error $$(FLASK_MODULE) variable has incorrect value '$(FLASK_MODULE)')
endif

ifeq (, $(strip $(wildcard src/$(FLASK_MODULE).conf)))
$(error missing eoleflask configuration file 'src/$(FLASK_MODULE).conf')
endif

# Everything is related to the mount point extracted from the
# "MOUNT_POINT" key of the eoleflask JSON configuration file.
APPS_MOUNT_POINT := $(shell sed -ne 's|^"MOUNT_POINT"[[:space:]]*:[[:space:]]*"/\([^"]*\)",|\1|p' \
			src/$(FLASK_MODULE).conf)

ifeq (, $(strip $(APPS_MOUNT_POINT)))
$(error no "MOUNT_POINT" in eoleflask configuration file 'src/$(FLASK_MODULE).conf')
endif

# eole-flask configuration
src_DATA_DIR := $(DESTDIR)/etc/eole/flask/available

# Where to store flask application files
FLASK_PATH := $(eole_DIR)/flask/$(APPS_MOUNT_POINT)

# static files
src_$(FLASK_MODULE)_static_REC_DIR := $(FLASK_PATH)/static
src_$(FLASK_MODULE)_templates_REC_DIR := $(FLASK_PATH)/templates
src_$(FLASK_MODULE)_instance_REC_DIR := $(FLASK_PATH)/resources

endif
|
|
@ -0,0 +1,153 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""Run templatisation on a template name or file
|
||||
|
||||
`CreoleCat` support two modes:
|
||||
|
||||
- run on a template name with option -t: the name is looked up in
|
||||
``/usr/share/eole/creole/distrib/``. The output files are
|
||||
calculated unless you explicitely specify ``-o``.
|
||||
|
||||
- run on a file with options -s: this mode requires the use of
|
||||
``-o`` option.
|
||||
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
|
||||
from os.path import basename, join, split
|
||||
|
||||
from pyeole import scriptargs
|
||||
from pyeole.log import init_logging
|
||||
|
||||
from creole.template import CreoleTemplateEngine
|
||||
import creole.config as cfg
|
||||
from creole.client import CreoleClient, CreoleClientError
|
||||
from pyeole.ihm import only_root
|
||||
|
||||
only_root()
|
||||
|
||||
client = CreoleClient()
|
||||
|
||||
def parse_cmdline():
    """Parse the CreoleCat command line.

    Exactly one of ``--template`` / ``--source`` must be given, and
    ``--output`` must not overwrite the template or source file.

    :return: parsed options namespace with ``log_level`` adjusted.
    """
    parser = argparse.ArgumentParser(description="Instancie un template creole",
                                     parents=[scriptargs.container(),
                                              scriptargs.logging()])
    parser.add_argument("-t", "--template", metavar="NAME",
                        help=u"nom du fichier template creole présent "
                        "dans /usr/share/eole/creole/distrib")
    parser.add_argument("-s", "--source", metavar="PATH",
                        help=u"chemin d’un fichier template")
    parser.add_argument("-o", "--output", metavar="OUTPUTFILE",
                        help=u"chemin du fichier généré")

    opts = parser.parse_args()

    if (opts.template is None and opts.source is None) \
            or (opts.template and opts.source):
        # Bug fix: the two implicitly-concatenated literals were missing a
        # separating space ("options'--template'").
        parser.error("Vous devez spécifier une des options "
                     "'--template' ou '--source'.")

    if opts.source is not None and not os.access(opts.source, os.F_OK):
        parser.error("Fichier source inexistant"
                     " ou illisible: {0}".format(opts.source))

    if opts.output is None:
        if opts.source is not None:
            # Source mode without -o: empty output placeholder.
            opts.output = ""
    else:
        # Refuse to clobber the template or the source file.
        if opts.template is not None \
                and opts.output == join(cfg.distrib_dir, opts.template):
            parser.error("Le fichier de sortie ne peut écraser"
                         " le fichier template: {0}".format(opts.output))
        if opts.source is not None and opts.output == opts.source:
            parser.error("Le fichier de sortie ne peut écraser"
                         " le fichier source: {0}".format(opts.output))

    if opts.verbose:
        opts.log_level = 'info'
    if opts.debug:
        opts.log_level = 'debug'

    return opts
|
||||
|
||||
|
||||
def _find_file(name, ctx):
    """Look up an activated file entry named *name* in container context *ctx*.

    Returns the first matching filevar dict whose ``activate`` flag is
    true, or None when nothing matches.
    """
    grouped = client.to_grouped_lists(ctx['files'], keyname='source')
    for src, entries in grouped.items():
        if basename(src) == name and entries[0].get('activate', False):
            return entries[0]
|
||||
|
||||
|
||||
def main():
    """Set up the environment and run templatisation.

    Exits 0 on success, 1 on any error (logged according to --debug).
    """

    options = parse_cmdline()
    try:
        log = init_logging(level=options.log_level)

        engine = CreoleTemplateEngine()

        # Synthetic file description used in --source mode.
        filevar = {'source': options.source,
                   'name': options.output,
                   'full_name': options.output,
                   'activate': True,
                   'del_comment': u'',
                   'mkdir': False,
                   'rm': False,
                   }

        if options.container is not None:
            # Force container context
            groups = [client.get_container_infos(options.container)]
        elif options.output is not None:
            # Source without container: root context
            groups = [client.get_container_infos('root')]
        else:
            groups = []
            for group in client.get_groups():
                groups.append(client.get_group_infos(group))

        instanciated_files = []
        for group in groups:
            if filevar['source'] is not None:
                instanciated_files.append(filevar)
                engine.process(filevar, group)
            elif options.template is not None:
                found_file = _find_file(options.template, group)
                if found_file:
                    instanciated_files.append(found_file)
                    if options.output is None:
                        engine._instance_file(found_file, group)
                    else:
                        # Override output destination.
                        found_file['name'] = options.output
                        found_file['full_name'] = options.output
                        # Do not go through verify and change_properties.
                        engine._copy_to_template_dir(found_file)
                        engine.process(found_file, group)

        if not instanciated_files:
            # No file was instanciated at all.
            raise CreoleClientError("Fichier template inexistant:"
                                    " {0}".format(options.template))

    # Fix: 'except Exception, err' is Python-2-only syntax; 'as' works
    # on Python 2.6+ and Python 3.
    except Exception as err:
        if options.debug:
            log.debug(err, exc_info=True)
        else:
            log.error(err)
        sys.exit(1)
    sys.exit(0)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,130 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""Get a creole variable value.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import argparse
|
||||
|
||||
from creole.client import CreoleClient
|
||||
|
||||
from pyeole import scriptargs
|
||||
from pyeole.log import init_logging
|
||||
from pyeole.encode import normalize
|
||||
|
||||
# Command-line definition for CreoleGet.
_RETURN_VALUES = u"""Multiple values are separated with NEWLINE character '\\n',
or SPACE character if several variables are displayed."""

parser = argparse.ArgumentParser(description=u"Get creole variable",
                                 epilog=_RETURN_VALUES,
                                 parents=[scriptargs.logging()])
parser.add_argument('variable', nargs='?',
                    help=u"Nom de variable creole")
parser.add_argument('default', nargs='?',
                    help=u"Valeur par défaut si la variable n’existe pas")

# The listing/reload actions are mutually exclusive.
incompatible_options = parser.add_mutually_exclusive_group()
incompatible_options.add_argument('--groups', action="store_true", default=False,
                                  help=u"Liste les groupes de conteneurs")
incompatible_options.add_argument('--list', action="store_true", default=False,
                                  help=u"Liste l'ensemble des variables creole")
incompatible_options.add_argument('--reload', action="store_true", default=False,
                                  help=u"Recharge toute la configuration creole")
incompatible_options.add_argument('--reload-eol', action="store_true", default=False,
                                  help=u"Recharge les valeurs de configuration creole")

options = parser.parse_args()

if options.verbose:
    # 'info' is outputed to stdout
    options.log_level = u'warning'
if options.debug:
    options.log_level = u'debug'
|
||||
|
||||
def output(value, strip_master=False):
    """Pretty-print *value* on stdout.

    - list: one value per line; dict items are treated as errors and
      their 'err' field is written to stderr.
    - dict: one ``name="values"`` line per key, keys sorted; list values
      are flattened into a space-separated string.  When *strip_master*
      is true only the last component of a dotted name is displayed.
    - None or empty string: a single empty line.
    - anything else: printed verbatim.
    """
    if isinstance(value, list):
        #FIXME: ['val1', None, 'val2']
        for val in value:
            if isinstance(val, dict):
                sys.stderr.write(u'{}\n'.format(val['err']))
            else:
                sys.stdout.write(u'{}\n'.format(val))
    elif isinstance(value, dict):
        # Several keys/values may be returned.  Fix: the original used
        # value.keys() followed by .sort(), which only works on Python 2;
        # sorted() is equivalent and portable.
        for var in sorted(value):
            values = value[var]
            if isinstance(values, list):
                values_ = u''
                for val in values:
                    if val and not isinstance(val, dict):
                        values_ += u" {}".format(val)
                values = values_
            elif values is None:
                values = u''
            else:
                values = u'{}'.format(values)
            if strip_master:
                varname = var.split('.')[-1]
            else:
                varname = var
            sys.stdout.write(u'{}="{}"\n'.format(varname, values.strip()))
    elif value is None or value == u'':
        sys.stdout.write(u'\n')
    else:
        sys.stdout.write(u'{0}\n'.format(value))
|
||||
|
||||
def main():
    """Dispatch the CreoleGet action selected on the command line.

    Exits 0 on success, 1 on any error (logged according to --debug).
    """

    try:
        log = init_logging(level=options.log_level)
        client = CreoleClient()
        var = options.variable
        if options.groups:
            output(client.get_groups())
        elif options.list:
            output(client.get_creole(), True)
        elif options.reload:
            client.reload_config()
        elif options.reload_eol:
            client.reload_eol()
        elif not var:
            raise Exception(u"Veuillez spécifier un nom de variable Creole")
        else:
            if options.default is not None:
                kwargs = {'default': options.default}
            else:
                kwargs = {}
            if '.' in var:
                # NOTE(review): the default value is not forwarded for
                # dotted paths — confirm this is intentional.
                output(client.get(var))
            else:
                output(client.get_creole(var, **kwargs))

    # Fix: 'except Exception, err' is Python-2-only syntax; 'as' works
    # on Python 2.6+ and Python 3.
    except Exception as err:
        if options.debug:
            log.debug(normalize(err), exc_info=True)
        else:
            log.error(normalize(err))
        sys.exit(1)

    sys.exit(0)
|
||||
|
||||
if __name__ == '__main__':
|
||||
#Fix #18701
|
||||
reload(sys)
|
||||
sys.setdefaultencoding('UTF8')
|
||||
main()
|
||||
|
|
@ -0,0 +1,68 @@
|
|||
#! /usr/bin/python
|
||||
|
||||
import sys
|
||||
from optparse import OptionParser
|
||||
|
||||
from creole.lint.creolelint import validate
|
||||
from creole.lint.ansiwriter import AnsiWriter
|
||||
|
||||
def parse_cmdline():
    """Build the CreoleLint option parser and parse sys.argv.

    Returns the (options, args) pair produced by optparse.
    """
    cli = OptionParser()
    cli.add_option("-t", "--template",
                   dest="tmpl", default=None,
                   help="nom du template Creole")
    cli.add_option("-l", "--level",
                   dest="writelevel", default='warning',
                   help="level d'affichage des messages")
    cli.add_option("-n", "--name",
                   dest="name", default=None,
                   help="nom du lint a tester")
    cli.add_option("-d", "--dico-only",
                   action="store_true", dest="only_on_dico", default=False,
                   help="lint uniquement sur les dicos")
    return cli.parse_args()
|
||||
|
||||
def main():
    """Run the configured set of creole lint checks.

    Exits 1 when a check raises; otherwise returns normally.
    """
    options, args = parse_cmdline()
    tmpl = options.tmpl
    writelevel = options.writelevel

    only_on_template = False
    only_on_dico = options.only_on_dico
    if tmpl is not None:
        only_on_template = True
    if options.name:
        # A single named lint: force verbose output.
        keywords = [options.name]
        writelevel = 'info'
    else:
        keywords = []
        if not only_on_template:
            # Dictionary-level checks.
            keywords.extend(['valid_dtd', 'wrong_dicos_name',
                             'tabs_in_dicos', 'hidden_if_in_dicos',
                             'condition_without_target',
                             'obligatoire_in_dicos',
                             'valid_slave_value',
                             'valid_var_label', 'valid_separator_label',
                             'valid_help_label',
                             'activation_var_without_help',
                             'family_without_help',
                             'family_without_icon',
                             'old_fw_file'])
        if not only_on_dico:
            # Template-level checks.
            keywords.extend(['valid_parse_tmpl'])
        keywords.append('builtins')
    ansi = AnsiWriter(writelevel)
    try:
        for keyword in keywords:
            validate(keyword, ansi, tmpl)
    # Fix: 'except Exception, err' is Python-2-only syntax and the bound
    # name was unused; a bare 'except Exception:' is portable.
    except Exception:
        from traceback import print_exc
        print_exc()
        sys.exit(1)
|
||||
|
||||
main()
|
|
@ -0,0 +1,16 @@
|
|||
#!/usr/bin/env python

"""Expose the pyeole lock primitives on the command line."""

from sys import argv
from os import getppid
from importlib import import_module

from pyeole.command_line import ArgumentParser
from pyeole.ihm import only_root

only_root()

# Only these pyeole.lock functions may be triggered from the CLI.
allowed_functions = ('acquire', 'release', 'is_locked')
module = import_module('pyeole.lock')
# Locks are attributed to the calling (parent) process.
module.PID = getppid()
arguments = ArgumentParser(module, allowed_functions, argv[0])
arguments.parse_args(argv[1:])
arguments.trigger_callback()
|
|
@ -0,0 +1,54 @@
|
|||
#!/bin/bash

# Run a command, possibly inside one or all containers.

SSHCMD="ssh -q -o LogLevel=ERROR -o StrictHostKeyChecking=no"

commande=$1
container=$2
# only run the command when actually inside a container (over ssh)
onlyifcontainer=$3
silent=$4
CMD='eval'

# Run $2 in the container reachable at IP $1 over ssh.
# Fails fast when port 22 is not reachable.
ExecContainer()
{
    ip="$1"
    cmd="$2"
    tcpcheck 2 $ip:22 &>/dev/null || return 1
    $SSHCMD root@$ip "$cmd"
}

if [[ ${container} == "all" ]]
then
    # Broadcast to every container group except 'root' and 'all'.
    if [[ $(CreoleGet mode_conteneur_actif) == "oui" ]]
    then
        for grp in $(CreoleGet --groups)
        do
            if [[ ${grp} != 'root' ]] && [[ ${grp} != 'all' ]]
            then
                container_ip=$(CreoleGet "container_ip_${grp}")
                if [ ! "$silent" = "yes" ]; then
                    echo "Exécution de la commande [${commande}] dans le conteneur ${grp}"
                    echo
                fi
                ExecContainer "$container_ip" "$commande"
                if [ ! "$silent" = "yes" ]; then
                    echo
                fi
            fi
        done
    fi
else
    if [ -n "$container" ]
    then
        container_ip=$(CreoleGet "container_ip_$container")
    fi
    if [ -n "$container_ip" ] && [ ! "$container_ip" = "127.0.0.1" ]
    then
        ExecContainer "$container_ip" "$commande"
    elif [ "$onlyifcontainer" != "yes" ]
    then
        # Local (non-container) execution.
        eval "$commande"
    fi
fi
|
|
@ -0,0 +1,71 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import sys
|
||||
import argparse
|
||||
|
||||
from pyeole import scriptargs
|
||||
from pyeole.log import init_logging
|
||||
from pyeole.service import manage_services
|
||||
from creole.reconfigure import services
|
||||
from pyeole.ihm import only_root
|
||||
|
||||
only_root()
|
||||
|
||||
def parse_cmdline():
    """Parse the CreoleService command line.

    :return: parsed options namespace with ``log_level`` adjusted
        (--silent wins over --verbose/--debug).
    """

    service_actions = ['apply', 'configure', 'enable', 'disable', 'status',
                       'start', 'stop', 'restart', 'reload']

    parser = argparse.ArgumentParser(description="Action sur les services",
                                     parents=[scriptargs.container(),
                                              scriptargs.logging('info')])
    parser.add_argument('service', help="Nom du service")
    parser.add_argument('action', choices=service_actions,
                        help="Action à effectuer")
    parser.add_argument("-f", "--force", action="store_true", default=False,
                        help="Ne pas valider l'état de service")
    # Fix: help text typo "affichier" -> "afficher".
    parser.add_argument("-s", "--silent", action="store_true", default=False,
                        help="Ne pas afficher sur la console")

    opts = parser.parse_args()

    if opts.verbose:
        opts.log_level = 'info'
    if opts.debug:
        opts.log_level = 'debug'
    if opts.silent:
        opts.log_level = 'error'

    return opts
|
||||
|
||||
def main():
    """Apply the requested action on one service or on all of them.

    Exits 1 on error, otherwise exits with the value returned by
    manage_services (True for the 'all' pseudo-service).
    """
    options = parse_cmdline()
    log = init_logging(level=options.log_level)
    try:
        display = 'console'
        if options.silent:
            display = 'log'
        if options.service == 'all':
            # 'restart' on all services is expressed as stop + start.
            if options.action == 'restart':
                services('stop', display_title=False, try_restart_lxc=False)
                services('start', display_title=False, try_restart_lxc=False)
            else:
                services(options.action, display_title=False, try_restart_lxc=False)
            ret = True
        else:
            ret = manage_services(options.action, options.service,
                                  container=options.container, force=options.force,
                                  display=display)
    # Fix: 'except Exception, err' is Python-2-only syntax; 'as' works
    # on Python 2.6+ and Python 3.
    except Exception as err:
        if options.debug:
            log.debug(err, exc_info=True)
        else:
            log.error(err)
        sys.exit(1)
    # NOTE(review): sys.exit(True) maps to exit status 1 — confirm callers
    # rely on manage_services returning a proper exit code here.
    sys.exit(ret)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
|
@ -0,0 +1,92 @@
|
|||
#!/usr/bin/python
# -*- coding: utf-8 -*-

"""Set a Creole variable value from the command line."""

import argparse
from sys import exit

from pyeole import scriptargs
from pyeole.ansiprint import print_red
from pyeole.log import init_logging
from creole.var_loader import convert_value
from creole.loader import creole_loader, config_save_values
from tiramisu.error import PropertiesOptionError
from pyeole.ihm import only_root

only_root()

parser = argparse.ArgumentParser(description=u"Set Creole variable",
                                 parents=[scriptargs.logging()])
parser.add_argument("--default", action="store_true", default=False,
                    help=u"remettre à la valeur par défaut")
parser.add_argument('variable', nargs=1,
                    help=u"Nom de variable Creole")
parser.add_argument('value', nargs='?',
                    help=u"Valeur de la variable Creole")

options = parser.parse_args()

if options.verbose:
    # 'info' is outputed to stdout
    options.log_level = u'warning'
if options.debug:
    options.log_level = u'debug'

# --default and an explicit value are mutually exclusive.
if options.default and options.value:
    print_red("En cas de remise à la valeur par défaut, il ne faut pas spécifier de valeur")
    exit(1)

# Without --default a value is mandatory.
if not options.default and options.value is None:
    # Fix: user-facing typo "Veuiller" -> "Veuillez".
    print_red("Veuillez spécifier la valeur")
    exit(1)
|
||||
|
||||
def main():
    """Assign (or reset) the requested Creole variable and save the config.

    Exits 1 on any error, printing a red diagnostic message.
    """
    log = init_logging(level=options.log_level)
    try:
        config = creole_loader(rw=True, owner='creoleset', load_extra=True)
        var = options.variable[0]
        if '.' in var:
            # Full dotted path: namespace is the first component.
            if var.startswith('.'):
                var = var[1:]
            namespace = var.split('.')[0]
        else:
            # Bare name: look up its path in the 'creole' namespace.
            namespace = 'creole'
            var = config.find_first(byname=var, type_='path',
                                    force_permissive=True)
        if options.default:
            # Reset to the default value by deleting the stored one.
            homeconfig, name = config.cfgimpl_get_home_by_path(var)
            # Idiom fix: delattr() instead of calling __delattr__ directly.
            delattr(homeconfig, name)
        else:
            option = config.unwrap_from_path(var)
            value = options.value
            if option.impl_is_multi():
                # Multi-valued options take one value per line.
                value = [convert_value(option, val)
                         for val in value.split('\n')]
            else:
                value = convert_value(option, value)
            setattr(config, var, value)
        config_save_values(config, namespace)
    # Fix: 'except X, err' is Python-2-only syntax; 'as' works on
    # Python 2.6+ and Python 3 (applied to all handlers below).
    except PropertiesOptionError as err:
        if options.debug:
            log.debug(err, exc_info=True)
        print_red(u"Erreur de propriété : {0}".format(err))
        exit(1)
    except ValueError as err:
        if options.debug:
            log.debug(err, exc_info=True)
        print_red("Valeur invalide : {0}".format(err))
        exit(1)
    except AttributeError:
        if options.debug:
            log.debug("AttributeError", exc_info=True)
        print_red("Nom de variable inconnue : {0}".format(options.variable[0]))
        exit(1)
    except Exception as err:
        if options.debug:
            log.debug(err, exc_info=True)
        print_red("Erreur inconnue : {0}".format(err))
        exit(1)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,454 @@
|
|||
#! /usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
##########################################################################
|
||||
# Maj-Auto - Manage automatique update of EOLE server
|
||||
# Copyright © 2013 Pôle de compétences EOLE <eole@ac-dijon.fr>
|
||||
#
|
||||
# License CeCILL:
|
||||
# * in french: http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# * in english http://www.cecill.info/licences/Licence_CeCILL_V2-en.html
|
||||
##########################################################################
|
||||
|
||||
import sys
|
||||
import argparse
|
||||
import atexit
|
||||
import time
|
||||
import locale
|
||||
|
||||
from os import unlink, environ, system
|
||||
from subprocess import Popen, PIPE
|
||||
from os.path import basename, isfile
|
||||
|
||||
from creole import reconfigure, fonctionseole
|
||||
from creole.client import CreoleClient, TimeoutCreoleClientError, NotFoundError, CreoleClientError
|
||||
from creole.error import UserExit, UserExitError
|
||||
|
||||
from creole.eoleversion import EOLE_RELEASE, LAST_RELEASE, EOLE_VERSION
|
||||
|
||||
from pyeole.lock import acquire, release, is_locked
|
||||
from pyeole.log import init_logging, set_formatter
|
||||
from pyeole.ihm import question_ouinon, only_root, catch_signal
|
||||
from pyeole.encode import normalize
|
||||
|
||||
from pyeole.pkg import EolePkg, _configure_sources_mirror, report
|
||||
|
||||
from pyeole.diagnose import test_tcp
|
||||
from pyeole import scriptargs
|
||||
|
||||
from pyeole.i18n import i18n
|
||||
|
||||
_ = i18n('creole')
|
||||
|
||||
#import logging
|
||||
|
||||
log = None
|
||||
|
||||
only_root()
|
||||
|
||||
try:
|
||||
# FIXME : refactorer le système de lock de zephir-client (ref #6660)
|
||||
from zephir.lib_zephir import lock, unlock
|
||||
zephir_libs = True
|
||||
except Exception:
|
||||
zephir_libs = False
|
||||
|
||||
def release_lock():
    """Release the update locks held by this run (atexit handler).

    Drops the Zéphir 'maj' lock when the Zéphir libraries are available,
    then the system-level 'majauto' lock if still held.
    """
    if zephir_libs:
        unlock('maj')
    if is_locked('majauto', level='system'):
        release('majauto', level='system')
|
||||
|
||||
def user_exit(*args, **kwargs):
    """Handle a user interruption (e.g. Ctrl+C) with a clean abort.

    Logs a warning and raises UserExitError; extra signal-handler
    arguments are accepted and ignored.
    """
    log.warn(_(u'! Abandoning configuration !'))
    log.warn(_(u'System may be in an incoherent state.\n\n'))
    raise UserExitError()
|
||||
|
||||
|
||||
def parse_cmdline():
    """Parse the Maj-Auto / Query-Auto command line.

    Normalises derived options: default update level, log level from
    --verbose/--debug, and -R implying -r.
    """
    parser = argparse.ArgumentParser(prog='Maj-Auto|Query-Auto',
                                     description=_(u"Manage EOLE server automatic update"),
                                     parents=[scriptargs.logging('info')],
                                     add_help=False)

    parser.add_argument('-h', '--help', action='help',
                        help=_(u"show this help message and exit"))
    parser.add_argument('-n', '--dry-run', action='store_true',
                        help=_(u"run in dry-run mode (force to True when using Query-Auto)."))
    parser.add_argument('-f', '--force', action='store_true',
                        help=_(u"bypass Zephir authorizations."))
    parser.add_argument('-F', '--force-update', action='store_true',
                        help=_(u"update your server without any confirmation."))
    parser.add_argument('-s', '--simulate', action='store_true',
                        help=_(u"ask apt-get to simulate packages installation"))

    # Level of upgrade: candidate and development repositories are exclusive.
    maj_level = parser.add_mutually_exclusive_group()
    maj_level.add_argument('-C', '--candidat', default=False,
                           action='store', nargs='*',
                           choices=['eole', 'envole'],
                           help=_(u"use testing packages."))
    maj_level.add_argument('-D', '--devel', default=False,
                           action='store', nargs='*',
                           choices=['eole', 'envole'],
                           help=_(u"use development packages."))

    # Hidden option: target release for a release change.
    parser.add_argument('--release', help=argparse.SUPPRESS)

    # Actions to run after a successful upgrade.
    parser.add_argument('-r', '--reconfigure', action='store_true',
                        help=_(u"run reconfigure on successful upgrade."))
    parser.add_argument('-R', '--reboot', action='store_true',
                        help=_(u"run reconfigure on successful upgrade and reboot if necessary (implies -r)."))
    parser.add_argument('--download', action='store_true',
                        help=_(u'only download packages in cache.'))

    # Mirror selection.
    parser.add_argument('-S', '--eole-mirror',
                        help=_(u"EOLE repository server."))
    parser.add_argument('-U', '--ubuntu-mirror',
                        help=_(u"Ubuntu repository server."))
    parser.add_argument('-V', '--envole-mirror',
                        help=_(u"Envole repository server."))
    parser.add_argument('-c', '--cdrom', action="store_true",
                        help=_(u"use CDROM as source."))

    # EAD-specific output.
    parser.add_argument('-W', action='store_true',
                        help=_(u"specific output for EAD."))
    # Mode without creoled.
    parser.add_argument('-i', '--ignore', action='store_true',
                        help=_(u"ignore local configuration if creoled not responding."))

    opts = parser.parse_args()

    if getattr(opts, 'level', None) is None:
        opts.level = u'updates'
    if opts.verbose:
        opts.log_level = 'info'
    if opts.debug:
        opts.log_level = 'debug'

    if opts.reboot:
        opts.reconfigure = True

    return opts
|
||||
|
||||
|
||||
def main():
|
||||
global log
|
||||
opts = parse_cmdline()
|
||||
if opts.W:
|
||||
# variable set for pyeole.ansiprint
|
||||
environ['ModeTxt'] = 'yes'
|
||||
reporting = not (opts.dry_run or opts.simulate or opts.download)
|
||||
if not reporting:
|
||||
z_proc = 'QUERY-MAJ'
|
||||
log = init_logging(name=basename(sys.argv[0]), level=opts.log_level)
|
||||
pkg_log = init_logging(name='pyeole.pkg', level=opts.log_level)
|
||||
diag_log = init_logging(name='pyeole.diagnose', level=opts.log_level)
|
||||
else:
|
||||
z_proc = 'MAJ'
|
||||
report_file = '/var/lib/eole/reports/rapport-maj.log'
|
||||
if isfile(report_file):
|
||||
unlink(report_file)
|
||||
log = init_logging(name=basename(sys.argv[0]), level=opts.log_level,
|
||||
filename=report_file)
|
||||
pkg_log = init_logging(name='pyeole.pkg', level=opts.log_level,
|
||||
filename=report_file)
|
||||
diag_log = init_logging(name='pyeole.diagnose', level=opts.log_level,
|
||||
filename=report_file)
|
||||
set_formatter(log, u'file', u'brief')
|
||||
set_formatter(log, u'file', u'with-levelname-date')
|
||||
set_formatter(pkg_log, u'file', u'with-levelname-date')
|
||||
set_formatter(diag_log, u'file', u'with-levelname-date')
|
||||
report(2)
|
||||
locale.setlocale(locale.LC_TIME, "fr_FR.utf8")
|
||||
log.info(_(u'Update at {0}').format(time.strftime("%A %d %B %Y %H:%M:%S")))
|
||||
raised_err = None
|
||||
error_msg = None
|
||||
try:
|
||||
# gestion du ctrl+c
|
||||
catch_signal(user_exit)
|
||||
acquire('majauto', level='system')
|
||||
atexit.register(release_lock)
|
||||
client = CreoleClient()
|
||||
eole_level = 'stable'
|
||||
envole_level = 'stable'
|
||||
try:
|
||||
version = client.get_creole('eole_release')
|
||||
except (TimeoutCreoleClientError, NotFoundError, CreoleClientError) as err:
|
||||
if opts.ignore:
|
||||
version = EOLE_RELEASE
|
||||
else:
|
||||
raise err
|
||||
if opts.candidat is not False:
|
||||
z_level = " en candidate"
|
||||
# Gestion du niveau par dépôt (16110)
|
||||
if len(opts.candidat) == 0:
|
||||
# Si on ne précise aucun dépôt tout le monde va en candidat
|
||||
eole_level = 'proposed'
|
||||
envole_level = 'proposed'
|
||||
else:
|
||||
# Sinon on vérifie dépôt par dépôt, les dépôts non précisés restent en stable
|
||||
if 'eole' in opts.candidat:
|
||||
eole_level = 'proposed'
|
||||
if 'envole' in opts.candidat:
|
||||
envole_level = 'proposed'
|
||||
elif opts.devel is not False:
|
||||
z_level = " en devel"
|
||||
# Gestion du niveau par dépôt (16110)
|
||||
if len(opts.devel) == 0:
|
||||
# Si on ne précise aucun dépôt tout le monde vas en candidat
|
||||
eole_level = 'unstable'
|
||||
envole_level = 'unstable'
|
||||
else:
|
||||
# Sinon on vérifie dépôt par dépôt, les dépôts non précisés restent en stable
|
||||
if 'eole' in opts.devel:
|
||||
eole_level = 'unstable'
|
||||
if 'envole' in opts.devel:
|
||||
envole_level = 'unstable'
|
||||
else:
|
||||
z_level = ""
|
||||
if opts.release:
|
||||
current_release = int(EOLE_RELEASE.split('.')[-1])
|
||||
new_release = opts.release.split('.')
|
||||
if len(new_release) != 3 or \
|
||||
u'.'.join(new_release[0:2]) != EOLE_VERSION or \
|
||||
int(new_release[2]) not in range(current_release+1, int(LAST_RELEASE) + 1):
|
||||
raise Exception(_('Unknown release number'))
|
||||
z_level += " en {0}".format(opts.release)
|
||||
version = opts.release
|
||||
if opts.cdrom:
|
||||
z_level += " via le CDROM"
|
||||
#distro = 'stable'
|
||||
fonctionseole.zephir("INIT", "Début{0}".format(z_level), z_proc)
|
||||
if zephir_libs and not fonctionseole.init_proc('MAJ'):
|
||||
if opts.force:
|
||||
fonctionseole.zephir("MSG",
|
||||
"Mise à jour forcée par l'utilisateur",
|
||||
z_proc)
|
||||
else:
|
||||
log.warn(_(u"Update is locked, please contact Zéphir administrator"))
|
||||
log.warn(_(u"Use -f option if you want to force execution"))
|
||||
raise UserExitError()
|
||||
lock('maj')
|
||||
PKGMGR = EolePkg('apt', ignore=opts.ignore)
|
||||
if opts.dry_run:
|
||||
PKGMGR.set_option('APT::Get::Simulate', 'true')
|
||||
|
||||
try:
|
||||
module = client.get_creole('eole_module')
|
||||
except (TimeoutCreoleClientError, NotFoundError, CreoleClientError) as err:
|
||||
if opts.ignore:
|
||||
module = 'module'
|
||||
else:
|
||||
raise err
|
||||
try:
|
||||
uai = client.get_creole('numero_etab')
|
||||
except (TimeoutCreoleClientError, NotFoundError, CreoleClientError) as err:
|
||||
if opts.ignore:
|
||||
uai = None
|
||||
else:
|
||||
raise err
|
||||
|
||||
head = "*** {0} {1}"
|
||||
if uai:
|
||||
head += " ({2})"
|
||||
head += " ***\n"
|
||||
|
||||
log.info(head.format(module, version, uai))
|
||||
|
||||
if not opts.force_update:
|
||||
raising_level = u''
|
||||
if opts.release:
|
||||
raising_level = _(u"(CHANGE RELEASE LEVEL)")
|
||||
elif u'unstable' in [eole_level, envole_level]:
|
||||
raising_level = _(u"(UNSTABLE VERSION)")
|
||||
elif u'proposed' in [eole_level, envole_level]:
|
||||
raising_level = _(u"(TESTING VERSION)")
|
||||
|
||||
if raising_level != u'':
|
||||
log.warn(_(u"{0} - Raising update level may prevent "
|
||||
u"lowering back to stable version.").format(raising_level))
|
||||
try:
|
||||
assert question_ouinon(_(u"Do you wish to proceed?")) == 'oui'
|
||||
fonctionseole.zephir("MSG",
|
||||
"Mise à jour{0} forcée par l'utilisateur".format(z_level),
|
||||
z_proc)
|
||||
except (AssertionError, EOFError) as err:
|
||||
log.warn(_(u"Cancelling!"))
|
||||
raise UserExit()
|
||||
|
||||
PKGMGR.check()
|
||||
|
||||
#serveurs à utiliser pour les dépôts Ubuntu et EOLE
|
||||
_configure_sources_mirror(PKGMGR.pkgmgr, ubuntu=opts.ubuntu_mirror,
|
||||
eole=opts.eole_mirror, envole=opts.envole_mirror,
|
||||
ignore=opts.ignore, cdrom=opts.cdrom,
|
||||
release=version, eole_level=eole_level,
|
||||
envole_level=envole_level)
|
||||
|
||||
|
||||
PKGMGR.update(silent=True)
|
||||
upgrades = PKGMGR.get_upgradable_list()
|
||||
|
||||
install = 0
|
||||
upgrade = 0
|
||||
delete = 0
|
||||
for container, packages in upgrades.items():
|
||||
if not packages:
|
||||
continue
|
||||
for name, isInstalled, candidateVersion in packages:
|
||||
if isInstalled:
|
||||
if candidateVersion is None:
|
||||
delete += 1
|
||||
else:
|
||||
upgrade += 1
|
||||
else:
|
||||
install += 1
|
||||
|
||||
total_pkg = install+upgrade
|
||||
|
||||
headers = []
|
||||
if total_pkg == 0:
|
||||
log.info(_(u"Update successful."))
|
||||
log.info(_(u"Nothing to install."))
|
||||
fonctionseole.zephir("FIN",
|
||||
"Aucun paquet à installer{0}".format(z_level),
|
||||
z_proc)
|
||||
if reporting:
|
||||
report(3)
|
||||
sys.exit(0)
|
||||
|
||||
headers.append(_(u"{0} new,", u"{0} news,", install).format(install))
|
||||
headers.append(_(u"{0} upgrade,", u"{0} upgrades,", upgrade).format(upgrade))
|
||||
headers.append(_(u"{0} delete", u"{0} deletes", delete).format(delete))
|
||||
log.info(' '.join(headers))
|
||||
|
||||
for line in PKGMGR.list_upgrade(upgrades=upgrades):
|
||||
log.info(line)
|
||||
|
||||
if opts.dry_run:
|
||||
fonctionseole.zephir("FIN",
|
||||
"{0} paquets à mettre à jour{1}".format(total_pkg, z_level),
|
||||
z_proc)
|
||||
sys.exit(0)
|
||||
|
||||
if opts.download:
|
||||
for container, packages in upgrades.items():
|
||||
if not packages:
|
||||
continue
|
||||
pkgs = []
|
||||
for name, isInstalled, candidateVersion in packages:
|
||||
pkgs.append(name)
|
||||
PKGMGR.fetch_archives(container=container, packages=pkgs)
|
||||
fonctionseole.zephir("FIN",
|
||||
"{0} paquets téléchargés{1}".format(total_pkg, z_level),
|
||||
z_proc)
|
||||
|
||||
elif opts.simulate:
|
||||
PKGMGR.dist_upgrade(simulate=opts.simulate)
|
||||
fonctionseole.zephir("FIN",
|
||||
"{0} paquets mis à jour (simulation){1}".format(total_pkg, z_level),
|
||||
z_proc)
|
||||
|
||||
else:
|
||||
PKGMGR.download_upgrade()
|
||||
PKGMGR.dist_upgrade(simulate=opts.simulate)
|
||||
log.info(_(u"Update successful."))
|
||||
fonctionseole.zephir("FIN",
|
||||
"{0} paquets mis à jour{1}".format(total_pkg, z_level),
|
||||
z_proc)
|
||||
if opts.release:
|
||||
ret_code = system('/usr/share/zephir/scripts/upgrade_distrib.py --auto')
|
||||
if ret_code != 0:
|
||||
error_msg = str('erreur à la mise à jour vers la release {0}'.format(opts.release))
|
||||
else:
|
||||
log.info(_('Upgrade post Maj-Release, please wait'))
|
||||
release('majauto', level='system')
|
||||
cmd = ['/usr/bin/Maj-Auto', '-F']
|
||||
process = Popen(cmd, stdin=PIPE, stderr=PIPE, stdout=PIPE, shell=False)
|
||||
ret_code = process.wait()
|
||||
if ret_code != 0:
|
||||
error_msg = str(_('error in post maj release'))
|
||||
if opts.reconfigure:
|
||||
# rechargement des modules python (#7832)
|
||||
# cf. http://code.activestate.com/recipes/81731-reloading-all-modules/
|
||||
if globals().has_key('init_modules'):
|
||||
for m in [x for x in sys.modules.keys() if x not in init_modules]:
|
||||
del(sys.modules[m])
|
||||
else:
|
||||
init_modules = sys.modules.keys()
|
||||
fonctionseole.zephir("MSG",
|
||||
"Reconfiguration automatique",
|
||||
z_proc)
|
||||
elif not opts.release:
|
||||
log.warn(_(u"At least one packages has been updated,"
|
||||
u" use command [reconfigure] to apply modifications."))
|
||||
fonctionseole.zephir("MSG",
|
||||
"Reconfiguration du serveur à planifier",
|
||||
z_proc)
|
||||
|
||||
except (UserExit, UserExitError) as err:
|
||||
if reporting:
|
||||
report(1, 'Stopped by user')
|
||||
fonctionseole.zephir("FIN", "Abandon par l'utilisateur", z_proc)
|
||||
sys.exit(1)
|
||||
|
||||
except (TimeoutCreoleClientError, NotFoundError, CreoleClientError) as err:
|
||||
clue = _(". If restarting creoled service does not help, try {} command with '-i' option.")
|
||||
error_msg = str(err) + clue.format('Query-Auto' if opts.dry_run else 'Maj-Auto')
|
||||
raised_err = err
|
||||
|
||||
except Exception as err:
|
||||
error_msg = str(err)
|
||||
raised_err = err
|
||||
else:
|
||||
if reporting:
|
||||
report(0, reconf=opts.reconfigure)
|
||||
|
||||
if error_msg is not None:
|
||||
fonctionseole.zephir("ERR", error_msg, z_proc, console=False)
|
||||
if reporting:
|
||||
if raised_err is not None:
|
||||
report(1, normalize(err))
|
||||
else:
|
||||
report(1, error_msg)
|
||||
if log is None:
|
||||
# No logger defined, error in argument parsing
|
||||
raise
|
||||
if opts.log_level == 'debug' and raised_err is not None:
|
||||
log.error(err, exc_info=True)
|
||||
else:
|
||||
log.error(error_msg)
|
||||
sys.exit(1)
|
||||
|
||||
if opts.reconfigure:
|
||||
try:
|
||||
reconfigure.main(force_options={'auto': opts.reboot, 'log_level': opts.log_level},
|
||||
force_args=[], need_lock=False)
|
||||
except Exception as err:
|
||||
fonctionseole.zephir("ERR", str(err), z_proc, console=False)
|
||||
if reporting:
|
||||
report(1, normalize(err))
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,3 @@
|
|||
#!/bin/bash
# Maj-Cd: run an EOLE update from the CDROM repository.
# BUGFIX: "$@" must be quoted, otherwise arguments containing whitespace
# are re-split into several words before reaching Maj-Auto.
Maj-Auto --cdrom "$@"
|
|
@ -0,0 +1,116 @@
|
|||
#! /usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
##########################################################################
|
||||
# Maj-Auto - Manage automatique update of EOLE server
|
||||
# Copyright © 2015 Pôle de compétences EOLE <eole@ac-dijon.fr>
|
||||
#
|
||||
# License CeCILL:
|
||||
# * in french: http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# * in english http://www.cecill.info/licences/Licence_CeCILL_V2-en.html
|
||||
##########################################################################
|
||||
|
||||
from os import system
|
||||
from sys import exit
|
||||
import re
|
||||
from creole.eoleversion import EOLE_RELEASE, LAST_RELEASE, EOLE_VERSION
|
||||
from pyeole.i18n import i18n
|
||||
from pyeole.ihm import print_red
|
||||
|
||||
import argparse
|
||||
from pyeole import scriptargs
|
||||
|
||||
_ = i18n('creole')
|
||||
|
||||
def parse_cmdline():
    """Build the Maj-Release argument parser and parse the command line.

    :return: the parsed options namespace (``release``, ``force``)
    """
    descr = _(u"This script will upgrade to a new release of this distribution")
    # -h is re-declared manually so that its help text can be translated.
    parser = argparse.ArgumentParser(prog='Maj-Release',
                                     description=descr,
                                     add_help=False)
    parser.add_argument('-h', '--help', action='help',
                        help=_(u"show this help message and exit"))
    parser.add_argument('--release',
                        help=_(u"Target release number"))
    parser.add_argument('-f', '--force', action='store_true',
                        help=_(u"Do not ask confirmation"))
    return parser.parse_args()
|
||||
|
||||
|
||||
def main():
    """Pick a newer stable release (interactively or via --release) and
    delegate the actual upgrade to Maj-Auto."""
    opts = parse_cmdline()

    print(_(u"This script will upgrade to a new release of this distribution"))
    current_release = int(EOLE_RELEASE.split('.')[-1])
    # Candidate release suffixes strictly newer than the current one.
    choices = range(current_release + 1, int(LAST_RELEASE) + 1)
    if choices == []:
        print_red(_(u"No stable new release available"))
        exit(1)
    # Latest release is displayed first and is the default answer.
    choices.reverse()
    all_releases = [EOLE_VERSION + '.' + str(suffix) for suffix in choices]

    while True:
        if opts.release is not None:
            choice = opts.release
        else:
            for idx, release in enumerate(all_releases):
                print("{0}: {1}".format(idx + 1, release))
            print(_(u"q|quit: abort"))

            try:
                choice = raw_input("[1] : ")
            except (KeyboardInterrupt, EOFError):
                print_red(_("\nUpgrade aborted by user"))
                exit(0)

        if choice == '':
            # User hit enter: take the default (latest release).
            choice = 1
        elif choice in all_releases:
            # User typed a full release number (e.g. "2.7.1").
            choice = all_releases.index(choice) + 1
        else:
            try:
                choice = int(choice)
            except ValueError:
                if re.match(r'^q(uit)?', choice):
                    print_red(_(u"Voluntary stay of proceedings"))
                    exit(0)
                print_red(_(u"Invalid response: {0}").format(choice))
                # A bad --release cannot be retried interactively.
                if opts.release is not None:
                    exit(1)
                continue

        if 1 <= choice <= len(choices):
            break
        print_red(_(u"Invalid response: {0}").format(choice))
        if opts.release is not None:
            exit(1)

    release = all_releases[choice - 1]
    force = '--force-update' if opts.force else ''

    majrel = system('/usr/bin/Maj-Auto --release {0} {1}'.format(release, force))

    exit(majrel)


if __name__ == '__main__':
    main()
|
|
@ -0,0 +1,3 @@
|
|||
#!/bin/bash
# Query-Auto: list the packages an update would touch without applying it.
# BUGFIX: "$@" must be quoted so arguments with spaces are forwarded intact.
Maj-Auto --dry-run "$@"
|
|
@ -0,0 +1,3 @@
|
|||
#!/bin/bash
# Query-Cd: dry-run of a CDROM-based update.
# BUGFIX: "$@" must be quoted so arguments with spaces are forwarded intact.
Maj-Cd --dry-run "$@"
|
|
@ -0,0 +1,7 @@
|
|||
#!/bin/sh
# Restart every Creole-managed service.

# Stopping "all" halts everything that is currently running...
CreoleService all stop

# ...while starting "all" only brings back the services that are enabled.
CreoleService all start
|
|
@ -0,0 +1,5 @@
|
|||
#!/bin/bash
# Upgrade-Auto is deprecated: sub-version changes are now handled by
# Maj-Release.  Tell the user which command to use instead and fail.
echo "La commande Upgrade-Auto ne permet plus de changer de sous-version du serveur EOLE."
echo "Merci d'utiliser la commande Maj-Release à la place."
exit 1
|
|
@ -0,0 +1,65 @@
|
|||
#!/bin/bash
###########################################################################
# Eole NG - 2007
# Copyright Pole de Competence Eole (Ministere Education - Academie Dijon)
# Licence CeCill cf /root/LicenceEole.txt
# eole@ac-dijon.fr
#
# diagnose
#
# Check that the server is instantiated, then run its diagnose scripts.
#
###########################################################################
. /usr/lib/eole/ihm.sh
. /usr/lib/eole/utils.sh

# Diagnosing requires root privileges.
only_root

CREOLE_FILE="/etc/eole/config.eol"
RELEASE_FILE="/etc/eole/release"
DIAG_DIR="/usr/share/eole/diagnose"
err_prefix="Diagnose impossible"

# The server must have been instantiated (config.eol present).
TestFile $CREOLE_FILE
if [ ${?} -eq 1 ]
then
    EchoRouge "${err_prefix} : le serveur n'est pas instancié"
    exit 1
fi
# The release file must exist as well.
TestFile $RELEASE_FILE
if [ ${?} -eq 1 ]
then
    EchoRouge "${err_prefix} : le serveur n'est pas instancié"
    # BUGFIX: this branch used a bare "exit" (status 0) although it is a
    # failure, hiding the error from callers; fail like the other checks.
    exit 1
fi
# At least one diagnose script directory must be available.
TestDir $DIAG_DIR
if [ ${?} -eq 1 ]
then
    EchoRouge "${err_prefix} : pas script diagnose disponible"
    exit 1
fi
# creoled must be running to answer the CreoleGet requests below.
TestCreoled
if [ ${?} -eq 1 ]
then
    EchoRouge "${err_prefix} : creoled est arrêté"
    exit 1
fi

# -L: verbose, -W: EAD (web) mode, -T: plain-text mode; the flags are
# exported so the individual diagnose scripts can honour them.
Opt=""
while getopts "LWT" Option
do
    case $Option in
        L ) export Verbose="yes";;
        W ) export ModeEad="yes";;
        T ) export ModeTxt="yes";;
        * ) exit 1;;
    esac
done

. $RELEASE_FILE
DETAILS="$(CreoleGet nom_machine) $(CreoleGet numero_etab)"
# Older release files may only define EOLE_VERSION.
[ -z "$EOLE_RELEASE" ] && EOLE_RELEASE=$EOLE_VERSION
EchoGras "*** Test du module $EOLE_MODULE version $EOLE_RELEASE ($DETAILS) ***"
echo
# Execute every diagnose script of the directory.
run-parts $DIAG_DIR
EchoGras "*** FIN DU DIAGNOSTIC ***"
|
|
@ -0,0 +1,27 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: UTF-8 -*-
|
||||
|
||||
from os import listdir, system, chdir
|
||||
from os.path import isfile, join, basename
|
||||
from creole import config
|
||||
|
||||
modif_dir = basename(config.modif_dir)
|
||||
distrib_dir = basename(config.distrib_dir)
|
||||
patch_dir = basename(config.patch_dir)
|
||||
|
||||
system('clear')
|
||||
|
||||
# on travaille dans le répertoire eole
|
||||
chdir(config.eoleroot)
|
||||
|
||||
print "** Génération des patches à partir de %s **\n" % modif_dir
|
||||
for modfile in listdir(modif_dir):
|
||||
if modfile.endswith('~'):
|
||||
continue
|
||||
if not isfile(join(distrib_dir, modfile)):
|
||||
print "ATTENTION : le fichier original %s n'existe pas !" % join(distrib_dir, modfile)
|
||||
continue
|
||||
print "Génération du patch %s.patch" % modfile
|
||||
system("diff -uNr %s %s > %s.patch" % (join(distrib_dir,modfile), join(modif_dir,modfile), join(patch_dir,modfile)))
|
||||
|
||||
print "\n** Fin de la génération des patch **\n"
|
|
@ -0,0 +1,137 @@
|
|||
#!/bin/bash
###########################################################################
# EOLE - 2010
# Copyright Pole de Competence Eole (Ministere Education - Academie Dijon)
# Licence CeCill cf /root/LicenceEole.txt
# eole@ac-dijon.fr
#
# gen_rpt
#
# Build an anomaly report (logs, config, hardware info) and optionally
# mail it to the EOLE team.
#
###########################################################################

# Abort with an explanation if the configuration file $1 is missing.
TestConf()
{
[ -e "$1" ] && return 0
tput setaf 3
echo "* Erreur $0 : le fichier de configuration $1 absent"
echo "* Instanciez votre serveur"
tput sgr0
exit 1
}

clear

. /usr/lib/eole/ihm.sh
. /usr/lib/eole/utils.sh

# Collecting logs and system state requires root.
only_root

numero_etab=$(CreoleGet numero_etab)
CONFIGEOL='/etc/eole/config.eol'
EOLEDIRS="/usr/share/eole/creole/dicos"
PATCHDIR="/usr/share/eole/creole/patch"
TestConf $CONFIGEOL
EOLERELEASE="/etc/eole/release"
if [ ! -e $EOLERELEASE ]; then
EchoRouge "Fichier $EOLERELEASE est introuvable"
exit 1
fi
# Provides EOLE_MODULE and EOLE_VERSION.
. $EOLERELEASE
Module="${EOLE_MODULE}-${EOLE_VERSION}"
echo "Récupération des informations ..."
# Fresh working tree for the report.
RepRpt="/tmp/GenRpt"
rm -fr $RepRpt 2> /dev/null
mkdir $RepRpt
mkdir $RepRpt/log
mkdir $RepRpt/eole
mkdir $RepRpt/system
Rpt=$RepRpt"/Rpt-"$Module"-"$numero_etab
Mel="eole@ac-dijon.fr"

# Plain-text files.
echo "Config.eol"
/bin/cp -f $CONFIGEOL $RepRpt/eole
echo "Diagnose"
/usr/bin/diagnose -LT >> $RepRpt/diagnose.txt 2>&1
echo Pstree
pstree >> $RepRpt/system/pstree.txt 2>&1
echo Lshw
lshw >> $RepRpt/system/lshw.txt 2>&1
echo Lsusb
lsusb >> $RepRpt/system/lsusb.txt 2>&1
echo Lspci
lspci >> $RepRpt/system/lspci.txt 2>&1
echo Iptables
iptables -nvL > $RepRpt/system/iptables.txt 2>&1
iptables -nvL -t nat >> $RepRpt/system/iptables.txt 2>&1
echo History
grep -v "^#" /root/.bash_history > $RepRpt/system/history.txt
echo Paquets
dpkg-query -W > $RepRpt/system/packages.txt 2>&1
# Compressed files.
echo Syslog
for log in rsyslog su sudo kernel cron auth chpasswd exim ; do
[ -d /var/log/rsyslog/local/$log ] && gzip -rc /var/log/rsyslog/local/$log > $RepRpt/log/$log.gz
done
echo Dmesg
dmesg > $RepRpt/log/dmesg.log 2>&1
gzip $RepRpt/log/dmesg.log
echo Creole.log
gzip -c /var/log/reconfigure.log > $RepRpt/log/reconfigure.log.gz
echo Dicos
gzip -rc $EOLEDIRS > $RepRpt/eole/dicos.gz
echo Patch
gzip -rc $PATCHDIR > $RepRpt/eole/patch.gz
echo Stats
gzip -rc /usr/share/zephir/monitor/stats > $RepRpt/stats.gz

# Scribe-specific extras (only collected when the files exist).
if [ -f /var/www/ead/extraction/tmp/rapport.txt ];then
echo "Rapport d'extraction"
gzip -rc /var/www/ead/extraction/tmp/rapport.txt > $RepRpt/log/extraction.log.gz
fi
if [ -f /var/log/controle-vnc/main.log ];then
echo 'Log client scribe'
gzip -rc /var/log/controle-vnc/main.log > $RepRpt/log/controle-vnc.log.gz
fi

# Scribe/Horus/Eclair-specific reports.
if [ -d /var/lib/eole/reports ];then
echo "Rapport (sauvegarde/maj/...)"
gzip -rc /var/lib/eole/reports > $RepRpt/log/rapport.log.gz
fi

# Amon-specific: VPN status.
if [ -f '/usr/share/eole/test-rvp' ];then
echo 'Rvp'
/usr/sbin/ipsec status &> $RepRpt/ipsec.status 2>&1
fi

# debsums integrity report, when the helper is installed.
if [ -x '/usr/share/eole/debsums/show-reports.py' ]; then
echo "Rapport debsums"
/usr/share/eole/debsums/show-reports.py > ${RepRpt}/log/rapport-debsums.log 2>&1
fi

echo
# NOTE(review): the archive is created in the current working directory.
Archive=$Module-$numero_etab".tar.gz"
echo "Création de l'archive locale $Archive"
tar -C /tmp -czf $Archive GenRpt
echo

# Optionally mail the archive (Question_ouinon returns 1 on "non").
Question_ouinon "Envoyer l'archive par email ?"
if [ $? -eq 1 ];then
exit 0
fi

echo "Destinataire du message : "
echo -n "[$Mel] : "
read mail
if [ "$mail" == "" ];then
mail=$Mel
fi
echo -n "Commentaire : "
read comment
echo "$comment"|mutt -a $Archive -s "Rapport $Module de $numero_etab" -c $mail -e "set copy=no"
|
|
@ -0,0 +1,24 @@
|
|||
#!/usr/bin/python
# -*- coding: utf-8 -*-

"""Application de la configuration EOLE
"""


import sys
from creole.reconfigure import main
from creole.error import UserExitError, LockError, UnlockError, UserExit
from pyeole.ihm import only_root

# Reconfiguration is restricted to root.
only_root()

try:
    # Force interactive mode
    main(force_options={'interactive': True})
except (UserExitError, LockError, UnlockError):
    sys.exit(1)
except UserExit:
    sys.exit(0)
except KeyboardInterrupt:
    # Ctrl-C is an abort, report it as an error status.
    sys.exit(1)
except Exception:
    # BUGFIX: this was a bare "except:", which also intercepted SystemExit
    # and replaced an explicit exit status from main() with an
    # unconditional 1.
    #FIXME: log & affichage géré au raise ?
    sys.exit(1)
|
|
@ -0,0 +1,173 @@
|
|||
#!/bin/bash
|
||||
##########################################################
|
||||
#
|
||||
# Eole NG - 2010
|
||||
# Copyright Pole de Competence Eole (Ministere Education - Academie Dijon)
|
||||
# Licence CeCill http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# eole@ac-dijon.fr
|
||||
#
|
||||
# Gestion des modules en mode dialogue
|
||||
# avec les comptes système eole, eole2
|
||||
#
|
||||
##########################################################
|
||||
|
||||
# un seul manage ?
|
||||
pmanage=`pgrep manage-eole`
|
||||
nbmanage=`echo $pmanage | wc -w`
|
||||
|
||||
# fichiers temporaires
|
||||
temp="/tmp/InBox-Eol-"
|
||||
tempfile="$temp$$"
|
||||
|
||||
TitreGen="Eole - Gestion du Serveur"
|
||||
|
||||
##########################################################
|
||||
# Fonctions reprises de FonctionsEole
|
||||
##########################################################
|
||||
|
||||
# Display a dialog menu.
# $1: prompt text; $2: name of the variable that receives the selection;
# $3: space-separated "tag label" pairs for the menu entries.
# The chosen tag is read back from $tempfile and stored via eval; Cancel
# stores "CANCEL", ESC stores "ESC" (or the partial selection if any).
MenuBox()
{
#${1="Votre Saisie"}
#${2="Saisie"}
NBlignes=${NBlignes=5}
Menu="$3"
dialog $NOMOUSE1 --backtitle "$TitreGen" \
    --aspect 45 --clear \
    --menu "$1" 16 50 $NBlignes \
    $Menu 2> $tempfile
retval=$?
case $retval in
    0)
        eval $2="`cat $tempfile`";;
    1) # Cancel
        eval $2="CANCEL";;
    255) # ESC
        if test -s $tempfile ;
        then
            eval $2=`cat $tempfile`
        else
            eval $2="ESC"
        fi
        ;;
esac
}

# Display a simple message box with an OK button.
# $1: message text.
OkBox()
{
dialog $NOMOUSE1 --backtitle "$TitreGen" \
    --aspect 45 --cancel-label Abandon\
    --msgbox "$1" 0 0
}

# Ask a yes/no question.
# $1: question text; $2: name of the variable that receives the answer,
# set to "OUI", "NON" or "ESC" depending on the user's action.
QuestionBox()
{
#${1=Votre Saisie"}
#${2="Saisie"}
dialog $NOMOUSE1 --backtitle "$TitreGen" \
    --aspect 45 --clear \
    --yesno "$1" 16 50
retval=$?
case $retval in
    0)
        eval $2="OUI";;
    1) # Cancel
        eval $2="NON";;
    255) # ESC
        eval $2="ESC" ;;
esac
}

# Pause until the user presses Enter.
Entree(){
echo
echo "Tapez <Entrée>"
read Bidon
}

# Remove the temporary file and exit with status $1.
CleanExit(){
echo "Au revoir !"
rm -f $tempfile
exit $1
}
|
||||
|
||||
##########################################################
# Main program
##########################################################

# Refuse to run several manage-eole instances side by side: offer to quit,
# optionally killing the other instances.
if [ $nbmanage -gt 1 ]
then
    MenuBox "D'autres instances de manage-eole ont été détectées" Rep "1 Quitter_sans_tuer 2 Quitter_et_tuer"
    # BUGFIX: the glob was written as "$temp*" (quoted), so it never
    # expanded and the stale InBox-Eol-* temp files were never removed.
    rm -f "$temp"*
    if [ "$Rep" == "2" ]
    then
        for pid in $pmanage
        do
            kill -9 $pid
        done
    fi
    CleanExit 0
fi

OkBox "Administration EOLE\n\nPour Vous Deplacer sur l'Ecran\nUtiliser votre Souris\nOu la touche tabulation.\n\n"

Rep=""
while [ 1 ]
do
    # FIXME/TODO : ajouter des entrées de menu !
    MenuBox "Votre Choix" Rep "1 Diagnostic 2 Reconfiguration 3 Paquets_en_Maj 4 Mise_A_Jour 8 Redemarrer_Serveur 9 Arret_Serveur ! Shell_Linux Q Quitter"

    if [ "$Rep" == "CANCEL" ]
    then
        CleanExit 1
    fi

    case $Rep in
        1)
            echo "En cours ..."
            sudo /usr/bin/diagnose
            Entree
            ;;
        2)
            sudo /usr/bin/reconfigure
            Entree
            ;;
        3)
            sudo /usr/bin/Query-Auto
            Entree
            ;;
        4)
            sudo /usr/bin/Maj-Auto
            Entree
            ;;
        # TODO : pouvoir inclure des entrées venant d'ailleurs ;)
        #5)
        #sudo /usr/share/eole/Maj-blacklist.sh
        #Entree
        #;;
        8)
            QuestionBox "Vous avez demandé le redémarrage du serveur\nEtes vous sur ?" Rep
            if [ "$Rep" == "OUI" ]
            then
                sudo /sbin/reboot
                sleep 1
                CleanExit 0
            fi
            ;;
        9)
            QuestionBox "Vous avez demandé un arret total du serveur\nEtes vous sur ?" Rep
            if [ "$Rep" == "OUI" ]
            then
                sudo /sbin/halt -p
                sleep 1
                CleanExit 0
            fi
            ;;
        !)
            echo "\"exit\" ou \"Ctrl + d\" pour revenir au Menu"
            /bin/bash
            ;;
        Q)
            CleanExit 0
            ;;
    esac
done
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/python
# -*- coding: utf-8 -*-

"""Application de la configuration EOLE
"""


import sys
from creole.reconfigure import main
from creole.error import UserExitError, LockError, UnlockError, UserExit
from pyeole.ihm import only_root

# Reconfiguration is restricted to root.
only_root()

try:
    main()
except (UserExitError, LockError, UnlockError):
    sys.exit(1)
except UserExit:
    sys.exit(0)
except KeyboardInterrupt:
    # Ctrl-C is an abort, report it as an error status.
    sys.exit(1)
except Exception:
    # BUGFIX: this was a bare "except:", which also intercepted SystemExit
    # and replaced an explicit exit status from main() with an
    # unconditional 1.
    #FIXME: log & affichage géré au raise ?
    sys.exit(1)
|
|
@ -0,0 +1,15 @@
|
|||
# creole specific rules
# Installation directories for the payloads of the creole package
# (the *_PROG_DIR / *_REC_DIR / *_DATA_DIR naming is consumed by the
# shared eole.mk install machinery).

# Scheduled-task definitions (eole_DIR comes from the shared makefiles).
schedule_PROG_DIR := $(eole_DIR)/schedule
# Release-upgrade scripts, installed recursively.
upgrade_REC_DIR := $(eole_DIR)/upgrade
# Administrator command-line tools.
bin_PROG_DIR := $(DESTDIR)/usr/bin
sbin_PROG_DIR := $(DESTDIR)/usr/sbin
# Creole data files, installed recursively.
data_REC_DIR := $(DESTDIR)/usr/share/creole
# French and English manual pages (section 8).
fr.man8_DATA_DIR := $(DESTDIR)/usr/share/man/fr.UTF-8/man8
en.man8_DATA_DIR := $(DESTDIR)/usr/share/man/man8
# Message-of-the-day fragments.
motd_PROG_DIR := $(DESTDIR)/etc/update-motd.d
# Local dictionaries directory.
local_DATA_DIR := $(DESTDIR)/usr/share/eole/creole/dicos/local

install-files::
# To inform user about coding changes
	$(INSTALL_DATA) deprecated/FonctionsEoleNg $(eole_DIR)
|
|
@ -0,0 +1,1807 @@
|
|||
# coding: utf-8
|
||||
from copy import copy
|
||||
|
||||
from collections import OrderedDict
|
||||
from os.path import join, basename
|
||||
from ast import literal_eval
|
||||
import sys
|
||||
import imp
|
||||
|
||||
|
||||
from .i18n import _
|
||||
from .utils import normalize_family
|
||||
from .config import VIRTBASE, VIRTROOT, VIRTMASTER, templatedir
|
||||
from .error import CreoleDictConsistencyError
|
||||
from .xmlreflector import HIGH_COMPATIBILITY
|
||||
|
||||
# mode order is important: the position of a name in this tuple is its
# numeric level (used by mode_factory to build the Mode objects)
modes_level = ('basic', 'normal', 'expert')
|
||||
|
||||
|
||||
class secure_eosfunc:
    """Proxy restricting attribute access on an eosfunc module.

    Only the function names listed in the wrapped object's
    ``func_on_zephir_context`` may be fetched through the proxy; anything
    else raises.
    """

    def __init__(self, eosfunc):
        self.eosfunc = eosfunc

    def __getattribute__(self, func_name):
        # Allow direct access to the wrapped object itself.
        if func_name == 'eosfunc':
            return super().__getattribute__('eosfunc')
        if func_name in self.eosfunc.func_on_zephir_context:
            # BUGFIX: getattr() was called with a single argument
            # (getattr(self.eosfunc)), which raises TypeError; the intent
            # is to fetch the whitelisted function by name.
            return getattr(self.eosfunc, func_name)
        raise Exception(_('unknown or unauthorized function: {}'.format(func_name)))
|
||||
|
||||
|
||||
class Mode(object):
    """A named variable mode ('basic', 'normal', 'expert') ordered by level."""

    def __init__(self, name, level):
        self.name = name
        self.level = level

    def __cmp__(self, other):
        # Python 2 ordering hook.  BUGFIX: the builtin cmp() does not exist
        # on Python 3, so compute the -1/0/1 result explicitly.
        return (self.level > other.level) - (self.level < other.level)

    def __eq__(self, other):
        return self.level == other.level

    def __ne__(self, other):
        return self.level != other.level

    def __lt__(self, other):
        # BUGFIX: __lt__ was missing although every other rich comparison
        # was defined; on Python 3 "a < b" has no __cmp__ fallback and
        # raised TypeError.
        return self.level < other.level

    def __gt__(self, other):
        return other.level < self.level

    def __ge__(self, other):
        return not self.level < other.level

    def __le__(self, other):
        return not other.level < self.level

    def __hash__(self):
        # Defining __eq__ disables the inherited hash on Python 3; keep
        # Mode hashable, consistent with equality by level.
        return hash(self.level)
|
||||
|
||||
|
||||
def mode_factory():
    """Build the name -> Mode mapping for every entry of ``modes_level``.

    The position of a name in ``modes_level`` is its numeric level, which
    is why the order of that tuple matters.
    """
    # enumerate() instead of the range(len(...)) anti-pattern.
    return {name: Mode(name, level) for level, name in enumerate(modes_level)}

modes = mode_factory()
|
||||
|
||||
# a CreoleObjSpace's attribute has some annotations
# that shall not be present in the exported (flatened) XML
ERASED_ATTRIBUTES = ('redefine', 'exists', 'fallback', 'optional', 'remove_check', 'namespace',
                     'remove_condition', 'path', 'instance_mode', 'index', 'is_in_master', '_real_container')

# element types that do not get an "activate" flag
NOT_NEED_ACTIVATE = ('package', 'disknod')

# closed value sets forced for the corresponding variable types
FORCE_CHOICE = {'oui/non': ['oui', 'non'],
                'on/off': ['on', 'off'],
                'yes/no': ['yes', 'no'],
                'schedule': ['none', 'daily', 'weekly', 'monthly'],
                'schedulemod': ['pre', 'post']}

# option class name -> creole type keyword used in the exported XML
KEY_TYPE = {'SymLinkOption': 'symlink',
            'PortOption': 'port',
            'UnicodeOption': 'string',
            'NetworkOption': 'network',
            'NetmaskOption': 'netmask',
            'URLOption': 'web_address',
            'FilenameOption': 'filename'}

# parameter types accepted by check / condition / fill directives
TYPE_PARAM_CHECK = ('string', 'python', 'eole')
TYPE_PARAM_CONDITION = ('string', 'python', 'number', 'eole')
TYPE_PARAM_FILL = ('string', 'eole', 'number', 'container', 'context')

# disknod attributes that must be numeric
DISKNOD_KEY_TYPE = {'major': 'number',
                    'minor': 'number'}

# attributes stripped from family action objects on export
ERASED_FAMILY_ACTION_ATTRIBUTES = ('index', 'action')
|
||||
|
||||
|
||||
class ContainerAnnotator(object):
|
||||
"""Manage container's object
|
||||
"""
|
||||
def __init__(self, space, paths, objectspace):
|
||||
self.space = space
|
||||
self.paths = paths
|
||||
self.objectspace = objectspace
|
||||
self.extra_conditions = {}
|
||||
var_name = 'mode_conteneur_actif'
|
||||
self.containers_enabled = False
|
||||
try:
|
||||
family_name = self.paths.get_variable_family_name(var_name)
|
||||
if (hasattr(space, 'variables') and
|
||||
'creole' in space.variables and
|
||||
hasattr(space.variables['creole'], 'family') and
|
||||
family_name in space.variables['creole'].family and
|
||||
var_name in space.variables['creole'].family[family_name].variable and
|
||||
hasattr(space.variables['creole'].family[family_name].variable[var_name], 'value')):
|
||||
# assume that mode_conteneur_actif is not a multi
|
||||
value = space.variables['creole'].family[family_name].variable[var_name].value[0].name
|
||||
self.containers_enabled = value == 'oui'
|
||||
except CreoleDictConsistencyError:
|
||||
pass
|
||||
|
||||
root_container = self.objectspace.container()
|
||||
root_container.name = 'root'
|
||||
root_container.container = 'root'
|
||||
root_container.real_container = 'root'
|
||||
root_container.container_group = 'root'
|
||||
root_container.id = '1'
|
||||
if not hasattr(self.space, 'containers'):
|
||||
self.space.containers = self.objectspace.containers()
|
||||
if hasattr(self.space.containers, 'container'):
|
||||
old_container = list(self.space.containers.container.items())
|
||||
old_container.insert(0, ('root', root_container))
|
||||
self.space.containers.container = OrderedDict(old_container)
|
||||
#self.space.containers.container['root'] = root_container
|
||||
else:
|
||||
self.space.containers.container = OrderedDict({'root': root_container})
|
||||
if hasattr(space, 'containers') and hasattr(space.containers, 'all'):
|
||||
all_container = self.objectspace.container()
|
||||
all_container.name = 'all'
|
||||
all_container.container = 'all'
|
||||
if self.containers_enabled:
|
||||
all_container.real_container = 'all'
|
||||
else:
|
||||
all_container.real_container = VIRTMASTER
|
||||
all_container.container_group = 'all'
|
||||
old_container = list(self.space.containers.container.items())
|
||||
old_container.insert(1, ('all', all_container))
|
||||
self.space.containers.container = OrderedDict(old_container)
|
||||
#self.space.containers.container['all'] = all_container
|
||||
if hasattr(space, 'variables') and 'creole' in space.variables:
|
||||
flattened_elts = dict()
|
||||
if hasattr(space, 'files'):
|
||||
for key, values in vars(self.space.files).items():
|
||||
if not isinstance(values, str) and not isinstance(values, int):
|
||||
if isinstance(values, dict):
|
||||
values = values.values()
|
||||
for value in values:
|
||||
value.container = root_container
|
||||
flattened_elts.setdefault(key, []).append(value)
|
||||
# Remove "all" and dispatch informations in all containers
|
||||
if hasattr(space, 'containers') and hasattr(space.containers, 'all') and hasattr(space.containers, 'container'):
|
||||
for type_, containers in vars(space.containers.all).items():
|
||||
if type_ == 'index':
|
||||
continue
|
||||
if isinstance(containers, list):
|
||||
for elt in containers:
|
||||
for container in space.containers.container.values():
|
||||
if container.name not in ['root', 'all']:
|
||||
if not hasattr(container, type_):
|
||||
setattr(container, type_, [])
|
||||
new_elt = copy(elt)
|
||||
new_elt.container = container
|
||||
getattr(container, type_).append(new_elt)
|
||||
else:
|
||||
for name, elt in containers.items():
|
||||
for container in space.containers.container.values():
|
||||
if container.name not in ['root', 'all']:
|
||||
if not hasattr(container, type_):
|
||||
setattr(container, type_, OrderedDict())
|
||||
old_container = getattr(container, type_)
|
||||
if name in old_container:
|
||||
raise CreoleDictConsistencyError('{}'.format(name))
|
||||
new_elt = copy(elt)
|
||||
new_elt.container = container
|
||||
old_container[name] = new_elt
|
||||
del space.containers.all
|
||||
if hasattr(space, 'containers') and hasattr(space.containers, 'container'):
|
||||
self.generate_interfaces()
|
||||
groups = {}
|
||||
containers = space.containers.container.values()
|
||||
container_groups = {}
|
||||
update_values = True
|
||||
while update_values:
|
||||
update_values = False
|
||||
for container in containers:
|
||||
if not hasattr(container, 'group'):
|
||||
container.group = container.name
|
||||
if not hasattr(container, 'container_group'):
|
||||
container.container_group = container.group
|
||||
if HIGH_COMPATIBILITY:
|
||||
if self.containers_enabled:
|
||||
real_container = container.group
|
||||
else:
|
||||
real_container = VIRTMASTER
|
||||
container.real_container = real_container
|
||||
if container.group in container_groups:
|
||||
group = container_groups[container.group]
|
||||
else:
|
||||
group = container.group
|
||||
if container_groups.get(container.name) != group:
|
||||
container_groups[container.name] = group
|
||||
container._real_container = group
|
||||
if not HIGH_COMPATIBILITY and self.containers_enabled:
|
||||
container.real_container = group
|
||||
update_values = True
|
||||
|
||||
for container in space.containers.container.values():
|
||||
if not hasattr(container, 'group'):
|
||||
container.group = container.name
|
||||
groupname = container.group
|
||||
groups.setdefault(groupname, []).append(container)
|
||||
for groupname, containers in groups.items():
|
||||
for container in containers:
|
||||
if container.name == 'all':
|
||||
continue
|
||||
#container.container_group = groupname
|
||||
if HIGH_COMPATIBILITY and hasattr(container, 'id'):
|
||||
container.group_id = container.id
|
||||
container.id = space.containers.container[container._real_container].id
|
||||
container.container = container.name
|
||||
for container in space.containers.container.values():
|
||||
container_info = self.objectspace.container()
|
||||
for key, value in vars(container).items():
|
||||
if isinstance(value, str):
|
||||
setattr(container_info, key, value)
|
||||
for key, values in vars(container).items():
|
||||
if not isinstance(values, str) and not isinstance(values, int):
|
||||
if isinstance(values, dict):
|
||||
values = values.values()
|
||||
for value in values:
|
||||
value.container = container_info
|
||||
flattened_elts.setdefault(key, []).append(value)
|
||||
self.generate_containers()
|
||||
if hasattr(self.space, 'files'):
|
||||
del self.space.files
|
||||
self.convert_containers()
|
||||
|
||||
if hasattr(self.space.containers, 'family'):
|
||||
raise Exception('hu?')
|
||||
self.space.containers.family = OrderedDict()
|
||||
self.generate_network_container()
|
||||
for elttype in self.objectspace.container_elt_attr_list:
|
||||
key_type_name = elttype.upper() + '_KEY_TYPE'
|
||||
if key_type_name in globals():
|
||||
key_type = globals()[key_type_name]
|
||||
else:
|
||||
key_type = {}
|
||||
elt = flattened_elts.get(elttype, {})
|
||||
families = self.make_group_from_elts(elttype, elt, key_type,
|
||||
'containers.{}s'.format(elttype), True)
|
||||
if families == [] and not HIGH_COMPATIBILITY:
|
||||
continue
|
||||
family = self.objectspace.family()
|
||||
family.name = elttype + 's'
|
||||
if HIGH_COMPATIBILITY:
|
||||
family.doc = ''
|
||||
family.family = families
|
||||
if HIGH_COMPATIBILITY:
|
||||
family.mode = None
|
||||
self.space.containers.family[elttype + 's'] = family
|
||||
|
||||
def _generate_container_variable(self, name, description, value, family_name, frozen=False):
    """Build a hidden, expert-mode string variable describing a container.

    :param name: variable name (e.g. ``container_ip_<name>``)
    :param description: (translated) documentation string of the variable
    :param value: default value; wrapped below into a one-element value list
    :param family_name: family under which the variable path is registered
    :param frozen: in HIGH_COMPATIBILITY mode only, make the variable frozen,
                   visible, and forced back to its default (used for the
                   'localhost' non-container hack)
    :return: the freshly built variable object
    """
    var_data = {'hidden': True, 'mode': 'expert', 'name': name,
                'doc': description, 'value': value,
                'type': 'string'}
    variable = self.objectspace.variable()
    if HIGH_COMPATIBILITY:
        if frozen:
            var_data['frozen'] = True
            var_data['force_default_on_freeze'] = True
            # a frozen variable stays visible (not hidden) but read-only
            var_data['hidden'] = False
            del var_data['mode']
            variable.mode = None
    for key, value in var_data.items():
        if key == 'value':
            # Value is a list of objects
            val = self.objectspace.value()
            val.name = value
            value = [val]
        setattr(variable, key, value)
    # register the variable in the 'creole' namespace under family_name
    self.paths.append('variable', variable.name, 'creole', family_name, variable)
    return variable
|
||||
|
||||
def _generate_root_container(self, family_name):
    """Generate the four br0 bridge network variables of the root container.

    When containers are enabled the bridge lives on the 192.0.2.0/24
    documentation network; otherwise the loopback addresses are used.

    :param family_name: family the variables are registered under
    :return: OrderedDict mapping variable name -> variable object
    """
    if self.containers_enabled:
        ip_br0 = u'192.0.2.1'
        mask_br0 = u'255.255.255.0'
        network_br0 = u'192.0.2.0'
        bcast_br0 = u'192.0.2.255'
    else:
        ip_br0 = u'127.0.0.1'
        mask_br0 = u'255.0.0.0'
        network_br0 = u'127.0.0.0'
        bcast_br0 = u'127.255.255.255'
    variables = OrderedDict()
    variable = self._generate_container_variable('adresse_ip_br0',
                                                 _(u"Bridge IP address"),
                                                 ip_br0,
                                                 family_name)
    variables[variable.name] = variable
    variable = self._generate_container_variable('adresse_netmask_br0',
                                                 _(u"Bridge IP subnet mask"),
                                                 mask_br0,
                                                 family_name)
    variables[variable.name] = variable
    # HIGH_COMPATIBILITY keeps the historical (misspelled) label
    if HIGH_COMPATIBILITY:
        msg = u"Bridge IP network_br0 address"
    else:
        msg = u"Bridge IP network address"
    variable = self._generate_container_variable('adresse_network_br0',
                                                 _(msg),
                                                 network_br0,
                                                 family_name)
    variables[variable.name] = variable
    variable = self._generate_container_variable('adresse_broadcast_br0',
                                                 _(u"Bridge broadcast IP address"),
                                                 bcast_br0,
                                                 family_name)
    variables[variable.name] = variable
    return variables
|
||||
|
||||
def _get_containers(self):
|
||||
return self.space.containers.container
|
||||
|
||||
def convert_containers(self):
    """Flatten each container object into a 'container<N>' family.

    Scalar attributes of the container become string (or number, for
    'id') variables; 'ip' and 'path' become symlinks to the matching
    ``container_{ip,path}_<name>`` creole variables.  The original
    ``space.containers.container`` dict is deleted once converted.
    """
    idx = 0
    self.space.containers.containers = self.objectspace.containers()
    for name, container in self.space.containers.container.items():
        variables = []
        for key, value in vars(container).items():
            # structural attributes are not exported as variables
            if key in ['container', 'group_id'] or key in ERASED_ATTRIBUTES:
                continue
            # lists/dicts (sub-elements) are handled elsewhere
            if not isinstance(value, list) and not isinstance(value, OrderedDict):
                variable = self.objectspace.variable()
                variable.name = key
                variable.mode = None
                if key == 'id':
                    variable.type = 'number'
                else:
                    variable.type = 'string'
                if HIGH_COMPATIBILITY:
                    variable.doc = ''
                val = self.objectspace.value()
                val.name = value
                variable.value = [val]
                variables.append(variable)
        # 'ip' and 'path' point back to the creole-level variables
        for key in ['ip', 'path']:
            var_path = self.paths.get_variable_path('container_{}_{}'.format(key, name), 'creole')
            variable = self.objectspace.variable()
            variable.name = key
            variable.mode = None
            variable.opt = var_path
            variable.type = 'symlink'
            variables.append(variable)

        family = self.objectspace.family()
        family.name = 'container{}'.format(idx)
        if HIGH_COMPATIBILITY:
            family.doc = ''
        family.variable = variables
        if HIGH_COMPATIBILITY:
            family.mode = None
        setattr(self.space.containers.containers, 'container{}'.format(idx), family)
        idx += 1
    del self.space.containers.container
|
||||
|
||||
def generate_network_container(self):
    """Build the 'network' family of symlinks to the br0 bridge variables.

    Each of the four adresse_*_br0 variables generated by
    _generate_root_container gets an expert-mode symlink inside
    ``containers.family['network']``.
    """
    family = self.objectspace.family()
    family.name = 'network'
    if HIGH_COMPATIBILITY:
        family.doc = ''
        family.mode = None
    variables = []
    for name in ['adresse_ip_br0', 'adresse_netmask_br0',
                 'adresse_network_br0', 'adresse_broadcast_br0']:
        var_path = self.paths.get_variable_path(name, 'creole')
        variable = self.objectspace.variable()
        variable.name = name
        variable.mode = 'expert'
        variable.opt = var_path
        variable.type = 'symlink'
        variables.append(variable)
    family.variable = variables
    self.space.containers.family['network'] = family
|
||||
|
||||
def generate_interfaces(self):
    """Give every real container a bridged 'containers' interface on br0.

    Skipped entirely when containers are disabled; the pseudo-containers
    'all' and 'root' never get one.  The interface is inserted FIRST in
    the container's interface dict so it is configured before any
    user-declared interface.
    """
    if self.containers_enabled:
        for name, container in self._get_containers().items():
            if name in ['all', 'root']:
                continue
            interface = self.objectspace.interface()
            interface.name = 'containers'
            interface.container = name
            interface.linkto = 'br0'
            interface.method = 'bridge'
            # symbolic names resolved later against the creole variables
            interface.ip = 'container_ip_{0}'.format(name)
            interface.mask = 'adresse_netmask_br0'
            interface.bcast = 'adresse_broadcast_br0'
            interface.gateway = 'adresse_ip_br0'
            if not hasattr(container, 'interface'):
                container.interface = OrderedDict()
                container.interface['containers'] = interface
            else:
                # prepend while preserving the existing order
                old = list(container.interface.items())
                old.insert(0, ('containers', interface))
                container.interface = OrderedDict(old)
|
||||
|
||||
def generate_containers(self):
    """Generate the root's container informations.

    Builds the hidden 'Containers' family (bridge variables plus the
    per-container path/ip/name variables) and prepends it to the creole
    variables so it is evaluated before every other family.

    :raises CreoleDictConsistencyError: if a 'containers' family already
        exists in the variable space
    """
    family_description = 'Containers'
    family_name = family_description.lower()
    if family_name in self.space.variables:
        raise CreoleDictConsistencyError(_('{} family already exists').format(family_name))
    variables = self._generate_root_container(family_name)
    self._generate_containers(variables)
    self.paths.append('family', family_name, 'creole')
    family = self.objectspace.family()
    family.name = family_description
    family.doc = _(u'Containers informations')
    family.hidden = True
    if HIGH_COMPATIBILITY:
        family.mode = 'normal'
    family.icon = 'puzzle-piece'
    family.variable = variables
    # this family must be at the beginning
    if hasattr(self.space.variables['creole'], 'family'):
        old_families = list(self.space.variables['creole'].family.items())
        old_families.insert(0, (family_name, family))
        self.space.variables['creole'].family = OrderedDict(old_families)
|
||||
|
||||
def _generate_container_path(self, container):
    """Return the filesystem root of *container*.

    ``None`` for the pseudo-container 'all'; the empty string when
    containers are disabled or for the master host; otherwise
    ``<VIRTROOT>/<group>/<VIRTBASE>`` based on the container's group.
    """
    if container.name == 'all':
        return None
    if not self.containers_enabled or container.name == VIRTMASTER:
        return u''
    group_name = container._real_container
    path = join(VIRTROOT, group_name, VIRTBASE)
    # Python 2: normalise str results to unicode
    if sys.version_info[0] < 3:
        group_name = unicode(group_name)
        path = unicode(path)
    return path
|
||||
|
||||
def _generate_containers(self, variables):
    """Generate the per-container informations variables.

    For every container, adds to *variables*:
    ``container_path_<name>``, ``container_ip_<name>``,
    ``container_name_<name>`` and the 2.3-compatibility alias
    ``adresse_ip_<name>``.

    :param variables: OrderedDict of the 'containers' family, mutated in place
    :raises CreoleDictConsistencyError: on unknown group, missing or
        duplicated container 'id'
    """
    containers = self._get_containers()
    family_name = 'containers'
    # id value -> group name, used to reject duplicated ids
    ids = {}
    for name, container in containers.items():
        if not hasattr(container, 'group'):
            groupname = container.name
        else:
            groupname = container.group
        if name == 'all':
            ipaddr = None
            group_name = u'all'
        else:
            group_name = container._real_container
            if sys.version_info[0] < 3:
                group_name = unicode(group_name)
            if group_name not in containers:
                raise CreoleDictConsistencyError(_('the container "{}" does not exist').format(group_name))
            if not hasattr(containers[group_name], 'id'):
                raise CreoleDictConsistencyError(_('mandatory attribute "id" missing for container '
                                                   '"{}"').format(group_name))
            id_value = containers[group_name].id
            if id_value in ids and ids[id_value] != group_name:
                raise CreoleDictConsistencyError(_('attribute "id" must be unique, but "{}" is used for containers "{}" and "{}"'
                                                  ).format(id_value, group_name, ids[id_value]))
            ids[id_value] = group_name
            if not self.containers_enabled or name == VIRTMASTER:
                ipaddr = u'127.0.0.1'
                group_name = VIRTMASTER
            else:
                # container address on the 192.0.2.0/24 bridge, indexed by id
                group_id = id_value
                ipaddr = u"192.0.2.{}".format(group_id)

        path = self._generate_container_path(container)
        # Variable : container_path_<conteneur>
        path_name = 'container_path_{0}'.format(name)
        variable = self._generate_container_variable(path_name,
                                                     _(u'Path of container {0}').format(name),
                                                     path,
                                                     family_name)
        variables[variable.name] = variable
        # Variable : container_ip_<conteneur>
        ip_name = 'container_ip_{0}'.format(name)
        msg = u'IP address of container {0}'
        variable = self._generate_container_variable(ip_name,
                                                     _(msg).format(
                                                         name),
                                                     ipaddr,
                                                     family_name)
        variables[variable.name] = variable
        # Variable : container_name_<conteneur>
        name_name = 'container_name_{0}'.format(name)
        variable = self._generate_container_variable(name_name,
                                                     _(u'Group name of container {0}').format(
                                                         name),
                                                     group_name,
                                                     family_name)
        variables[variable.name] = variable
        # Variable : adresse_ip_<conteneur>
        # adresse_ip_<container> added for compat 2.3 (#5701, #5868)
        address_name = 'adresse_ip_{0}'.format(name)
        # HIGH_COMPATIBILITY keeps the historical (wrong) label text
        if HIGH_COMPATIBILITY:
            msg = u'Path of container {0}'
        else:
            msg = u'IP address of container {0}'
        if not self.paths.path_is_defined(address_name):
            if not self.containers_enabled:
                # hack to have "localhost" in non container mode #7183
                variable = self._generate_container_variable(address_name,
                                                             _(msg).format(
                                                                 name),
                                                             'localhost',
                                                             family_name,
                                                             frozen=True)
            else:
                # NOTE(review): `variable` here is still the container_name
                # variable from above — confirm this creoleobj is intended
                self.paths.append('variable', address_name, 'creole', family_name, variable)
                path = self.paths.get_variable_path(address_name, 'creole')
                var_path = self.paths.get_variable_path(ip_name, 'creole')
                # adresse_ip_<name> is a symlink to container_ip_<name>
                variable = self.objectspace.variable()
                variable.name = address_name
                variable.path = path
                variable.mode = 'expert'
                variable.opt = var_path
                variable.type = 'symlink'
            variables[variable.name] = variable
|
||||
|
||||
def _generate_element(self, eltname, name, value, type_, subpath, multi=False):
    """Build one variable for a container sub-element attribute.

    :param eltname: kind of the parent element (e.g. 'file', 'service')
    :param name: attribute name, becomes the variable name
    :param value: attribute value ('opt' target when type_ is 'symlink')
    :param type_: variable type; 'string' may be promoted to 'choice'
        when a forced_choice_option is declared for (eltname, name)
    :param subpath: dotted path of the enclosing family
    :param multi: whether the value is a list of valued objects
    :return: the variable, registered under ``<subpath>.<name>``
    """
    var_data = {'name': name, 'doc': '', 'value': value,
                'auto_freeze': False, 'mode': None, 'multi': multi}
    values = None
    if type_ == 'string':
        # a declared forced choice turns the string into a choice variable
        values = self.objectspace.forced_choice_option.get(eltname, {}).get(name)
        if values is not None:
            type_ = 'choice'
    var_data['type'] = type_

    variable = self.objectspace.variable()
    if not HIGH_COMPATIBILITY:
        variable.mandatory = True
    for key, value in var_data.items():
        if key == 'value':
            if value is None:
                continue
            if type_ == 'symlink':
                # symlinks store their target in 'opt', not 'value'
                key = 'opt'
            else:
                # Value is a list of objects
                if not multi:
                    val = self.objectspace.value()
                    val.name = value
                    value = [val]
                else:
                    value_list = []
                    for valiter in value:
                        val = self.objectspace.value()
                        val.name = valiter.name
                        value_list.append(val)
                    value = value_list
        if key == 'doc' and type_ == 'symlink':
            # symlinks inherit the doc of their target
            continue
        setattr(variable, key, value)
    if values is not None:
        choices = []
        for value in values:
            choice = self.objectspace.choice()
            if sys.version_info[0] < 3:
                choice.name = unicode(value, 'utf8')
            else:
                choice.name = value
            choices.append(choice)
        variable.choice = choices
    path = '{}.{}'.format(subpath, name)
    self.paths.append('variable', path, 'containers', 'containers', variable)
    return variable
|
||||
|
||||
def _make_disknod_auto(self, type_, index, variable):
|
||||
if not hasattr(self.space.constraints, 'auto'):
|
||||
self.space.constraints.auto = []
|
||||
auto = self.objectspace.auto()
|
||||
self.objectspace.index += 1
|
||||
auto.index = self.objectspace.index
|
||||
auto.namespace = 'containers'
|
||||
param1 = self.objectspace.param()
|
||||
param1.text = type_
|
||||
param2 = self.objectspace.param()
|
||||
param2.text = variable.name
|
||||
auto.param = [param1, param2]
|
||||
auto.name = 'cdrom_minormajor'
|
||||
family = 'disknod{}'.format(index)
|
||||
auto.target = 'containers.disknods.{}.{}'.format(family, type_)
|
||||
if not hasattr(self.space, 'constraints'):
|
||||
self.space.constraints = self.objectspace.constraints()
|
||||
self.space.constraints.auto.append(auto)
|
||||
|
||||
def _make_disknod_type(self, index, variable):
|
||||
auto = self.objectspace.auto()
|
||||
self.objectspace.index += 1
|
||||
auto.index = self.objectspace.index
|
||||
auto.namespace = 'containers'
|
||||
param = self.objectspace.param()
|
||||
param.text = variable.name
|
||||
auto.param = [param]
|
||||
auto.name = 'device_type'
|
||||
family = 'disknod{}'.format(index)
|
||||
auto.target = 'containers.disknods.{}.type'.format(family)
|
||||
if not hasattr(self.space, 'constraints'):
|
||||
self.space.constraints = self.objectspace.constraints()
|
||||
if not hasattr(self.space.constraints, 'auto'):
|
||||
self.space.constraints.auto = []
|
||||
self.space.constraints.auto.append(auto)
|
||||
|
||||
|
||||
def _update_disknod(self, disknod, index):
|
||||
disknod.major = None
|
||||
disknod.minor = None
|
||||
disknod.type = None
|
||||
self._make_disknod_auto('minor', index, disknod)
|
||||
self._make_disknod_auto('major', index, disknod)
|
||||
self._make_disknod_type(index, disknod)
|
||||
disknod.mode = u'rwm'
|
||||
disknod.permission = 'allow'
|
||||
|
||||
def _update_file(self, file_, index):
    """Complete a file element with its target and template source paths.

    ``full_name`` is the target prefixed by the container root (when the
    container has one); ``source`` is resolved inside templatedir,
    defaulting to the basename of the file name.
    """
    # take care of os.path.join and absolute part after first argument:
    # join() discards everything before an absolute component, so the
    # name must be made relative before prefixing it.
    target = file_.name
    if target[0] == '/':
        relative = target[1:]
    else:
        relative = target

    container_path = self._generate_container_path(file_.container)
    if container_path:
        file_.full_name = join(container_path, relative)
    else:
        file_.full_name = target

    if hasattr(file_, 'source'):
        template = file_.source
    else:
        template = basename(relative)
    file_.source = join(templatedir, template)
|
||||
|
||||
def _split_elts(self, name, key, value, elt):
    """for example::

        <service_access service='ntp'>
            <port protocol='udp' service_accesslist='ntp_udp'>123</port>
            <tcpwrapper>ntpd</tcpwrapper>
        </service_access>

    builds a `service_access` object, but we need **two** objects `service_access`,
    for example one for the port and one for the tcpwrapper

    Generator: yields one copy of *elt* per sub-element in *value*, with
    the sub-element's attributes merged in, tagged with 'node_name' (and
    'name_type' when the sub-element carries a DTD type).

    :raises CreoleDictConsistencyError: on attribute collision between
        the parent element and a sub-element
    """
    for subelt in value:
        new_elt = copy(elt)
        for subsubelt in dir(subelt):
            # skip private attributes and the ordering index
            if subsubelt.startswith('_') or subsubelt == 'index':
                continue
            if hasattr(new_elt, subsubelt):
                if hasattr(elt, 'name'):
                    name_ = elt.name
                else:
                    name_ = elt.service
                raise CreoleDictConsistencyError(_('attribute {} already exists '
                                                   'for {}').format(subsubelt,
                                                                    name_))
            setattr(new_elt, subsubelt, getattr(subelt, subsubelt))
        if hasattr(new_elt, 'node_name') or hasattr(new_elt, 'name_type'):
            raise CreoleDictConsistencyError(_('attribute node_name or name_type '
                                               'already exists for {}'
                                               '').format(name))
        if hasattr(subelt, key + '_type'):
            type_ = getattr(subelt, key + '_type')
            setattr(new_elt, 'name_type', type_)
        # remember which sub-element tag this copy came from
        setattr(new_elt, 'node_name', key)
        if not hasattr(new_elt, name + 'list'):
            # attach an automatic activation list named after the service
            setattr(new_elt, name + 'list', '___auto_{}'.format(elt.service))
        else:
            # already listed: record the extra condition for later wiring
            self.extra_conditions[new_elt] = '___auto_{}'.format(elt.service)
        yield new_elt
|
||||
|
||||
def _reorder_elts(self, name, elts, duplicate_list):
    """Reorders by index the elts (the interface,
    the hosts, actions...)

    Elements without an 'index' attribute come first (in original
    order), then the indexed ones sorted by index.  When
    *duplicate_list* is true, elements owning list attributes are split
    by _split_elts into one element per sub-element.

    :return: list of {'elt_name': ..., 'elt': ...} dicts, grouped by
        element name
    """
    dict_elts = OrderedDict()
    # reorder elts by index
    new_elts = {}
    not_indexed = []
    for elt in elts:
        if not hasattr(elt, 'index'):
            not_indexed.append(elt)
        else:
            idx = elt.index
            new_elts.setdefault(idx, []).append(elt)
    idxes = list(new_elts.keys())
    idxes.sort()
    # un-indexed elements first, then indexed ones in index order
    elts = not_indexed
    for idx in idxes:
        elts.extend(new_elts[idx])
    for idx, elt in enumerate(elts):
        elt_added = False
        for key in dir(elt):
            # skip private attributes, DTD type markers and erased ones
            if key.startswith('_') or key.endswith('_type') or key in ERASED_ATTRIBUTES:
                continue
            value = getattr(elt, key)
            if isinstance(value, list) and duplicate_list:
                # split one element per list entry (see _split_elts)
                for new_elt in self._split_elts(name, key, value, elt):
                    dict_elts.setdefault(new_elt.name, []).append({'elt_name': key,
                                                                   'elt': new_elt})
                elt_added = True
        if not elt_added:
            # fall back to the position when the element is anonymous
            if hasattr(elt, 'name'):
                eltname = elt.name
            else:
                eltname = idx
            dict_elts.setdefault(eltname, []).append({'elt_name': name, 'elt': elt})

    result_elts = []
    for elt in dict_elts.values():
        result_elts.extend(elt)
    return result_elts
|
||||
|
||||
|
||||
def make_group_from_elts(self, name, elts, key_type, path, duplicate_list):
    """Splits each objects into a group (and `OptionDescription`, in tiramisu terms)
    and build elements and its attributes (the `Options` in tiramisu terms)

    :param name: element kind (e.g. 'file', 'service', 'action')
    :param elts: elements to convert
    :param key_type: attribute name -> variable type overrides
    :param path: dotted path prefix of the generated families
    :param duplicate_list: split elements owning list attributes
        (see _reorder_elts / _split_elts)
    :return: list of generated family objects, one per kept element
    """
    index = 0
    families = []
    new_elts = self._reorder_elts(name, elts, duplicate_list)
    for elt_info in new_elts:
        elt = elt_info['elt']
        elt_name = elt_info['elt_name']
        # optional per-kind hook, e.g. _update_file, _update_disknod
        update_elt = '_update_' + elt_name
        if hasattr(self, update_elt):
            getattr(self, update_elt)(elt, index)
        if hasattr(elt, 'instance_mode'):
            instance_mode = elt.instance_mode
        else:
            instance_mode = 'always'
        # drop elements whose instance_mode excludes the current mode
        if ((instance_mode == 'when_container' and not self.containers_enabled) or
                (instance_mode == 'when_no_container' and self.containers_enabled)):
            continue
        variables = []
        subpath = '{}.{}{}'.format(path, name, index)
        listname = '{}list'.format(name)
        if name not in NOT_NEED_ACTIVATE:
            activate_path = '.'.join([subpath, 'activate'])
            # conditions recorded by _split_elts drive this 'activate'
            if elt in self.extra_conditions:
                self.objectspace.list_conditions.setdefault(listname,
                                                            {}).setdefault(
                                                                self.extra_conditions[elt],
                                                                []).append(activate_path)
        for key in dir(elt):
            if key.startswith('_') or key.endswith('_type') or key in ERASED_ATTRIBUTES:
                continue
            value = getattr(elt, key)
            # list attributes were already split away when duplicate_list
            if isinstance(value, list) and duplicate_list:
                continue
            if key == listname:
                if name not in NOT_NEED_ACTIVATE:
                    self.objectspace.list_conditions.setdefault(listname,
                                                                {}).setdefault(
                                                                    value,
                                                                    []).append(activate_path)
                if not HIGH_COMPATIBILITY:
                    continue
            if key == 'container':
                # expose the container, its group and the real container
                variables.append(self._generate_element(elt_name, key, value.container,
                                                        'string', subpath))
                variables.append(self._generate_element(elt_name, 'container_group',
                                                        value.container_group,
                                                        'string', subpath))
                if HIGH_COMPATIBILITY:
                    if not self.containers_enabled:
                        real_container = value.real_container
                    else:
                        real_container = value._real_container
                    variables.append(self._generate_element(elt_name, 'real_container',
                                                            real_container,
                                                            'string', subpath))
            else:
                # type resolution: booleans_attributs, then key_type
                # overrides, then the DTD '<key>_type' attribute
                default_type = 'string'
                if key in self.objectspace.booleans_attributs:
                    default_type = 'boolean'
                type_ = key_type.get(key, default_type)
                dtd_key_type = key + '_type'
                if hasattr(elt, dtd_key_type):
                    type_ = KEY_TYPE[getattr(elt, dtd_key_type)]
                if isinstance(value, list):
                    variables.append(self._generate_element(elt_name, key, value, type_,
                                                            subpath, True))
                else:
                    variables.append(self._generate_element(elt_name, key, value, type_,
                                                            subpath, False))
        if name not in NOT_NEED_ACTIVATE:
            variables.append(self._generate_element(name, 'activate', True, 'boolean', subpath))
        family = self.objectspace.family()
        family.name = '{}{}'.format(name, index)
        if HIGH_COMPATIBILITY:
            family.doc = ''
        family.variable = variables
        if HIGH_COMPATIBILITY:
            family.mode = None
        self.paths.append('family', subpath, 'containers', creoleobj=family)
        families.append(family)
        index += 1
    return families
|
||||
|
||||
|
||||
class ActionAnnotator(ContainerAnnotator):
    """Convert ``space.family_action`` into the 'actions' family tree.

    Reuses make_group_from_elts / _generate_element from
    ContainerAnnotator; each action namespace may appear only once.
    """

    def __init__(self, space, paths, objectspace):
        self.space = space
        self.paths = paths
        self.objectspace = objectspace
        # no split elements here, so no extra activation conditions
        self.extra_conditions = []
        if hasattr(space, 'family_action'):
            actions = self.objectspace.family()
            actions.name = 'actions'
            if HIGH_COMPATIBILITY:
                actions.mode = None
            actions.family = []
            self.space.actions = actions
            namespaces = []
            # NOTE: the loop variable rebinds `actions` to each
            # family_action entry; the root family is self.space.actions
            for name, actions in space.family_action.items():
                subpath = 'actions.{}'.format(normalize_family(name))
                for action in actions.action:
                    namespace = action.namespace
                    if namespace in namespaces:
                        raise CreoleDictConsistencyError(_('only one action allow for {}'
                                                           '').format(namespace))
                    namespaces.append(namespace)
                    action.name = action.namespace
                new_actions = self.make_group_from_elts('action', actions.action, {}, subpath, False)
                family = self.objectspace.family()
                family.name = actions.name
                family.family = new_actions
                if HIGH_COMPATIBILITY:
                    family.mode = None
                variables = []
                # every non-erased attribute becomes a string variable
                for key, value in vars(actions).items():
                    if key not in ERASED_FAMILY_ACTION_ATTRIBUTES:
                        variables.append(self._generate_element('action', key, value, 'string',
                                                                subpath))
                family.variable = variables
                self.space.actions.family.append(family)
            del space.family_action
|
||||
|
||||
|
||||
class SpaceAnnotator(object):
|
||||
"""Transformations applied on a CreoleObjSpace instance
|
||||
"""
|
||||
def __init__(self, space, paths, objectspace, eosfunc_file):
    """Run every annotation pass over *space*, in order.

    :param space: the CreoleObjSpace root to annotate (mutated in place)
    :param paths: path registry shared by all annotators
    :param objectspace: factory of creole objects
    :param eosfunc_file: python file of eos functions, loaded as module
    """
    self.paths = paths
    self.space = space
    self.objectspace = objectspace
    self.valid_enums = {}
    self.force_value = {}
    self.has_calc = []
    self.force_no_value = []
    self.force_not_mandatory = []
    # NOTE(review): imp is deprecated since Python 3.4 — consider
    # importlib when dropping Python 2 support
    self.eosfunc = imp.load_source('eosfunc', eosfunc_file)
    if HIGH_COMPATIBILITY:
        self.default_has_no_value = []
        self.has_frozen_if_in_condition = []
    try:
        self.default_variable_options(space.variables)
    except AttributeError:
        # no 'variables' attribute on the space at all
        raise CreoleDictConsistencyError(_('No configuration variables available in the configuration set'))

    for family in space.variables.values():
        if hasattr(family, 'family'):
            self.change_variable_auto_freeze(family.family, family.name)
    if 'group' in vars(space.constraints):
        self.transform_groups(space.constraints.group, space)
    if hasattr(space.constraints, 'check'):
        self.filter_check(space.constraints.check)
    if 'condition' in vars(space.constraints):
        self.filter_condition(space.constraints.condition)
    self._parse_object_space(space, None)
    # valid_enums must be empty now (all information are store in objects)
    if self.valid_enums:
        raise CreoleDictConsistencyError(_('valid_enum sets for unknown variables {}').format(self.valid_enums.keys()))
    self.filter_autofill(space)
    for family in space.variables.values():
        if not HIGH_COMPATIBILITY:
            self.remove_empty_family(family.family)
        if hasattr(family, 'family'):
            self.change_variable_mode(family.family)
        if not HIGH_COMPATIBILITY:
            self.change_family_mode(family.family)
        if (hasattr(family, 'separators') and
                hasattr(family.separators, 'separator')):
            self.filter_separator(family.separators.separator)
    self.absolute_path_for_symlink_in_containers(space.containers.family.values())
    if 'help' in vars(space):
        self.transform_helps(space.help)
|
||||
|
||||
def absolute_path_for_symlink_in_containers(self, families):
    """Resolve relative symlink targets of container variables into
    absolute creole paths (a dotted name is already absolute)."""
    for family in families:
        if not hasattr(family, 'family'):
            continue
        for subfamily in family.family:
            for variable in subfamily.variable:
                if variable.type == 'symlink' and '.' not in variable.name:
                    variable.opt = self.paths.get_variable_path(variable.opt, 'creole')
|
||||
|
||||
def transform_helps(self, helps):
    """Rename every help entry (variable and family) after the path it
    documents, so helps can be matched by path later on."""
    for kind in ('variable', 'family'):
        entries = getattr(helps, kind, None)
        if entries is None:
            continue
        for entry in entries.values():
            entry.name = entry.path
|
||||
|
||||
def transform_groups(self, groups, space):  # pylint: disable=C0111
    """Fold each group's slave variables under their Master variable.

    Walks the master's family in declaration order: once the master is
    found (or promoted to a Master object), the following declared
    slaves are moved inside it and forced to multi.  The for/else raises
    when the loop ends without having collected every slave.

    :raises CreoleDictConsistencyError: non-multi master, or
        master/slaves not found in the family
    """
    for group in groups:
        master_fullname = group.master
        slave_names = list(group.slave.keys())
        try:
            master_family_name = self.paths.get_variable_family_name(master_fullname)
        except CreoleDictConsistencyError as err:
            # in compatibility mode, silently skip unknown masters
            if HIGH_COMPATIBILITY:
                continue
            raise err
        namespace = self.paths.get_variable_namespace(master_fullname)
        master_name = self.paths.get_variable_name(master_fullname)
        master_family = space.variables[namespace].family[master_family_name]
        master_path = namespace + '.' + master_family_name
        is_master = False
        # snapshot: the dict is mutated (pop) while walking
        for variable_name, variable in list(master_family.variable.items()):
            if isinstance(variable, self.objectspace.Master):
                if variable.name == master_name:
                    master_space = variable
                    is_master = True
            else:
                if is_master:
                    # slaves are multi
                    if variable_name in slave_names:
                        variable.multi = True
                        slave_names.remove(variable_name)
                        master_family.variable.pop(variable_name)
                        master_space.variable.append(variable)  # pylint: disable=E1101
                        if namespace == 'creole':
                            variable_fullpath = variable_name
                        else:
                            variable_fullpath = master_path + '.' + variable_name
                        self.paths.set_master(variable_fullpath, master_name)
                        # all slaves collected: group is complete
                        if slave_names == []:
                            break
                if is_master is False and variable_name == master_name:
                    # promote the plain variable to a Master object
                    master_space = self.objectspace.Master()
                    master_space.variable = []
                    master_space.name = master_name
                    # manage master's variable
                    if variable.multi is not True:
                        raise CreoleDictConsistencyError(_('the variable {} in a group must be multi').format(variable.name))
                    master_family.variable[master_name] = master_space
                    master_space.variable.append(variable)  # pylint: disable=E1101
                    self.paths.set_master(master_fullname, master_name)
                    master_space.path = master_fullname
                    is_master = True
        else:  # pragma: no cover
            # loop exhausted without break: some slave (or the master)
            # was never found in the family
            raise CreoleDictConsistencyError(_('cannot found a master {} '
                                               'nor a slave {}').format(master_name,
                                                                        slave_names))
    del space.constraints.group
|
||||
|
||||
def _parse_object_space(self, space, namespace, node_name='creole', parent_path=None, ishelp=False):
    """Recursively walk *space*'s attributes and annotate leaf values.

    :param namespace: current variable namespace; None at the root,
        where 'containers' and 'actions' subtrees are skipped
    :param node_name: attribute name of *space* in its parent
    :param ishelp: True once inside the 'help' subtree
    """
    space_is_help = ishelp
    # snapshot: annotation may add/remove attributes while iterating
    vars_space = dict(vars(space))
    for name, subspace in vars_space.items():
        if namespace is None and name in ['containers', 'actions']:
            continue
        if space_is_help is False:
            # entering (or leaving) the help subtree at this level
            ishelp = name == 'help'
        self._parse_subobject_space(name, node_name, space, subspace, parent_path, namespace, ishelp)
|
||||
|
||||
def _parse_subobject_space(self, name, node_name, parent, space, parent_path, namespace, ishelp):  # pylint: disable=R0913
    """Dispatch one attribute value of the object-space walk.

    dicts are treated as lists of their values (keys kept only at the
    root to derive each child's namespace); lists recurse through
    _parse_object_space; Atom containers recurse attribute by
    attribute; scalars are annotated in place.
    """
    keys = None
    if isinstance(space, dict):
        if namespace is None:
            # keys are needed to compute the namespace of each child
            keys = list(space.keys())
        space = list(space.values())

    if isinstance(space, list):
        for idx, subspace in enumerate(space):
            if keys is not None and namespace is None:
                # root level: derive the namespace from the dict key
                if subspace.__class__.__name__ == 'Variable':
                    current_namespace = self.paths.get_variable_namespace(keys[idx])
                elif subspace.__class__.__name__ == 'Variables':
                    current_namespace = keys[idx]
                else:
                    current_namespace = self.paths.get_family_namespace(normalize_family(keys[idx],
                                                                                        check_name=False))
            else:
                current_namespace = namespace
            if hasattr(parent, 'path'):
                parent_path = parent.path
            else:
                parent_path = None
            self._parse_object_space(subspace, current_namespace, name, parent_path, ishelp)
    elif isinstance(space, self.objectspace.Atom):
        for subname, subspace in vars(space).items():
            self._parse_subobject_space(subname, name, space, subspace, None, namespace, ishelp)
    else:
        # leaf value: resolve paths and normalize family names
        self.absolute_paths_annotation(name, node_name, parent, space, parent_path, namespace, ishelp)
        self.uppercase_family_name(name, node_name, parent, space)
|
||||
|
||||
def remove_empty_family(self, space):  # pylint: disable=C0111,R0201
    """Drop families that own no variable, and their help entries.

    :param space: family name -> family dict, mutated in place

    Bug fix: both loops previously deleted entries from the very dict
    they were iterating (``space.items()`` and ``help.family.keys()``),
    which raises ``RuntimeError: dictionary changed size during
    iteration`` on Python 3; iterate over snapshots instead.
    """
    removed_families = []
    for family_name, family in list(space.items()):
        if not hasattr(family, 'variable') or len(family.variable) == 0:
            removed_families.append(family_name)
            del space[family_name]
    # remove help too
    if hasattr(self.space, 'help') and hasattr(self.space.help, 'family'):
        for family in list(self.space.help.family.keys()):
            if family in removed_families:
                del self.space.help.family[family]
|
||||
|
||||
def uppercase_family_name(self, name, node_name, parent, value):  # pylint: disable=C0111,R0201
    """Normalize the case of a family name.

    Family names that already start with an uppercase letter are kept
    untouched, so acronyms such as 'ARV' are preserved. Otherwise the
    name is capitalized, unless HIGH_COMPATIBILITY keeps it verbatim.
    """
    if name != 'name' or node_name != 'family':
        return
    if value[0].isupper():
        # already starts uppercase (possibly an acronym): keep as-is
        return
    parent.name = value if HIGH_COMPATIBILITY else value.capitalize()
|
||||
|
||||
def change_family_mode(self, families):  # pylint: disable=C0111
    """Set each family's mode to the lowest mode of its variables.

    Master variables take the mode of their first (master) variable.
    Under HIGH_COMPATIBILITY the 'Containers' family is pinned to
    'normal' regardless of its variables.
    """
    for family in families.values():
        # start from the highest level and lower it as variables require
        mode = modes_level[-1]
        for variable in family.variable.values():
            if isinstance(variable, self.objectspace.Master):
                # a master group inherits the mode of its master variable
                variable_mode = variable.variable[0].mode
                variable.mode = variable_mode
            else:
                variable_mode = variable.mode
            if variable_mode is not None and modes[mode] > modes[variable_mode]:
                mode = variable_mode
        if HIGH_COMPATIBILITY and family.name == 'Containers':
            family.mode = 'normal'
        else:
            family.mode = mode
|
||||
|
||||
def _annotate_variable(self, variable, family_mode, is_slave=False):
    """Adjust one variable's mode/mandatory/frozen flags.

    Applies the mode-promotion rules (mandatory variables without values
    drop to basic mode, variables never fall below their family's mode),
    and the hidden->frozen propagation.

    :param variable: variable object to annotate (mutated in place)
    :param family_mode: mode of the enclosing family
    :param is_slave: True when the variable is a slave in a master group
    """
    # choice variables with no explicit default drop to the basic mode
    if (HIGH_COMPATIBILITY and variable.type == 'choice' and variable.mode != modes_level[-1] and variable.mandatory is True and variable.path in self.default_has_no_value):
        variable.mode = modes_level[0]
    if variable.type == 'choice' and is_slave and family_mode == modes_level[0] and variable.mandatory is True:
        variable.mode = modes_level[0]
    # if the variable is mandatory and doesn't have any value
    # then the variable's mode is set to 'basic'
    has_value = hasattr(variable, 'value')
    if (variable.path not in self.has_calc and variable.mandatory is True and
            (not has_value or is_slave) and variable.type != 'choice'):
        variable.mode = modes_level[0]
    if has_value:
        # a variable carrying a value becomes mandatory, except (in
        # compatibility mode) container variables and forced exceptions
        if not HIGH_COMPATIBILITY or (not variable.path.startswith('creole.containers.') \
                and variable.path not in self.force_no_value and variable.path not in self.force_not_mandatory):
            variable.mandatory = True
    if variable.hidden is True:
        # hidden implies frozen; freeze to default unless auto_save or
        # an explicit force_default_on_freeze already decided
        variable.frozen = True
        if not variable.auto_save is True and 'force_default_on_freeze' not in vars(variable):
            variable.force_default_on_freeze = True
    if variable.name == 'frozen' and not variable.auto_save is True:
        variable.force_default_on_freeze = True
    # never let a variable's mode stay below its family's mode
    if variable.mode != None and not is_slave and modes[variable.mode] < modes[family_mode]:
        variable.mode = family_mode
    if variable.mode != None and variable.mode != modes_level[0] and modes[variable.mode] < modes[family_mode]:
        variable.mode = family_mode
    if variable.name == "available_probes":
        # probe results must not be reset when frozen
        variable.force_default_on_freeze = False
|
||||
|
||||
def default_variable_options(self, variables):
    """Fill in implicit per-variable attributes.

    Variables without a type default to 'string'; variables without a
    description (except symlinks) reuse their own name as description.
    """
    for namespace_families in variables.values():
        if not hasattr(namespace_families, 'family'):
            continue
        for family in namespace_families.family.values():
            if not hasattr(family, 'variable'):
                continue
            for variable in family.variable.values():
                if not hasattr(variable, 'type'):
                    variable.type = 'string'
                # symlinks take their description from their target
                needs_description = (variable.type != 'symlink'
                                     and not hasattr(variable, 'description'))
                if needs_description:
                    variable.description = variable.name
|
||||
|
||||
def change_variable_auto_freeze(self, families, namespace):  # pylint: disable=C0111
    """Turn every auto_freeze variable into an 'auto_frozen_if_in' condition.

    For each auto_freeze variable, append a condition that freezes it
    once the module is instantiated (source 'module_instancie' equals
    'oui').

    :param families: dict of family objects to scan
    :param namespace: namespace used to build the target path
    """
    for family in families.values():
        if hasattr(family, 'variable'):
            for variable in family.variable.values():
                if variable.auto_freeze:
                    new_condition = self.objectspace.condition()
                    new_condition.name = 'auto_frozen_if_in'
                    new_condition.namespace = namespace
                    new_condition.source = 'module_instancie'
                    new_param = self.objectspace.param()
                    new_param.text = 'oui'
                    new_condition.param = [new_param]
                    new_target = self.objectspace.target()
                    new_target.type = 'variable'
                    # creole variables are addressed by bare name,
                    # others by their fully-qualified dotted path
                    if namespace == 'creole':
                        path = variable.name
                    else:
                        path = namespace + '.' + family.name + '.' + variable.name
                    new_target.name = path
                    new_condition.target = [new_target]
                    self.space.constraints.condition.append(new_condition)
|
||||
|
||||
def change_variable_mode(self, families):  # pylint: disable=C0111
    """Annotate every variable's mode, with master/slave handling.

    Slaves of a master group may not be auto_save or auto_freeze; the
    master group's mode is derived from its variables (exact rule
    depends on HIGH_COMPATIBILITY). Plain variables are annotated
    directly via :meth:`_annotate_variable`.
    """
    for family in families.values():
        family_mode = family.mode
        if hasattr(family, 'variable'):
            for variable in family.variable.values():

                if isinstance(variable, self.objectspace.Master):
                    # start at the highest mode and lower it per slave
                    mode = modes_level[-1]
                    for slave in variable.variable:
                        if slave.auto_save is True:
                            raise CreoleDictConsistencyError(_('master/slaves {} '
                                                               'could not be '
                                                               'auto_save').format(slave.name))
                        if slave.auto_freeze is True:
                            raise CreoleDictConsistencyError(_('master/slaves {} '
                                                               'could not be '
                                                               'auto_freeze').format(slave.name))
                        # the master variable itself shares the group's
                        # name; only the other members are real slaves
                        if HIGH_COMPATIBILITY and variable.name != slave.name:  # and variable.variable[0].mode != modes_level[0]:
                            is_slave = True
                        else:
                            is_slave = False
                        self._annotate_variable(slave, family_mode, is_slave)
                        if HIGH_COMPATIBILITY:
                            # master's variable are right
                            if modes[variable.variable[0].mode] > modes[slave.mode]:
                                slave.mode = variable.variable[0].mode
                        else:
                            # auto_save's variable is set in 'basic' mode if its mode is 'normal'
                            if slave.auto_save is True and slave.mode != modes_level[-1]:
                                slave.mode = modes_level[0]
                            if modes[mode] > modes[slave.mode]:
                                mode = slave.mode
                    if not HIGH_COMPATIBILITY:
                        # the master's mode is the lowest
                        variable.variable[0].mode = mode
                    variable.mode = variable.variable[0].mode
                else:
                    # auto_save's variable is set in 'basic' mode if its mode is 'normal'
                    if variable.auto_save is True and variable.mode != modes_level[-1]:
                        variable.mode = modes_level[0]
                    # auto_freeze's variable is set in 'basic' mode if its mode is 'normal'
                    if variable.auto_freeze is True and variable.mode != modes_level[-1]:
                        variable.mode = modes_level[0]
                    self._annotate_variable(variable, family_mode)
|
||||
|
||||
def absolute_paths_annotation(self, name, node_name, parent, value, parent_path, namespace, ishelp):  # pylint: disable=C0111,R0913
    """Resolve a variable/family name to its absolute path and apply
    valid_enum / forced-choice annotations.

    Only acts on 'name' attributes of variable and family nodes that do
    not yet carry a path. Converts FORCE_CHOICE types and registered
    valid_enums into ChoiceOption-style 'choice' lists, applies forced
    values, and finally stores the absolute path on *parent*.
    """
    if hasattr(parent, 'path'):
        # already annotated on a previous pass
        return
    if name == 'name' and node_name in ['variable', 'family']:
        if node_name == 'family':
            family_name = normalize_family(value, check_name=False)
            subpath = self.paths.get_family_path(family_name, namespace)
            namespace = self.paths.get_family_namespace(family_name)
        else:
            # variable: a bare known name is used as-is, otherwise it is
            # qualified with the parent's path
            if self.paths.path_is_defined(value):
                value_name = value
            else:
                value_name = parent_path + '.' + value
            if namespace is None:
                namespace = self.paths.get_variable_namespace(value)
            subpath = self.paths.get_variable_path(value_name, namespace)
        # types listed in FORCE_CHOICE always become a fixed choice list
        if not ishelp and hasattr(parent, 'type') and parent.type in FORCE_CHOICE:
            if subpath in self.valid_enums:
                raise CreoleDictConsistencyError(_('cannot set valid enum for variable with type {}').format(parent.type))
            parent.choice = []
            for value in FORCE_CHOICE[parent.type]:
                choice = self.objectspace.choice()
                # Python 2 needs explicit unicode decoding
                if sys.version_info[0] < 3:
                    choice.name = unicode(value, 'utf8')
                else:
                    choice.name = str(value)
                parent.choice.append(choice)
            parent.type = 'choice'
            if not HIGH_COMPATIBILITY:
                parent.mandatory = True
            if parent.choice == []:
                raise CreoleDictConsistencyError(_('empty valid enum is not allowed for variable {}').format(value_name))
        # remember the pre-choice type so the default value keeps it
        if hasattr(parent, 'type') and parent.type != 'choice':
            orig_type = parent.type
        else:
            orig_type = None
        if not ishelp and subpath in self.valid_enums:
            values = self.valid_enums[subpath]['values']
            if isinstance(values, list):
                parent.choice = []
                choices = []
                for value in values:
                    choice = self.objectspace.choice()
                    if sys.version_info[0] < 3:
                        choice.name = unicode(value)
                    else:
                        choice.name = str(value)
                    choices.append(choice.name)
                    choice.type = parent.type
                    parent.choice.append(choice)
                # existing values must belong to the enumerated choices
                if hasattr(parent, 'value'):
                    for value in parent.value:
                        value.type = parent.type
                        if value.name not in choices:
                            raise CreoleDictConsistencyError(_('value "{}" of variable "{}" is not in list of all expected values ({})').format(value.name, parent.name, choices))
                if parent.choice == []:
                    raise CreoleDictConsistencyError(_('empty valid enum is not allowed for variable {}').format(value_name))
            else:
                # probe choice
                parent.choice = values
            parent.type = 'choice'
            del(self.valid_enums[subpath])
        # apply a forced value only when no value is already present
        if not ishelp and subpath in self.force_value:
            if not hasattr(parent, 'value'):
                new_value = self.objectspace.value()
                new_value.name = self.force_value[subpath]
                parent.value = [new_value]
                self.force_no_value.append(subpath)
        if not ishelp and hasattr(parent, 'type') and parent.type == 'choice':
            # if choice with no value, set value with the first choice
            if not hasattr(parent, 'value'):
                no_value = False
                # multi-valued choices stay without a default in
                # compatibility mode
                if HIGH_COMPATIBILITY and parent.multi:
                    no_value = True
                if not no_value:
                    new_value = self.objectspace.value()
                    new_value.name = parent.choice[0].name
                    new_value.type = orig_type
                    if HIGH_COMPATIBILITY:
                        self.default_has_no_value.append(subpath)
                    parent.value = [new_value]
                    self.force_no_value.append(subpath)
        parent.path = subpath
    # NOTE(review): separators are intentionally left untouched here;
    # they are handled later by filter_separator
    if name == 'name' and node_name == 'separator':
        pass
|
||||
|
||||
def get_variable(self, name):  # pylint: disable=C0111
    """Return the variable object registered under *name* in the paths index."""
    return self.paths.get_variable_obj(name)
|
||||
|
||||
def filter_autofill(self, space):  # pylint: disable=C0111
    """Normalize the 'auto' and 'fill' constraints of *space*.

    Duplicates are removed first, then autos and fills are processed by
    their dedicated filters when present.
    """
    constraints = space.constraints
    self.filter_duplicate_autofill(constraints)
    if 'auto' in vars(constraints):
        self.filter_auto(constraints.auto, space)
    if 'fill' in vars(constraints):
        self.filter_fill(constraints.fill, space)
|
||||
|
||||
def filter_duplicate_autofill(self, constraints):
    """ Remove duplicate auto or fill for a variable

    When two auto/fill constraints share a target, the later one must
    carry redefine="True"; the earlier one is then dropped. Without
    redefine, the duplicate is a dictionary consistency error.

    :param constraints: constraints space holding optional ``auto`` and
        ``fill`` lists (mutated in place).
    """
    fills = {}
    # index every auto/fill by its declaration index so they are
    # processed in declaration order regardless of which list holds them
    for kind in ('fill', 'auto'):
        if kind in vars(constraints):
            for idx, fill in enumerate(getattr(constraints, kind)):
                if fill.index in fills:
                    # was: raise Exception('hu?') -- debug placeholder
                    raise CreoleDictConsistencyError(
                        _('duplicated index {} in auto/fill constraints').format(fill.index))
                fills[fill.index] = {'idx': idx, 'fill': fill, 'type': kind}
    targets = {}
    remove_autos = []
    remove_fills = []
    for idx in sorted(fills):
        fill = fills[idx]['fill']
        redefine = bool(getattr(fill, 'redefine', False))
        if fill.target in targets:
            if redefine:
                # drop the earlier definition from whichever list it is in
                prev_idx, prev_type = targets[fill.target]
                if prev_type == 'auto':
                    remove_autos.append(prev_idx)
                else:
                    remove_fills.append(prev_idx)
            else:
                raise CreoleDictConsistencyError(_("An auto or fill already exists "
                                                   "for the target: {}").format(
                                                       fill.target))
        targets[fill.target] = (fills[idx]['idx'], fills[idx]['type'])
    # pop from the end so earlier list indexes stay valid
    for idx in sorted(remove_autos, reverse=True):
        constraints.auto.pop(idx)
    for idx in sorted(remove_fills, reverse=True):
        constraints.fill.pop(idx)
|
||||
|
||||
def filter_auto(self, auto_space, space):  # pylint: disable=C0111
    """Convert every 'auto' constraint into a 'fill' constraint.

    Auto variables are hidden, frozen and reset-to-default-on-freeze
    fills; variables carrying a 'frozen_if_in' condition are only
    converted, not hidden. The whole auto list is appended to the fill
    list and the auto list is removed.
    """
    for auto in auto_space:
        if HIGH_COMPATIBILITY and auto.target in self.has_frozen_if_in_condition:
            # if a variable has a 'frozen_if_in' condition
            # then we change the 'auto' variable as a 'fill' variable
            continue
        # an auto is a fill with "hidden" and "frozen" properties
        variable = self.get_variable(auto.target)
        # NOTE: the original messages lacked a '{}' placeholder, so the
        # offending target name was silently dropped from the error
        if variable.auto_freeze:
            raise CreoleDictConsistencyError(_('variable {} with auto value '
                                               'cannot be auto_freeze').format(auto.target))
        if variable.auto_save:
            raise CreoleDictConsistencyError(_('variable {} with auto value '
                                               'cannot be auto_save').format(auto.target))
        variable.hidden = True
        variable.frozen = True
        variable.force_default_on_freeze = True
    if 'fill' not in vars(space.constraints):
        space.constraints.fill = []
    space.constraints.fill.extend(auto_space)
    del space.constraints.auto
|
||||
|
||||
def filter_separator(self, space):  # pylint: disable=C0111,R0201
    """Resolve separator names to variable paths and reject duplicates.

    Separators pointing at unknown variables are dropped in
    HIGH_COMPATIBILITY mode, otherwise the resolution error propagates.
    Two separators on the same variable are a consistency error.
    """
    names = []
    remove_separators = []
    for idx, separator in enumerate(space):
        try:
            namespace = self.paths.get_variable_namespace(separator.name)
            subpath = self.paths.get_variable_path(separator.name, namespace)
            separator.name = subpath
        except CreoleDictConsistencyError as err:
            if HIGH_COMPATIBILITY:
                # unknown variable: silently drop the separator
                remove_separators.append(idx)
                continue
            else:
                raise err
        if separator.name in names:
            raise CreoleDictConsistencyError(_('{} already has a separator').format(separator.name))
        names.append(separator.name)
    # delete from the end so earlier indexes stay valid
    remove_separators.sort(reverse=True)
    for idx in remove_separators:
        del space[idx]
|
||||
|
||||
|
||||
def load_params_in_validenum(self, param, probe):
    """Evaluate a valid_enum param into its list of allowed values.

    'string'/'number' params are parsed with ``literal_eval``; 'python'
    params are evaluated with a restricted eval. For probes (or other
    param types) the raw text is returned unchanged.

    :param param: param node with ``type`` and (usually) ``text``
    :param probe: True when the valid_enum comes from a probe
    :return: list of values, or the raw text for probes
    """
    if not probe and param.type in ['string', 'python', 'number']:
        if not hasattr(param, 'text') and (param.type == 'python' or param.type == 'number'):
            raise CreoleDictConsistencyError(_("All '{}' variables shall be set in order to calculate {}").format(param.type, 'valid_enum'))
        if param.type in ['string', 'number']:
            try:
                values = literal_eval(param.text)
            except ValueError:
                raise CreoleDictConsistencyError(_('Cannot load {}').format(param.text))
        elif param.type == 'python':
            # SECURITY NOTE(review): eval of dictionary-provided code.
            # Builtins are restricted to range/str and eosfunc is wrapped
            # by secure_eosfunc, but dictionaries remain trusted input.
            try:
                values = eval(param.text, {'eosfunc': secure_eosfunc(self.eosfunc), '__builtins__': {'range': range, 'str': str}})
                #FIXME : eval('[str(i) for i in range(3, 13)]', {'eosfunc': eosfunc, '__builtins__': {'range': range, 'str': str}})
            except NameError:
                raise CreoleDictConsistencyError(_('The function {} is unknown').format(param.text))
            if not isinstance(values, list):
                raise CreoleDictConsistencyError(_('Function {} shall return a list').format(param.text))
        # Python 2: normalize byte strings to unicode
        new_values = []
        for val in values:
            if sys.version_info[0] < 3 and isinstance(val, str):
                val = val.decode('utf-8')
            new_values.append(val)
        values = new_values
    else:
        values = param.text
    return values
|
||||
|
||||
def filter_check(self, space):  # pylint: disable=C0111
    """Validate and normalize 'check' constraints.

    Phases: validate params and resolve 'eole' params to paths; resolve
    check targets to paths; drop redefined duplicate checks per target;
    extract 'valid_enum' checks into self.valid_enums / self.force_value;
    finally convert level to warnings_only and param hiding to the
    'transitive' flag.
    """
    # valid param in check
    remove_indexes = []
    for check_idx, check in enumerate(space):
        namespace = check.namespace
        if hasattr(check, 'param'):
            param_option_indexes = []
            for idx, param in enumerate(check.param):
                if param.type not in TYPE_PARAM_CHECK:
                    raise CreoleDictConsistencyError(_('cannot use {} type as a param in check for {}').format(param.type, check.target))
                if param.type == 'eole':
                    # container_ip* params are optional in compat mode
                    if HIGH_COMPATIBILITY and param.text.startswith('container_ip'):
                        if param.optional is True:
                            param_option_indexes.append(idx)
                    try:
                        param.text = self.paths.get_variable_path(param.text, namespace)
                    except CreoleDictConsistencyError as err:
                        if param.optional is True:
                            param_option_indexes.append(idx)
                        else:
                            raise err
            param_option_indexes = list(set(param_option_indexes))
            param_option_indexes.sort(reverse=True)
            for idx in param_option_indexes:
                check.param.pop(idx)
            if not HIGH_COMPATIBILITY and check.param == []:
                remove_indexes.append(check_idx)
    remove_indexes.sort(reverse=True)
    for idx in remove_indexes:
        del space[idx]
    # group checks by resolved target path
    variables = {}
    for index, check in enumerate(space):
        namespace = check.namespace
        if HIGH_COMPATIBILITY:
            if not self.paths.path_is_defined(check.target):
                continue
            check.is_in_master = self.paths.get_master(check.target) != None
        # let's replace the target by the path
        check.target = self.paths.get_variable_path(check.target, namespace)
        if check.target not in variables:
            variables[check.target] = []
        variables[check.target].append((index, check))
    # remove check already set for a variable
    remove_indexes = []
    for checks in variables.values():
        names = {}
        for idx, check in checks:
            # only valid_enum checks are implicitly redefinable in
            # compatibility mode
            if HIGH_COMPATIBILITY and check.name == 'valid_enum':
                redefine = True
            else:
                redefine = False
            #redefine = bool(check.redefine)
            if redefine and check.name in names:
                remove_indexes.append(names[check.name])
                del names[check.name]
            names[check.name] = idx
            del check.index
    remove_indexes.sort(reverse=True)
    for idx in remove_indexes:
        del space[idx]
    remove_indexes = []
    for idx, check in enumerate(space):
        if not check.name in dir(self.eosfunc):
            raise CreoleDictConsistencyError(_('cannot find check function {}').format(check.name))
        is_probe = not check.name in self.eosfunc.func_on_zephir_context
        if is_probe:
            raise CreoleDictConsistencyError(_('cannot have a check with probe function ({})').format(check.name))
        if check.name == 'valid_enum':
            # checkval="False" means values are proposed, not enforced
            proposed_value_type = False
            remove_params = []
            for param_idx, param in enumerate(check.param):
                if hasattr(param, 'name') and param.name == 'checkval':
                    try:
                        proposed_value_type = self.objectspace._convert_boolean(param.text) == False
                        remove_params.append(param_idx)
                    except TypeError as err:
                        raise CreoleDictConsistencyError(_('cannot load checkval value for variable {}: {}').format(check.target, err))
            remove_params.sort(reverse=True)
            for param_idx in remove_params:
                del check.param[param_idx]
            if len(check.param) != 1:
                raise CreoleDictConsistencyError(_('cannot set more than one param '
                                                  'for valid_enum for variable {}'
                                                  '').format(check.target))
            param = check.param[0]
            if proposed_value_type:
                # proposed values: only force the first value as default
                if param.type != 'eole':
                    try:
                        values = self.load_params_in_validenum(param, check.probe)
                    except NameError as err:
                        raise CreoleDictConsistencyError(_('cannot load value for variable {}: {}').format(check.target, err))
                    add_value = True
                    if HIGH_COMPATIBILITY and check.is_in_master:
                        add_value = False
                    if add_value and values:
                        self.force_value[check.target] = values[0]
            else:
                if check.target in self.valid_enums:
                    raise CreoleDictConsistencyError(_('valid_enum already set for {}'
                                                      '').format(check.target))
                values = self.load_params_in_validenum(param, check.probe)
                self.valid_enums[check.target] = {'type': param.type,
                                                  'values': values}
            # valid_enum checks are consumed here, not kept in space
            remove_indexes.append(idx)
    remove_indexes.sort(reverse=True)
    for idx in remove_indexes:
        del space[idx]

    #convert level to "warnings_only" and hidden to "transitive"
    for check in space:
        if check.level == 'warning':
            check.warnings_only = True
        else:
            check.warnings_only = False
        check.level = None
        # a check is transitive only when every param is hidden
        transitive = True
        if hasattr(check, 'param'):
            for param in check.param:
                if not param.hidden is True:
                    transitive = False
                param.hidden = None
        check.transitive = transitive
|
||||
|
||||
def filter_fill(self, fill_space, space):  # pylint: disable=C0111,R0912
    """Validate 'fill' constraints and resolve their params to paths.

    Fills are processed in declaration-index order. A fill removes any
    static value on its target. Probe fills (functions not runnable in
    the zephir context) are moved to objectspace.probe_variables and
    removed from the fill list.
    """
    fills = {}
    # sort fill/auto by index
    for idx, fill in enumerate(fill_space):
        fills[fill.index] = {'idx': idx, 'fill': fill}
        del fill.index
    indexes = list(fills.keys())
    indexes.sort()
    del_idx = []
    for idx in indexes:
        fill = fills[idx]['fill']
        variable = self.get_variable(fill.target)
        # a calculated variable cannot keep a static value
        if hasattr(variable, 'value'):
            del variable.value
        namespace = fill.namespace
        # let's replace the target by the path
        fill.target = self.paths.get_variable_path(fill.target, namespace)
        if not fill.name in dir(self.eosfunc):
            raise CreoleDictConsistencyError(_('cannot find fill function {}').format(fill.name))
        is_probe = not fill.name in self.eosfunc.func_on_zephir_context
        if hasattr(fill, 'param'):
            for param in fill.param:
                if param.type not in TYPE_PARAM_FILL:
                    raise CreoleDictConsistencyError(_('cannot use {} type as a param '
                                                      'in a fill/auto').format(param.type))
            param_option_indexes = []
            for fill_idx, param in enumerate(fill.param):
                if not hasattr(param, 'text') and \
                        (param.type == 'eole' or param.type == 'number' or \
                        param.type == 'container' or param.type == 'python'):
                    raise CreoleDictConsistencyError(_("All '{}' variables shall be set in "
                                                       "order to calculate {}").format(
                                                           param.type,
                                                           fill.target))
                # container params are sugar for the container_ip_* variable
                if param.type == 'container':
                    param.type = 'eole'
                    param.text = 'container_ip_{}'.format(param.text)
                if param.type == 'eole':
                    # probes run remotely and cannot read local variables
                    if is_probe:
                        raise CreoleDictConsistencyError(_('Function {0} used to calculate {1} '
                                                          'is executed on remote server, '
                                                          'so cannot depends to an '
                                                          'other variable'
                                                          ).format(fill.name, fill.target))
                    if HIGH_COMPATIBILITY and param.text.startswith('container_ip'):
                        if param.optional is True:
                            param_option_indexes.append(fill_idx)
                    try:
                        param.text = self.paths.get_variable_path(param.text, namespace)
                    except CreoleDictConsistencyError as err:
                        if param.optional is True:
                            param_option_indexes.append(fill_idx)
                        else:
                            raise err
            param_option_indexes = list(set(param_option_indexes))
            param_option_indexes.sort(reverse=True)
            for param_idx in param_option_indexes:
                fill.param.pop(param_idx)
        self.has_calc.append(fill.target)

        if is_probe:
            variable.force_default_on_freeze = False
            self.objectspace.probe_variables.append(fill)
            del_idx.append(fills[idx]['idx'])
    # remove probe fills from the end so indexes stay valid
    del_idx.sort(reverse=True)
    for idx in del_idx:
        space.constraints.fill.pop(idx)
|
||||
|
||||
def filter_target(self, space, namespace):  # pylint: disable=C0111
    """Resolve condition targets to absolute paths.

    Optional variable targets whose path does not exist are dropped; a
    target equal to the condition's source is a consistency error.
    """
    del_idx = []
    for idx, target in enumerate(space.target):
        if target.type == 'variable':
            if (hasattr(target, 'optional') and target.optional is True and
                    not self.paths.path_is_defined(target.name)):
                # optional and unknown: silently drop this target
                del_idx.append(idx)
                continue
            if space.source == target.name:
                raise CreoleDictConsistencyError(_('target name and source name must be different: {}').format(space.source))
            target.name = self.paths.get_variable_path(target.name, namespace)
        elif target.type == 'family':
            try:
                target.name = self.paths.get_family_path(target.name, namespace)
            except KeyError:
                raise CreoleDictConsistencyError(_('cannot found family {}').format(target.name))
    # delete from the end so earlier indexes stay valid
    del_idx = list(set(del_idx))
    del_idx.sort(reverse=True)
    for idx in del_idx:
        space.target.pop(idx)
|
||||
|
||||
def filter_condition(self, space):  # pylint: disable=C0111
    """Normalize 'condition' constraints.

    Phases: derive service_access/restriction lists from servicelists;
    apply fallbacks for conditions whose source variable is missing;
    resolve targets to paths; expand *list targets into concrete
    variable/family targets; prune params against valid_enums and force
    the resulting state when no param remains; finally reset
    hidden/mandatory flags on remaining targets.
    """
    remove_conditions = []
    fallback_variables = []
    fallback_lists = []
    # automatic generation of the service_access lists
    # and the service_restriction lists from the servicelist
    for condition in space:
        if hasattr(condition, 'target'):
            new_targets = []
            for target in condition.target:
                if target.type == 'servicelist':
                    new_target = copy(target)
                    new_target.type = 'service_accesslist'
                    new_target.name = '___auto_{}'.format(new_target.name)
                    new_targets.append(new_target)

                    new_target = copy(target)
                    new_target.type = 'service_restrictionlist'
                    new_target.name = '___auto_{}'.format(new_target.name)
                    new_targets.append(new_target)
            condition.target.extend(new_targets)

    # remove condition with target
    if HIGH_COMPATIBILITY:
        for idx, condition in enumerate(space):
            if not hasattr(condition, 'target'):
                remove_conditions.append(idx)

    for idx, condition in enumerate(space):
        if idx in remove_conditions:
            continue
        # hidden conditions are really disabled conditions
        if condition.name == 'hidden_if_in':
            condition.name = 'disabled_if_in'
        elif condition.name == 'hidden_if_not_in':
            condition.name = 'disabled_if_not_in'
        # a condition with a fallback **and** the source variable doesn't exist:
        # apply the fallback state directly to the targets and drop it
        if (hasattr(condition, 'fallback') and condition.fallback is True and
                not self.paths.path_is_defined(condition.source)):
            for target in condition.target:
                if target.type in ['variable', 'family']:
                    name = target.name.split('.')[-1]
                    if target.type == 'variable':
                        variable = self.get_variable(name)
                    else:
                        variable = self.paths.get_family_obj(name)
                    if condition.name in ['disabled_if_in']:
                        variable.disabled = True
                    if condition.name in ['mandatory_if_in']:
                        variable.mandatory = True
                    if condition.name in ['disabled_if_in', 'disabled_if_not_in',
                                          'frozen_if_in', 'frozen_if_not_in']:
                        variable.hidden = False
                    if HIGH_COMPATIBILITY:
                        fallback_variables.append(name)
                else:
                    # *list target: apply fallback to every member
                    listname = target.type
                    if not listname.endswith('list'):
                        raise Exception('not yet implemented')
                    listvars = self.objectspace.list_conditions.get(listname,
                                                                    {}).get(target.name)
                    if listvars:
                        for listvar in listvars:
                            try:
                                variable = self.get_variable(listvar)
                            except CreoleDictConsistencyError:
                                variable = self.paths.get_family_obj(listvar)
                            if condition.name in ['disabled_if_in']:
                                variable.disabled = True
                            if condition.name in ['mandatory_if_in']:
                                variable.mandatory = True
                            if condition.name in ['disabled_if_in', 'disabled_if_not_in',
                                                  'frozen_if_in', 'frozen_if_not_in']:
                                variable.hidden = False
                            fallback_lists.append(listvar)
            remove_conditions.append(idx)

    for condition_idx, condition in enumerate(space):
        if condition_idx in remove_conditions:
            continue
        namespace = condition.namespace
        self.filter_target(condition, namespace)
    # transform *list to variable or family
    for condition_idx, condition in enumerate(space):
        if condition.name in ['disabled_if_in', 'disabled_if_not_in', 'frozen_if_in', 'auto_frozen_if_in',
                              'frozen_if_not_in', 'mandatory_if_in', 'mandatory_if_not_in']:
            new_targets = []
            remove_targets = []
            if not hasattr(condition, 'target'):
                continue
            for target_idx, target in enumerate(condition.target):
                if target.type not in ['variable', 'family']:
                    listname = target.type
                    if not listname.endswith('list'):
                        raise Exception('not yet implemented')
                    listvars = self.objectspace.list_conditions.get(listname,
                                                                    {}).get(target.name)
                    if listvars:
                        for listvar in listvars:
                            # members already handled by a fallback stay out
                            if listvar in fallback_lists:
                                continue
                            try:
                                variable = self.get_variable(listvar)
                                type_ = 'variable'
                            except CreoleDictConsistencyError:
                                variable = self.paths.get_family_obj(listvar)
                                type_ = 'family'
                            new_target = self.objectspace.target()
                            new_target.type = type_
                            new_target.name = listvar
                            new_target.index = target.index
                            new_targets.append(new_target)
                    remove_targets.append(target_idx)
            remove_targets = list(set(remove_targets))
            remove_targets.sort(reverse=True)
            for target_idx in remove_targets:
                condition.target.pop(target_idx)
            condition.target.extend(new_targets)

    force_remove_targets = {}
    for condition_idx, condition in enumerate(space):
        if condition_idx in remove_conditions:
            continue
        namespace = condition.namespace
        src_variable = self.paths.get_variable_obj(condition.source)
        condition.source = self.paths.get_variable_path(condition.source, namespace, allow_source=True)
        for param in condition.param:
            if param.type not in TYPE_PARAM_CONDITION:
                raise CreoleDictConsistencyError(_('cannot use {} type as a param '
                                                  'in a condition').format(param.type))
        if condition.name in ['disabled_if_in', 'disabled_if_not_in', 'frozen_if_in', 'auto_frozen_if_in',
                              'frozen_if_not_in', 'mandatory_if_in', 'mandatory_if_not_in']:
            valid_enum = None
            # remove condition for ChoiceOption that don't have param
            if condition.source in self.valid_enums and \
                    self.valid_enums[condition.source]['type'] == 'string':
                valid_enum = self.valid_enums[condition.source]['values']
            if src_variable.type in FORCE_CHOICE:
                valid_enum = FORCE_CHOICE[src_variable.type]
            if valid_enum is not None:
                # drop params whose value can never match the source's enum
                remove_param = []
                for param_idx, param in enumerate(condition.param):
                    if param.text not in valid_enum:
                        remove_param.append(param_idx)
                remove_param.sort(reverse=True)
                for idx in remove_param:
                    del condition.param[idx]
                if condition.param == []:
                    # no param left: the condition can never trigger, so
                    # apply its *_not_in state permanently and drop it
                    for target in condition.target:
                        if target.name.startswith('creole.'):
                            name = target.name.split('.')[-1]
                        else:
                            name = target.name
                        if target.type == 'variable':
                            variable = self.get_variable(name)
                        else:
                            variable = self.paths.get_family_obj(name)
                        if condition.name == 'disabled_if_not_in':
                            variable.disabled = True
                            force_remove_targets.setdefault(condition.name,
                                                            []).append(target.name)
                        elif condition.name == 'frozen_if_not_in':
                            variable.hidden = True
                            force_remove_targets.setdefault(condition.name,
                                                            []).append(target.name)
                        elif condition.name == 'mandatory_if_not_in':
                            variable.mandatory = True
                            force_remove_targets.setdefault(condition.name,
                                                            []).append(target.name)
                        elif HIGH_COMPATIBILITY and condition.name == 'disabled_if_in':
                            variable.hidden = False
                    remove_conditions.append(condition_idx)
    remove_conditions = list(set(remove_conditions))
    remove_conditions.sort(reverse=True)
    for idx in remove_conditions:
        space.pop(idx)

    for condition_idx, condition in enumerate(space):
        if condition.name in ['disabled_if_in', 'disabled_if_not_in', 'frozen_if_in', 'auto_frozen_if_in',
                              'frozen_if_not_in', 'mandatory_if_in', 'mandatory_if_not_in']:

            remove_targets = []
            #parse each variable and family
            for target_idx, target in enumerate(condition.target):
                if target.name in force_remove_targets.get(condition.name, []):
                    remove_targets.append(target_idx)
                if target.name.startswith('creole.'):
                    name = target.name.split('.')[-1]
                else:
                    name = target.name
                if target.type == 'variable':
                    variable = self.get_variable(name)
                else:
                    variable = self.paths.get_family_obj(name)
                if name in fallback_variables:
                    # state already forced by a fallback above
                    remove_targets.append(target_idx)
                    continue
                # the condition now drives these flags dynamically, so
                # reset any static value
                if condition.name in ['disabled_if_in', 'disabled_if_not_in',
                                      'frozen_if_in', 'frozen_if_not_in']:
                    variable.hidden = False
                if condition.name in ['mandatory_if_in', 'mandatory_if_not_in']:
                    variable.mandatory = False
                if HIGH_COMPATIBILITY and condition.name in ['frozen_if_in',
                                                             'frozen_if_not_in']:
                    self.has_frozen_if_in_condition.append(name)
                if condition.name in ['mandatory_if_in', 'mandatory_if_not_in']:
                    self.force_not_mandatory.append(target.name)

            remove_targets = list(set(remove_targets))
            remove_targets.sort(reverse=True)
            for target_idx in remove_targets:
                condition.target.pop(target_idx)
|
|
@ -0,0 +1,637 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
###########################################################################
|
||||
#
|
||||
# Eole NG - 2007
|
||||
# Copyright Pole de Competence Eole (Ministere Education - Academie Dijon)
|
||||
# Licence CeCill cf /root/LicenceEole.txt
|
||||
# eole@ac-dijon.fr
|
||||
#
|
||||
# libsecure.py
|
||||
#
|
||||
# classes utilitaires pour lancement des services en https
|
||||
#
|
||||
###########################################################################
|
||||
"""
|
||||
points d'entrée de l'api
|
||||
|
||||
- gen_certif -> génère **un** certif
|
||||
- gen_certs -> génère tous les certifs
|
||||
|
||||
cf creole/doc/certifs.txt
|
||||
|
||||
"""
|
||||
# certains imports sont utilisés dans les fragments de code installés
|
||||
# dans /usr/share/eole/certs
|
||||
from os.path import join, splitext, basename, dirname, isdir, isfile, islink, exists, realpath
|
||||
from os import unlink, symlink, stat
|
||||
import os, glob, time
|
||||
from shutil import copy
|
||||
from subprocess import Popen, PIPE
|
||||
from OpenSSL import SSL
|
||||
import re
|
||||
|
||||
from .i18n import _
|
||||
|
||||
# chemin du certificat eole par défaut
|
||||
from .config import cert_file, key_file, SSL_LAST_FILE
|
||||
from .client import CreoleClient
|
||||
from pyeole.process import system_out, system_code
|
||||
|
||||
# shared client used to query creoled for default key size / validity
client = CreoleClient()

# cache for the subject-line regex, compiled once by get_subject()
global regexp_get_subject
regexp_get_subject = None
|
||||
|
||||
def prep_dir() :
    """Create the directory tree and support files expected by openssl.

    Builds, under ``ssl_dir``: the ``.rand`` entropy file, the ``serial``
    and ``index.txt`` PKI bookkeeping files, and the ``newcerts``, key,
    certificate, request and local-CA directories.  Also starts the
    generation of the Diffie-Hellman parameter file when missing.

    :raise Exception: if the entropy file cannot be generated
    """
    # generate the random seed file used by openssl
    load_default_conf_if_needed()
    rand_file = os.path.join(ssl_dir, ".rand")
    if not os.path.isfile(rand_file) :
        cmd_random = "/bin/dd if=/dev/urandom of=%s bs=1k count=16 >/dev/null 2>&1" % (rand_file)
        cmd = Popen(cmd_random, shell=True)
        res = cmd.wait()
        if res != 0:
            raise Exception(_(u"! Error while generating entropy file !"))
    # create the files used to manage the PKI (serial counter + index)
    file_serial = os.path.join(ssl_dir, "serial")
    if not os.path.isfile(file_serial) :
        f = file(file_serial, "w")
        f.write(str(start_index))
        f.close()
    file_index = os.path.join(ssl_dir, "index.txt")
    if not os.path.isfile(file_index) :
        f = file(file_index, "w")
        f.close()
    newcerts = os.path.join(ssl_dir, "newcerts")
    if not os.path.isdir(newcerts):
        os.makedirs(newcerts)
    if not os.path.isdir(key_dir):
        os.makedirs(key_dir)
    if not os.path.isdir(cert_dir):
        os.makedirs(cert_dir)
    if not os.path.isdir(req_dir):
        os.makedirs(req_dir)
    if not os.path.isdir(local_ca_dir):
        os.makedirs(local_ca_dir)
    ##cmd = Popen("chmod 611 %s" % (key_dir), shell=True)
    dhfile = os.path.join(ssl_dir, "dh")
    if not os.path.isfile(dhfile):
        # NOTE(review): the dhparam process is not waited for; DH
        # generation happens asynchronously in the background
        gen_dh = '/usr/bin/openssl dhparam -out "%s" 1024 >/dev/null 2>&1' % (dhfile)
        Popen(gen_dh, shell=True)
|
||||
|
||||
def sup_passwd(tmp_keyfile, keyfile) :
    """Strip the passphrase from a private key.

    Rewrites ``tmp_keyfile`` (protected by the hard-coded passphrase
    ``secret`` used at generation time) to ``keyfile`` without a
    passphrase.

    :param tmp_keyfile: path of the passphrase-protected key
    :param keyfile: destination path of the unprotected key
    :raise Exception: if the openssl command fails
    """
    load_default_conf_if_needed()
    key_cmd = '/usr/bin/openssl rsa -in "%s" -passin pass:secret -out "%s" >/dev/null 2>&1' % (tmp_keyfile, keyfile)
    cmd = Popen(key_cmd, shell=True)
    res = cmd.wait()
    if res != 0:
        raise Exception(_(u'! Error while generating ssl key in {0} !').format(keyfile))
|
||||
|
||||
def finalise_cert (certfile, keyfile, key_user='', key_grp='', key_chmod='',
|
||||
cert_user='', cert_grp='', cert_chmod=''):
|
||||
"""
|
||||
Finalisation du certif
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
if key_user != '':
|
||||
try:
|
||||
res = Popen("chown %s %s" % (key_user, keyfile), shell=True).wait()
|
||||
assert res == 0
|
||||
except:
|
||||
print _(u"\n! Rights on {0} can't be modified").format(keyfile)
|
||||
return False
|
||||
if key_grp != '':
|
||||
try:
|
||||
res=Popen("/bin/chgrp %s %s" % (key_grp, keyfile), shell=True).wait()
|
||||
assert res == 0
|
||||
except:
|
||||
print _(u"\n! Rights on {0} can't be modified").format(keyfile)
|
||||
return False
|
||||
if key_chmod != '':
|
||||
try:
|
||||
res = Popen("/bin/chmod %s %s" % (key_chmod, keyfile), shell=True).wait()
|
||||
assert res == 0
|
||||
except:
|
||||
print _(u"\n! Rights on {0} can't be modified").format(keyfile)
|
||||
return False
|
||||
if cert_user != '':
|
||||
try:
|
||||
res = Popen("/bin/chown %s %s" % (cert_user, certfile), shell=True).wait()
|
||||
assert res == 0
|
||||
except:
|
||||
print _(u"\n! Rights on {0} can't be modified").format(certfile)
|
||||
return False
|
||||
if cert_grp != '':
|
||||
try:
|
||||
res = Popen("/bin/chgrp %s %s" % (cert_grp, certfile), shell=True).wait()
|
||||
assert res == 0
|
||||
except:
|
||||
print _(u"\n! Rights on {0} can't be modified").format(certfile)
|
||||
return False
|
||||
if cert_chmod != '':
|
||||
try:
|
||||
res = Popen("/bin/chmod %s %s" % (cert_chmod, certfile), shell=True).wait()
|
||||
assert res == 0
|
||||
except:
|
||||
print _(u"\n! Rights on {0} can't be modified").format(certfile)
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def is_simple_cert(cert_file):
    """Tell whether a PEM file holds a single certificate or a chain.

    :param cert_file: path of the file to inspect
    :type cert_file: str
    :return: ``True`` when exactly one ``BEGIN CERTIFICATE`` marker is found
    :rtype: bool
    """
    with open(cert_file, 'r') as pem:
        markers = re.findall(r'-+BEGIN CERTIFICATE-+', pem.read())
    return len(markers) == 1
|
||||
|
||||
def get_certs_catalog(simple=True):
    """Build a dictionary of the certificates present on the system.

    Maps certificate subject -> file path for every readable entry of
    ``ssl_dir``/certs, to speed up the rebuilding of intermediate
    certificate chains.  The result is also stored in the module-level
    ``certs_catalog`` global.

    :param simple: when ``True``, only reference single-certificate files
    :type simple: bool
    :return: subject -> path mapping
    :rtype: dict
    """
    global certs_catalog
    certs_catalog = {}
    for cert_file in glob.glob(os.path.join(ssl_dir, 'certs/*')):
        try:
            if simple and is_simple_cert(cert_file):
                certs_catalog[get_subject(certfile=cert_file)] = cert_file
            elif not simple:
                certs_catalog[get_subject(certfile=cert_file)] = cert_file
        # unreadable or non-certificate files are simply skipped
        except:
            continue
    return certs_catalog
|
||||
|
||||
|
||||
def get_certs_chain(certs):
    """Recursively collect the certificate chain.

    Appends the issuer of ``certs[-1]`` until a self-signed certificate
    or the EOLE CA is reached; unknown issuers are looked up in the
    ``certs_catalog`` global (built on demand).

    :param certs: certificates already collected, in chain order
    :type certs: list of paths
    :return: the completed list (also modified in place)
    :rtype: list
    """
    global certs_catalog, ca_issuer
    load_default_conf_if_needed()
    subject = get_subject(certfile=certs[-1])
    issuer = get_issuer_subject(certfile=certs[-1])
    if ca_issuer is None:
        ca_issuer = get_issuer_subject(certfile=ca_file)
    if subject == issuer:
        # self-signed: the chain is complete
        pass
    elif issuer == ca_issuer:
        certs.append(ca_file)
    else:
        try:
            if certs_catalog is None:
                certs_catalog = get_certs_catalog()
            certs.append(certs_catalog[issuer])
            get_certs_chain(certs)
        except KeyError as e:
            # issuer not found on disk: report and return what we have
            print _(u"Certificate chain incomplete.")
    return certs
|
||||
|
||||
|
||||
def get_intermediate_certs(cert):
    """Get the list of intermediate certificates.

    :param cert: path of the certificate whose chain is rebuilt
    :type cert: str
    :return: chain paths without the certificate itself nor the root
    :rtype: list
    """
    load_default_conf_if_needed()
    try:
        chain = get_certs_chain([cert,])[1:-1]
    # any failure while rebuilding the chain yields an empty list
    except:
        chain = []
    return chain
|
||||
|
||||
|
||||
def concat_fic(dst_fic, in_fics, overwrite=False, need_link=True):
    """Concatenate a list of files into a destination file.

    :param dst_fic: destination file (appended to unless ``overwrite``)
    :param in_fics: source file or list of source files
    :param overwrite: when ``True`` replace the destination content,
        otherwise keep it and append
    :param need_link: when ``True`` refresh the openssl hash symlinks
        around ``dst_fic`` (see :func:`remove_link` / :func:`build_link`)
    """
    load_default_conf_if_needed()
    if need_link:
        remove_link(dst_fic)
    if type(in_fics) != list:
        in_fics = [in_fics]
    for fic in in_fics:
        # NOTE(review): a missing source is only reported here; the read
        # below still fails with IOError — confirm this is intended
        if not os.path.isfile(fic):
            print _(u"Error: file {0} does not exist").format(fic)
    data = ""
    for fic_src in in_fics:
        f_src = file(fic_src)
        data += f_src.read().rstrip() + '\n'
        f_src.close()
    if overwrite:
        f_dst = file(dst_fic, "w")
    else:
        f_dst = file(dst_fic, "a+")
    f_dst.write(data)
    f_dst.close()
    if need_link:
        build_link(dst_fic, in_fics)
|
||||
|
||||
def gen_certs(regen=False, merge=True):
    """Generate the CA then every certificate.

    :param regen: force regeneration of the CA and certificates
    :param merge: rebuild the ca.crt bundle after generating the CA
    """
    load_default_conf_if_needed()
    verif_ca()
    ca_generated = gen_ca(regen)
    if merge:
        merge_ca()
    if ca_generated:
        # a freshly generated CA invalidates every existing certificate
        regen = True
    certif_loader(regen=regen)
|
||||
|
||||
def verif_ca():
    """Check that the CA is correctly generated and reset it otherwise.

    Verifies that the serial is at least ``min_serial`` and that the CA
    certificate has a readable subject; migrates old-style ``ca.crt``
    bundles to ``ca_local.crt``.  When a check fails, the CA files and
    the PKI bookkeeping files are removed so :func:`gen_ca` regenerates
    them, and the directory tree is (re)built.
    """
    load_default_conf_if_needed()
    # handle legacy layouts where only the ca.crt bundle exists
    if os.path.isfile(ca_dest_file) and not os.path.isfile(ca_file):
        # keep the first certificate found in ca.crt as ca_local.crt
        ca_certs = open(ca_dest_file).read().strip()
        tag_begin = '-----BEGIN CERTIFICATE-----'
        try:
            ca_data = tag_begin + ca_certs.split(tag_begin)[1]
            local_ca = open(ca_file, 'w')
            local_ca.write(ca_data)
            local_ca.close()
        except IndexError:
            # current CA can not be salvaged, it will be regenerated
            pass
    # default serial value; start_index is an hexadecimal string, so a
    # plain base-16 int() replaces the original eval('0x…') misuse
    serial = int(start_index, 16)
    # check the current serial value
    # check the CA subject
    if os.path.isfile(ca_file):
        cmd = Popen(['/usr/bin/openssl', 'x509', '-in', ca_file, '-subject', '-noout'], stdout=PIPE)
        if cmd.wait() != 0:
            # unreadable certificate: drop it and rebuild the tree
            unlink(ca_file)
            prep_dir()
    if os.path.isfile(file_serial):
        serial = open(file_serial).read().strip()
        # hexadecimal conversion
        serial = int(serial, 16)
    if serial < min_serial:
        # serial too low: wipe the CA and the PKI bookkeeping files
        if os.path.isfile(ca_file):
            unlink(ca_file)
        unlink(file_serial)
        for f_index in glob.glob(os.path.join(ssl_dir, 'index*')):
            unlink(f_index)
        for f_cert in glob.glob(os.path.join(newcerts_dir, '*.pem')):
            unlink(f_cert)
    prep_dir()
|
||||
|
||||
def gen_ca(regen=False, del_passwd=True, extensions="SERVEUR"):
    """Generate the CA certificate and its revocation list.

    :param regen: force regeneration even when CA files already exist
    :param del_passwd: strip the passphrase from the generated CA key
    :param extensions: openssl extensions section used for the CA
    :return: whether a new CA was actually generated
    :rtype: bool
    :raise Exception: on missing configuration template or openssl failure
    """
    load_default_conf_if_needed()
    generated = False
    prep_dir()
    if not os.path.isfile(ca_conf_file):
        raise Exception(_(u"Certificate configuration template can not be found:\n\t{0}\n").format(ca_conf_file))
    if regen or (not os.path.isfile(ca_keyfile)) or (not os.path.isfile(ca_file)):
        print("* " + _(u"Generating CA certificate"))
        remove_link(ca_file)
        ## generate the CA certificate itself
        ca_gen = '/usr/bin/openssl req -x509 -config %s -newkey rsa:%s -days %s -keyout "%s" -out "%s" -extensions %s >/dev/null 2>&1' % (ca_conf_file, ssl_default_key_bits, ssl_default_cert_time, tmp_keyfile, ca_file, extensions)
        cmd = Popen(ca_gen, shell=True)
        if cmd.wait() != 0:
            raise Exception(_(u"Error while generating CA"))
        if del_passwd:
            sup_passwd(tmp_keyfile, ca_keyfile)
        if os.path.isfile(tmp_keyfile):
            unlink(tmp_keyfile)
        generated = True
        ## apply access rights
        finalise_cert(ca_file, ca_keyfile, key_chmod='600')
        build_link(ca_file)
    ## generate a CRL when none exists yet
    if not os.path.isfile(os.path.join(ssl_dir, 'eole.crl')):
        print(_(u"Generating certificate revocation list (CRL)"))
        crl_gen = '/usr/bin/openssl ca -gencrl -config %s -crldays %s -out %s/eole.crl >/dev/null 2>&1' % (ca_conf_file, ssl_default_cert_time, ssl_dir)
        cmd = Popen(crl_gen, shell=True)
        if cmd.wait() != 0:
            raise Exception(_(u"Error while generating CRL ({0}/eole.crl)").format(ssl_dir))
    return generated
|
||||
|
||||
def merge_ca():
    """Concatenate every relevant CA into the ca.crt bundle.

    Gathers the local CA, the ACInfraEducation certificate and any extra
    CA dropped in ``local_ca_dir``, then rewrites ``ca_dest_file``.
    """
    load_default_conf_if_needed()
    ## education CA certificates
    ca_list = [ca_file, os.path.join(cert_dir, 'ACInfraEducation.pem')]
    ## extra local CA certificates, when present
    for ca_perso in glob.glob(os.path.join(local_ca_dir,'*.*')):
        if os.path.isfile(ca_perso):
            ca_list.append(ca_perso)
    concat_fic(ca_dest_file, ca_list, True, False)
|
||||
|
||||
def gen_certif(certfile, keyfile=None, key_user='', key_grp='', key_chmod='',
               cert_user='', cert_grp='', cert_chmod='', regen=False, copy_key=False,
               del_passwd=True, signe_req=True, container=None, client_cert=False,
               cert_conf_file=None):
    """Generate a certificate request and have it signed by the CA.

    :param certfile: path of the certificate to produce
    :param keyfile: path of the private key (default: ``certfile`` with
        a ``.key`` extension)
    :param key_user, key_grp, key_chmod: ownership/mode for the key
    :param cert_user, cert_grp, cert_chmod: ownership/mode for the cert
    :param regen: force regeneration even when the files exist
    :param copy_key: append the private key to the certificate file
    :param del_passwd: strip the passphrase from a newly generated key
    :param signe_req: have the CA sign the request
    :param container: LXC container name whose rootfs prefixes the paths
    :param client_cert: use the client certificate template by default
    :param cert_conf_file: explicit openssl configuration template
    :raise Exception: on missing template/folder or openssl failure
    """
    if not cert_conf_file:
        if client_cert:
            cert_conf_file = client_conf_file
        else:
            cert_conf_file = conf_file
    load_default_conf_if_needed()
    if not os.path.isfile(cert_conf_file):
        raise Exception(_(u"Certificate configuration template can not be found:\n\t{0}\n").format(cert_conf_file))

    basefile = os.path.splitext(certfile)[0]
    if keyfile is None:
        keyfile = "%s.key" % (basefile)

    if container is not None:
        # prefix both paths with the container rootfs
        cpath = client.get_container(name=container)['path']
        certfile = cpath + certfile
        keyfile = cpath + keyfile

    if regen or not os.path.isfile(certfile) or not os.path.isfile(keyfile):

        remove_link(certfile)
        if not isdir(dirname(certfile)):
            raise Exception(_(u"Folder {0} does not exist.").format(dirname(certfile)))
        if not isdir(dirname(keyfile)):
            raise Exception(_(u"Folder {0} does not exist.").format(dirname(keyfile)))

        # certificate missing or regeneration requested
        fic_p10 = os.path.join(req_dir, "%s.p10" % (os.path.basename(basefile)))
        # generate the x509 certificate request, reusing the key when one
        # already exists, creating a new (passphrase-protected) one otherwise
        if exists(keyfile):
            gen_req = '/usr/bin/openssl req -new -key "%s" -days %s -config %s -out "%s" >/dev/null 2>&1' % (
                keyfile, ssl_default_cert_time, cert_conf_file, fic_p10)
            new_key = False
        else:
            gen_req = '/usr/bin/openssl req -new -newkey rsa:%s -days %s -config %s -keyout "%s" -out "%s" >/dev/null 2>&1' % (
                ssl_default_key_bits, ssl_default_cert_time, cert_conf_file, tmp_keyfile, fic_p10)
            new_key = True
        cmd = Popen(gen_req, shell=True)
        if cmd.wait() != 0:
            raise Exception(_(u'! Error while generating certificate request {0} !').format(fic_p10))
        if new_key:
            if del_passwd:
                sup_passwd(tmp_keyfile, keyfile)
            else:
                copy(tmp_keyfile, keyfile)
            if os.path.isfile(tmp_keyfile):
                unlink(tmp_keyfile)
        if signe_req:
            # have the CA sign the request
            ca_signe = '/usr/bin/openssl ca -in "%s" -config %s -out "%s" -batch -notext >/dev/null 2>&1' % (fic_p10, cert_conf_file, certfile)
            cmd = Popen(ca_signe, shell=True)
            if cmd.wait() != 0:
                # BUG FIX: the original applied '%' to a str.format-style
                # template ('{0}' with no '%s'), which raised TypeError
                # instead of the intended error message
                raise Exception(_(u'! Error while signing certificate request {0} !').format(fic_p10))
            print(_(u"* Certificate {0} successfully generated").format(certfile))
        if copy_key:
            # append the private key to the certificate file (no relink)
            concat_fic(certfile, [keyfile], need_link=False)
        finalise_cert(certfile, keyfile, key_user=key_user,
                      key_grp=key_grp, key_chmod=key_chmod,
                      cert_user=cert_user, cert_grp=cert_grp,
                      cert_chmod=cert_chmod)
        build_link(certfile)
|
||||
|
||||
|
||||
def remove_link(name, remove_broken_link=True):
    """Remove the hash symlinks pointing at a certificate.

    Only acts on files below ``ssl_dir``/certs.

    :param name: certificate path whose symlinks must be removed
    :param remove_broken_link: also clean up dangling symlinks
    """
    load_default_conf_if_needed()
    if not name.startswith(join(ssl_dir, 'certs')):
        return
    for cert_link in glob.glob(os.path.join(ssl_dir, 'certs/*')):
        if islink(cert_link):
            if remove_broken_link and not exists(cert_link):
                # broken link: remove it
                unlink(cert_link)
            elif str(name) == realpath(cert_link):
                # the link targets the requested certificate: remove it
                unlink(cert_link)
|
||||
|
||||
|
||||
def build_link(name, concats=[]):
    """Create openssl hash symlinks for a certificate.

    Links named ``<subject_hash>.<n>`` (for both the current and the
    legacy hash algorithm) are created next to the certificate, and the
    timestamp stored in ``SSL_LAST_FILE`` is refreshed so that
    :func:`rehash_if_needed` can detect changes.

    NOTE(review): the mutable default ``concats=[]`` is only read here,
    never mutated, so the shared-default pitfall does not apply.

    :param name: certificate path to link
    :param concats: source files whose links must be dropped because
        their content is now concatenated into ``name``
    """
    load_default_conf_if_needed()
    if not name.startswith(join(ssl_dir, 'certs')):
        return
    def _check_contats_link(link):
        # remove every link pointing at one of the concatenated sources
        if islink(link):
            if realpath(link) in concats:
                unlink(link)

    def _check_link(fp, suffix):
        # compute the right numeric suffix for the link name:
        # if a link already uses the current suffix, try suffix + 1
        new_name = join(dir_name, fp) + '.' + str(suffix)
        if islink(new_name):
            return _check_link(fp, suffix + 1)
        return new_name

    def _build_link(ret):
        # create a link derived from the subject hash printed by openssl
        if ret != '':
            fp = ret.split('\n')[0]
            if fp.isalnum():
                if concats != []:
                    for link in glob.glob(join(dir_name, fp) + '.*'):
                        _check_contats_link(link)

                new_name = _check_link(fp, 0)
                symlink(name, new_name)
                return stat(new_name).st_mtime
        return 0

    dir_name = dirname(name)
    subject_fp = ["/usr/bin/openssl", "x509", "-subject_hash", "-fingerprint", "-noout", "-in", name]
    subject_fp_old = ["/usr/bin/openssl", "x509", "-subject_hash_old", "-fingerprint", "-noout", "-in", name]
    new_timestamp = _build_link(system_out(subject_fp)[1])
    new_timestamp = max(_build_link(system_out(subject_fp_old)[1]), new_timestamp)
    if isfile(SSL_LAST_FILE):
        try:
            fh = open(SSL_LAST_FILE, 'r')
            timestamp = float(fh.read().strip())
        except ValueError:
            # unparsable timestamp file: treat as very old
            timestamp = 0
        if new_timestamp > timestamp:
            fh = open(SSL_LAST_FILE, 'w')
            fh.write(str(new_timestamp))
            fh.close()
|
||||
|
||||
|
||||
def rehash_if_needed():
    """Run ``c_rehash`` when a certificate changed since the last run.

    Compares the timestamp stored in ``SSL_LAST_FILE`` with the mtime of
    every entry of ``ssl_dir``/certs, rehashes when anything is newer
    (or when the timestamp file is missing/corrupt), then stores the
    most recent certificate mtime as the new reference.
    """
    load_default_conf_if_needed()
    need_rehash = False
    if isfile(SSL_LAST_FILE):
        try:
            fh = open(SSL_LAST_FILE, 'r')
            timestamp = int(float(fh.read().strip()))
            for cert_link in glob.glob(os.path.join(ssl_dir, 'certs/*')):
                try:
                    if timestamp < int(stat(cert_link).st_mtime):
                        need_rehash = True
                        break
                # unreadable entries (e.g. broken links) are ignored
                except:
                    pass
        except ValueError:
            # unparsable timestamp file: be safe and rehash
            import traceback
            traceback.print_exc()
            need_rehash = True
    else:
        need_rehash = True

    if need_rehash:
        system_code(['/usr/bin/c_rehash'])
        # store the most recent certificate mtime as the new reference
        new_timestamp = 0
        for cert_link in glob.glob(os.path.join(ssl_dir, 'certs/*')):
            if isfile(cert_link):
                timestamp = stat(cert_link).st_mtime
                if timestamp > new_timestamp:
                    new_timestamp = timestamp
        fh = open(SSL_LAST_FILE, 'w')
        fh.write(str(new_timestamp))
        fh.close()
|
||||
|
||||
|
||||
# gen_certif utils reader
|
||||
|
||||
def certif_loader(regen=None):
    """Load and execute the certificate generation fragments.

    Runs every ``/usr/share/eole/certs/*_*.gen_cert`` file in sorted
    order; each fragment typically calls :func:`gen_certif`.

    :param regen: made available to the fragments through the local
        namespace passed to ``execfile``
    """
    load_default_conf_if_needed()
    # XXX FIXME : move the data path to the container packages,
    # XXX FIXME and relocate the .gen_cert files
    files = glob.glob(join('/usr/share/eole/certs', '*_*.gen_cert'))
    files.sort()
    for fname in files:
        # puts name in global namespace because we need it in execfile's
        # namespace in rules_loader
        name = splitext(basename(fname))[0].split('_')[1]
        # exec gen_certs
        execfile(fname, globals(),locals())
|
||||
|
||||
def get_subject(cert=None, certfile=None):
    """Get the subject of a certificate.

    Exactly one of the two parameters must be provided:

    :param cert: content of the certificate
    :param certfile: file name of the certificate
    :return: ``(prefix, CN)`` tuple extracted from the openssl subject line
    :raise Exception: when both or neither parameter is given, or when
        openssl fails or returns an unexpected subject line
    """
    load_default_conf_if_needed()
    global regexp_get_subject
    if None not in (cert, certfile):
        raise Exception(_(u'cert or certfile must be None'))
    if cert == certfile:
        # both are None
        raise Exception(_(u'cert or certfile must be set'))
    if certfile != None:
        cmd = ['openssl', 'x509', '-in', certfile, '-subject', '-noout']
        stdin = None
    else:
        cmd = ['openssl', 'x509', '-subject', '-noout']
        stdin = cert
    ret = system_out(cmd=cmd, stdin=stdin)
    if ret[0] != 0:
        raise Exception(_(u'error in {0}: {1}').format(' '.join(cmd), str(ret[2])))
    ret = ret[1].rstrip()
    if not ret.startswith("subject= "):
        raise Exception(_(u'Invalid certificate subject: {0} ').format(ret))
    # compile once and cache at module level
    if regexp_get_subject is None:
        regexp_get_subject = re.compile('^subject= (.*)/CN=(.*)')
    return regexp_get_subject.findall(ret)[0]
|
||||
|
||||
def get_issuer_subject(cert=None, certfile=None):
    """Get the subject of the CA that issued a certificate.

    Exactly one of the two parameters must be provided:

    :param cert: content of the certificate
    :param certfile: file name of the certificate
    :return: ``(prefix, CN)`` tuple extracted from the openssl issuer line
    :raise Exception: when both or neither parameter is given, or when
        openssl fails or returns an unexpected issuer line
    """
    load_default_conf_if_needed()
    if None not in (cert, certfile):
        raise Exception(_(u'cert or certfile must be None'))
    if cert == certfile:
        # both are None
        raise Exception(_(u'cert or certfile must be set'))
    if certfile != None:
        cmd = ['openssl', 'x509', '-in', certfile, '-issuer', '-noout']
        stdin = None
    else:
        cmd = ['openssl', 'x509', '-issuer', '-noout']
        stdin = cert
    ret = system_out(cmd=cmd, stdin=stdin)
    if ret[0] != 0:
        raise Exception(_(u'error in {0}: {1}').format(' '.join(cmd), str(ret[2])))
    ret = ret[1].rstrip()
    if not ret.startswith("issuer= "):
        raise Exception(_(u'Invalid certificate issuer: {0} ').format(ret))
    regexp = '^issuer= (.*)/CN=(.*)'
    return re.findall(regexp, ret)[0]
|
||||
|
||||
def load_conf(ssl_dico):
    """Load the SSL settings into the module-level globals.

    Every value may be overridden through ``ssl_dico``; missing entries
    fall back to paths derived from ``ssl_dir`` or to values queried
    from creoled.

    :param ssl_dico: configuration overrides
    :type ssl_dico: dict
    """
    global ssl_dir, cert_dir, key_dir, tmp_keyfile, file_serial, req_dir
    global local_ca_dir, newcerts_dir, ca_conf_file, conf_file, client_conf_file
    global ca_file, ca_dest_file, ca_keyfile, start_index, min_serial
    global ssl_default_key_bits, ssl_default_cert_time
    global certs_catalog

    ssl_dir = ssl_dico.get('ssl_dir', ssl_dir)
    cert_dir = ssl_dico.get('cert_dir', os.path.join(ssl_dir, "certs"))
    key_dir = ssl_dico.get('key_dir', os.path.join(ssl_dir, "private"))
    tmp_keyfile = ssl_dico.get('tmp_keyfile', os.path.join(key_dir, "tmpkey.key"))
    file_serial = ssl_dico.get('file_serial', os.path.join(ssl_dir, "serial"))
    req_dir = ssl_dico.get('req_dir', os.path.join(ssl_dir, "req"))
    local_ca_dir = ssl_dico.get('local_ca_dir', os.path.join(ssl_dir, "local_ca"))
    newcerts_dir = ssl_dico.get('newcerts_dir', os.path.join(ssl_dir, "newcerts"))
    ca_conf_file = ssl_dico.get('ca_conf_file', ca_conf_file)
    conf_file = ssl_dico.get('conf_file', conf_file)
    # NOTE(review): falls back to conf_file, not client_conf_file — looks
    # intentional (client template defaults to the server one); confirm
    client_conf_file = ssl_dico.get('client_conf_file', conf_file)
    # CA paths
    ca_file = ssl_dico.get('ca_file', os.path.join(cert_dir, "ca_local.crt"))
    ca_dest_file = ssl_dico.get('ca_dest_file', os.path.join(cert_dir, "ca.crt"))
    ca_keyfile = ssl_dico.get('ca_keyfile', os.path.join(key_dir, "ca.key"))
    # serial index: defaults to the current time as an hexadecimal string
    start_index = ssl_dico.get('start_index', hex(int(time.time()))[2:])
    # was int(eval('0x30')): a plain literal gives the same value (48)
    # without the needless eval()
    min_serial = 0x30
    ssl_default_key_bits = ssl_dico.get('ssl_default_key_bits', client.get_creole('ssl_default_key_bits', 2048))
    ssl_default_cert_time = ssl_dico.get('ssl_default_cert_time', client.get_creole('ssl_default_cert_time', 1096))
|
||||
|
||||
def load_default_conf_if_needed():
    """Load the default configuration on first use of the library.

    creoled is not necessarily started at import time, so the default
    configuration is only loaded when the lib is actually used, not at
    import (#8448).
    """
    global ssl_dir
    if ssl_dir == None:
        load_conf({'ssl_dir': '/etc/ssl',
                   'ca_conf_file': '/etc/eole/ssl/ca-eole.conf',
                   'conf_file': '/etc/eole/ssl/certif-eole.conf',
                   'client_conf_file': '/etc/eole/ssl/client-eole.conf'})
|
||||
|
||||
# Module-level configuration state: all of these are populated lazily by
# load_conf() via load_default_conf_if_needed() on first use (#8448).
ssl_dir=None
ca_conf_file=None
client_conf_file=None
conf_file=None
# subject -> path cache built by get_certs_catalog()
certs_catalog = None
# issuer of the EOLE CA, cached by get_certs_chain()
ca_issuer = None
|
|
@ -0,0 +1,838 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
##########################################################################
|
||||
# creole.client - client to request creole.server through REST API
|
||||
# Copyright © 2012,2013 Pôle de compétences EOLE <eole@ac-dijon.fr>
|
||||
#
|
||||
# License CeCILL:
|
||||
# * in french: http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# * in english http://www.cecill.info/licences/Licence_CeCILL_V2-en.html
|
||||
##########################################################################
|
||||
|
||||
"""Request informations from :class:`creole.CreoleServer`
|
||||
|
||||
Simple http :mod:`restkit.request` client to request and manipulate
|
||||
informations from :class:`creole.CreoleServer`.
|
||||
|
||||
"""
|
||||
|
||||
from http_parser.http import NoMoreData
|
||||
import restkit
|
||||
import eventlet
|
||||
from restkit.errors import ResourceError, RequestError, ParseException, RequestTimeout
|
||||
from eventlet.timeout import Timeout as EventletTimeout
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
import json
|
||||
import logging
|
||||
from time import sleep
|
||||
|
||||
from .dtd_parser import parse_dtd
|
||||
from .config import dtdfilename
|
||||
|
||||
from .i18n import _
|
||||
from pyeole.encode import normalize
|
||||
|
||||
import re
|
||||
|
||||
# Stat filesystem
|
||||
import os
|
||||
|
||||
# Create instance method on the fly
|
||||
import types
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
_CONTAINER_COMPONENTS = ['container'] + parse_dtd(dtdfilename)['container']['options']
|
||||
"""List of components used to define an LXC container.
|
||||
|
||||
They are extracted from the ``creole.dtd``.
|
||||
|
||||
Each of them are use to fabric two accessor methods bound to
|
||||
:class:`CreoleClient`.
|
||||
|
||||
"""
|
||||
LOCAL_URL = 'http://127.0.0.1:8000'
|
||||
#Si on veut garder les threads, on peut désactiver les reap_connections pour éviter les tracebacks
|
||||
#restkit.session.get_session('thread', reap_connections=False)
|
||||
|
||||
|
||||
def _merge_entries(old, new):
|
||||
"""Merge component informations
|
||||
|
||||
This merge keep information from :data:`old` when the :data:`new`
|
||||
is ``None``.
|
||||
|
||||
The boolean information are ored between :data:`old` and
|
||||
:data:`new`.
|
||||
|
||||
:param old: previous component informations
|
||||
:type old: `dict`
|
||||
:param new: new component informations
|
||||
:type new: `dict`
|
||||
:return: merged informations
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
for key, val in new.items():
|
||||
if val is None:
|
||||
# Do not override previous value
|
||||
continue
|
||||
elif isinstance(val, bool):
|
||||
# Switch on first True
|
||||
# old[key] may not exists
|
||||
old[key] = val | old.get(key, False)
|
||||
else:
|
||||
old[key] = val
|
||||
|
||||
return old
|
||||
|
||||
|
||||
def _merge_duplicates_in_components(container_info, keys_to_strip=None):
|
||||
"""Merge duplicates entries
|
||||
|
||||
:param container_info: information on a container or group of
|
||||
containers
|
||||
:type container_info: `dict`
|
||||
:param keys_to_strip: keys for which to remove duplicated entries
|
||||
:type keys_to_strip: `list`
|
||||
|
||||
"""
|
||||
# Do not work in-place
|
||||
info = container_info.copy()
|
||||
|
||||
if keys_to_strip is None:
|
||||
# Run on all keys
|
||||
keys_to_strip = info.keys()
|
||||
|
||||
for key in keys_to_strip:
|
||||
if not isinstance(info[key], list):
|
||||
# Do not work on single values
|
||||
continue
|
||||
|
||||
result = OrderedDict()
|
||||
for entry in info[key]:
|
||||
if 'name' in entry:
|
||||
name = repr(entry['name'])
|
||||
if name in result and not entry.get(u'activate', False):
|
||||
# Duplicate found but inactive
|
||||
continue
|
||||
elif name in result:
|
||||
# Merge old and new informations
|
||||
old_entry = result[name]
|
||||
# Make sure entry appears at right place
|
||||
del(result[name])
|
||||
result[name] = _merge_entries(old=old_entry,
|
||||
new=entry)
|
||||
else:
|
||||
# New entry
|
||||
result[name] = entry
|
||||
|
||||
if result:
|
||||
# Store stripped information
|
||||
info[key] = [ item for item in result.values() ]
|
||||
|
||||
return info
|
||||
|
||||
|
||||
def _build_component_accessors(component):
|
||||
"""Fabric of accessors for container components
|
||||
|
||||
It build two accessors:
|
||||
|
||||
- one to get all components for all containers named
|
||||
``get_<component>s``
|
||||
|
||||
- one to get one comoponent item defined for all containers
|
||||
named ``get_<component>``
|
||||
|
||||
:param name: type of container variable
|
||||
:type name: `str`
|
||||
:return: component accessors
|
||||
:rtype: `tuple` of `function`
|
||||
|
||||
"""
|
||||
def all_components(self, container=None):
|
||||
"""Return all components
|
||||
"""
|
||||
return self.get_components('{0}s'.format(component),
|
||||
container=container)
|
||||
|
||||
all_components.__name__ = 'get_{0}s'.format(component)
|
||||
all_components.__doc__ = """Get {0}s for all containers
|
||||
|
||||
:param container: limit search to a container
|
||||
:type container: `str`
|
||||
:returns: {0}s informations
|
||||
:rtype: `list`
|
||||
|
||||
""".format(component)
|
||||
|
||||
def single_component(self, name, container=None):
|
||||
"""Return single component
|
||||
"""
|
||||
components = []
|
||||
ret = self.get_components('{0}s'.format(component),
|
||||
container=container)
|
||||
for item in ret:
|
||||
if item['name'] == name:
|
||||
components.append(item)
|
||||
return components
|
||||
single_component.__doc__ = """Get one {0} for all containers
|
||||
|
||||
:param name: name of {0} to return
|
||||
:type name: `str`
|
||||
:param container: limit search to a container
|
||||
:type container: `str`
|
||||
:returns: {0} informations for all containers
|
||||
:rtype: `list`
|
||||
|
||||
""".format(component)
|
||||
|
||||
single_component.__name__ = 'get_{0}'.format(component)
|
||||
|
||||
return all_components, single_component
|
||||
|
||||
|
||||
class CreoleClient(object):
|
||||
"""Request informations from :class:`creole.CreoleServer`.
|
||||
|
||||
In addition, this class provides some utilities to manipulate
|
||||
returned data.
|
||||
|
||||
"""
|
||||
|
||||
    def __init__(self, url=None):
        """Initialize client.

        :param url: HTTP URL to the :class:`creole.CreoleServer`;
            defaults to the host bridge address when running inside an
            LXC container, to localhost otherwise
        :type url: `str`

        """
        if url is None:
            if self.is_in_lxc():
                url = 'http://192.0.2.1:8000'
            else:
                url = LOCAL_URL

        self.url = url
        comp_list = _CONTAINER_COMPONENTS[:]
        comp_list.remove('container')
        # Disable logging of restkit
        restkit.set_logging('critical', logging.NullHandler())
        self._is_container_actif = None
        self._restkit_request = None
        # attach one get_<component>s / get_<component> accessor pair per
        # component type declared in the DTD
        for component in comp_list:
            get_all, get_single = _build_component_accessors(component)
            setattr(self, get_all.__name__,
                    types.MethodType(get_all, self, CreoleClient))
            setattr(self, get_single.__name__,
                    types.MethodType(get_single, self, CreoleClient))
|
||||
|
||||
@staticmethod
|
||||
def is_in_lxc():
|
||||
"""Check if we are in LXC.
|
||||
|
||||
We are under LXC if /proc/1/cgroup contains ``/lxc``.
|
||||
|
||||
:return: if we are under LXC.
|
||||
:rtype: `bool`
|
||||
|
||||
"""
|
||||
if not os.path.isdir('/proc/self'):
|
||||
# when launch in chroot
|
||||
return True
|
||||
else:
|
||||
return os.access('/dev/lxc/console', os.F_OK)
|
||||
|
||||
|
||||
def close(self):
|
||||
if self._restkit_request is not None:
|
||||
self._restkit_request.close()
|
||||
|
||||
|
||||
    def _request(self, path, **kwargs):
        """Send HTTP request to Creole server.

        On connection errors, retry up to three times before giving up.

        :param path: path to the creole resource
        :type path: `str`
        :param kwargs: extra query parameters; the special key
                       ``method`` selects the HTTP method (default GET)
        :return: response of the request
        :rtype: :class:`restkit.wrappers.Response`
        :raise TimeoutCreoleClientError: when every attempt failed

        """
        timeout = 5
        max_try = 3
        tried = 0

        # 'method' is consumed from kwargs so the remaining keys become
        # URL query parameters.
        method = 'GET'
        if 'method' in kwargs:
            method = kwargs['method']
            del(kwargs['method'])

        uri = restkit.util.make_uri(path, **kwargs)

        while tried < max_try:
            tried += 1
            try:
                # use eventlet backend (#13194, #21388)
                with eventlet.Timeout(timeout):
                    self._restkit_request = restkit.request(uri, method=method, backend='eventlet')
                return self._restkit_request
            except (ResourceError, RequestError, ParseException, NoMoreData, RequestTimeout, EventletTimeout) as err:
                log.debug(_(u"Connexion error '{0}',"
                            u" retry {1}/{2}").format(err, tried, max_try))
                sleep(1)

        # Only reached when every attempt raised: 'err' is the last
        # caught exception.  NOTE(review): this relies on Python 2
        # scoping — under Python 3, 'err' is unbound after the except
        # block (PEP 3110); confirm before porting.
        if isinstance(err, RequestError):
            msg = _(u"HTTP error: {0}\nPlease check creoled's log (/var/log/rsyslog/local/creoled/creoled.info.log)\nand restart service with command 'service creoled start'")
        else:
            msg = _(u"HTTP error: {0}")
        if isinstance(err, RequestTimeout) or isinstance(err, EventletTimeout):
            err = _(u"creoled service didn't respond in time")

        raise TimeoutCreoleClientError(msg.format(err))
|
||||
|
||||
def is_container_actif(self):
|
||||
if self._is_container_actif is None:
|
||||
self._is_container_actif = self.get_creole('mode_conteneur_actif', 'non') == 'oui'
|
||||
return self._is_container_actif
|
||||
|
||||
    def request(self, command, path=None, **kwargs):
        """Send HTTP request to creole server.

        :param command: action to perform for the creole resource
        :type command: `str`
        :param path: path to the creole resource
        :type path: `str`
        :return: dictionary of variable:value
        :rtype: `dict`
        :raise CreoleClientError: on bad response status or HTTP error
        :raise NotFoundError: when the server answers status code 4

        """
        if path is not None:
            path = self.validate_path(path)
            ret = self._request(self.url + command + path, **kwargs)
        else:
            ret = self._request(self.url + command, **kwargs)
        if ret.status_int != 200:
            log.debug(_(u'HTML content: {0}').format(ret.body_string()))
            raise CreoleClientError(_(u"HTML error {0}, please consult creoled events log (/var/log/rsyslog/local/creoled/creoled.info.log) to have more informations").format(ret.status_int))
        reply = json.loads(ret.body_string())

        # Previous fix for NoMoreData exception #7218 :
        #ret.connection.close()

        # Application-level status: 0 is success, 4 means "not found",
        # anything else is a generic creole error.
        if reply['status'] != 0:
            if reply['status'] == 4:
                raise NotFoundError(u"{0}".format(reply['response']))
            else:
                raise CreoleClientError(normalize(_("Creole error {0}: {1}")).format(
                    reply['status'], reply['response']))

        return reply['response']
|
||||
|
||||
@staticmethod
|
||||
def validate_path(path):
|
||||
"""Validate the path for http request.
|
||||
|
||||
:data:`path` must use ``/`` as separator with a leading one or
|
||||
use ``.`` as separator.
|
||||
|
||||
:param path: path to the creole resource
|
||||
:type path: `str`
|
||||
:return: slash separated path to the resource
|
||||
:rtype: `str`
|
||||
:raise CreoleClientError: when path does not validate
|
||||
|
||||
"""
|
||||
ret = path
|
||||
if not ret.startswith('/'):
|
||||
if ret.find('.') != -1 and ret.find('/') != -1:
|
||||
raise CreoleClientError(_(u"Path must not mix dotted and" +
|
||||
u" slash notation: '{0}'").format(path))
|
||||
elif ret.find('.') != -1:
|
||||
ret = '/{0}'.format( ret.replace('.', '/') )
|
||||
else:
|
||||
raise CreoleClientError(_(u"Path must start" +
|
||||
u" with '/': '{0}'").format(path))
|
||||
return ret
|
||||
|
||||
def get(self, path='/creole', *args, **kwargs):
|
||||
"""Get the values from part of the tree.
|
||||
|
||||
If :data:`path` is a variable, it returns it's value.
|
||||
|
||||
If :data:`path` is a tree node, it returns the whole tree
|
||||
of ``variable:value`` as flat dictionary.
|
||||
|
||||
:param path: path to the creole resource
|
||||
:type path: `str`
|
||||
:param default: default value if any error occurs
|
||||
:return: slash separated path to the resource
|
||||
:rtype: `str`
|
||||
|
||||
"""
|
||||
# Use a dictionary to test existence
|
||||
default = {}
|
||||
if len(args) > 1:
|
||||
raise ValueError(_("Too many positional parameters {0}.").format(args))
|
||||
|
||||
if kwargs.has_key('default'):
|
||||
default['value'] = kwargs['default']
|
||||
del(kwargs['default'])
|
||||
elif len(args) == 1:
|
||||
default['value'] = args[0]
|
||||
|
||||
try:
|
||||
ret = self.request('/get', path, **kwargs)
|
||||
except (NotFoundError, CreoleClientError) as err:
|
||||
if default.has_key('value'):
|
||||
ret = default['value']
|
||||
else:
|
||||
raise err
|
||||
|
||||
return ret
|
||||
|
||||
def list(self, path='/creole'):
|
||||
"""List content of a path.
|
||||
|
||||
If :data:`path` is a variable, it returns it's name.
|
||||
|
||||
If :data:`path` is a tree node, it returns the list of items
|
||||
under it.
|
||||
|
||||
:param path: path to the creole resource
|
||||
:type path: `str`
|
||||
:return: items present under a path
|
||||
:rtype: `list`
|
||||
|
||||
"""
|
||||
return self.request('/list', path)
|
||||
|
||||
def get_creole(self, name=None, *args, **kwargs):
|
||||
"""Get variables under ``/creole``.
|
||||
|
||||
The full path of variable names is stripped in key names.
|
||||
|
||||
:param path: path to the creole resource
|
||||
:type path: `str`
|
||||
:param default: default value to return if the variable named
|
||||
:data:`name` does not exist or any error occurs
|
||||
:return: variables and their value
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
if name is not None:
|
||||
# Tiramisu has no any meaningful message
|
||||
try:
|
||||
ret = self.get('/creole', *args, variable=name, **kwargs)
|
||||
except NotFoundError:
|
||||
msg = _(u'Unknown variable {0}')
|
||||
raise NotFoundError(msg.format(name))
|
||||
else:
|
||||
ret = self.strip_full_path(self.get('/creole', *args, **kwargs))
|
||||
|
||||
return ret
|
||||
|
||||
def reload_config(self):
|
||||
"""Reload Tiramisu's config
|
||||
"""
|
||||
return self.request('/reload_config')
|
||||
|
||||
def reload_eol(self):
|
||||
"""Reload Tiramisu's partial config
|
||||
"""
|
||||
return self.request('/reload_eol')
|
||||
|
||||
def valid_mandatory(self):
|
||||
return self.request('/valid_mandatory')
|
||||
|
||||
    def get_containers(self, group=None):
        """Get basic informations of all containers

        :param group: limit search to a group of containers
        :type group: `str`
        :return: containers informations
        :rtype: `list`
        :raise NotFoundError: when no container matches
        """
        mode_container = self.is_container_actif()
        # In non-container mode, 'root' stands for the host itself, so
        # no group filter is applied.
        if group is None or (not mode_container and group == 'root'):
            args = {}
        else:
            args = {'withoption':'group',
                    'withvalue':group}

        try:
            ret = self.get('/containers/containers', **args)
        except NotFoundError:
            # Tiramisu has no any meaningful message
            if group is not None:
                msg = _(u'No container found for group {0}')
            else:
                msg = _(u'No container found! Is that possible?')
            raise NotFoundError(msg.format(group))

        # Flat 'containerN.attr' keys -> list of per-container dicts.
        ret = self.to_list_of_dict(ret, prefix='container')
        return ret
|
||||
|
||||
|
||||
    def get_container(self, name):
        """Get informations of one container

        :param name: name of the container
        :type name: `str`
        :return: the container's attributes
        :rtype: `dict`
        :raise NotFoundError: when no container has that name
        """
        try:
            ret = self.get('/containers/containers',
                           withoption='name',
                           withvalue=name)
        except NotFoundError:
            # Tiramisu has no any meaningful message
            raise NotFoundError(_(u'Unknown container {0}').format(name))

        # Names are unique: keep the single resulting dictionary.
        ret = self.to_list_of_dict(ret, prefix='container')
        return ret[0]
|
||||
|
||||
|
||||
def get_groups(self):
|
||||
"""Get list of container groups
|
||||
|
||||
All groups are a container, but all containers are not a
|
||||
group.
|
||||
|
||||
:return: container groups names
|
||||
:rtype: `list`
|
||||
|
||||
"""
|
||||
mode_container = self.is_container_actif()
|
||||
containers = self.get_containers()
|
||||
if not mode_container:
|
||||
groups = ['root']
|
||||
else:
|
||||
groups = []
|
||||
for container in containers:
|
||||
if container['name'] == container['group']:
|
||||
groups.append(container['name'])
|
||||
if 'all' in groups:
|
||||
groups.remove('all')
|
||||
|
||||
return groups
|
||||
|
||||
|
||||
def is_group(self, name):
|
||||
"""Verify is a container is a group of containers.
|
||||
|
||||
:param name: name of the container
|
||||
:type name: `str`
|
||||
:return: is the container a group of containers?
|
||||
:rtype: `bool`
|
||||
|
||||
"""
|
||||
mode_container = self.is_container_actif()
|
||||
if not mode_container:
|
||||
return name == 'root'
|
||||
|
||||
container = self.get_container(name)
|
||||
return name == container['group']
|
||||
|
||||
|
||||
    def get_containers_components(self, containers, group=False, merge_duplicates=False):
        """Get all components of a list of containers or group of containers.

        :param containers: container names; NOTE: the list is modified
                           in place when it contains ``'all'``
        :type containers: `list` of `str`
        :param group: containers are names of groups of containers
        :type group: `bool`
        :param merge_duplicates: merge duplicate entries
        :type merge_duplicates: `bool`
        :return: components of the containers
        :rtype: `dict`

        """
        # Pluralized component names ('files', 'packages', ...).
        comp_list = [ '{0}s'.format(name) for name in _CONTAINER_COMPONENTS[:] ]
        component = {}

        if not group:
            if 'all' in containers:
                # make sure all is first
                containers.remove('all')

                # Remove duplicates
                containers = list(set(containers))
                containers.insert(0, 'all')

        for comp in comp_list:
            component[comp] = []
        for container in containers:
            by_cont = self.get_components(None, container=container, group=group)

            # get_components() returns singular keys; only keep those
            # whose plural belongs to the known component types.
            for comp, items in by_cont.items():
                if comp + 's' in comp_list:
                    component[comp + 's'].extend(items)

        if merge_duplicates:
            component = _merge_duplicates_in_components(component, comp_list)

        # Expose the gateway as a dedicated entry, taken from the
        # interface that declares one.
        if 'interfaces' in component:
            for interface in component['interfaces']:
                if 'gateway' in interface and interface['gateway']:
                    component['gateway'] = {u'interface': interface['name'],
                                            u'ip': interface['gateway']}

        return component
|
||||
|
||||
|
||||
def get_container_infos(self, container):
|
||||
"""Get all components of a container or its group
|
||||
|
||||
:param container: container name
|
||||
:type container: `str`
|
||||
:return: components of the container or its group
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
container_info = self.get_container(container)
|
||||
group_name = container_info[u'real_container']
|
||||
container_info = self.get_group_infos(group_name)
|
||||
|
||||
return container_info
|
||||
|
||||
|
||||
    def get_group_infos(self, group):
        """Get all components of a group of container

        :param group: container group name
        :type group: `str`
        :return: components of the container
        :rtype: `dict`

        """
        group_info = self.get_containers_components(containers=[group],
                                                    group=True,
                                                    merge_duplicates=True)

        # If we need to do thing in the name of all containers in the group
        names = []
        found = False
        for container in group_info['containers']:
            name = container['name']
            names.append(name)
            if name == group:
                # Promote the group's own container attributes to the
                # top level of the result.
                found = True
                group_info.update(container)
        if not found:
            # The group itself was not among the members: fetch its
            # attributes directly.
            group_info.update(self.get_container(group))
        # Replace the member dictionaries by the list of their names.
        group_info['containers'] = names

        return group_info
|
||||
|
||||
|
||||
    def get_components(self, name, container=None, group=False):
        """Get component for containers

        :param name: type of container variable, or `None` for all types
        :type name: `str`
        :param container: limit search to a container
        :type container: `str`
        :param group: :data:`container` is a group name (filter on
                      ``real_container`` instead of ``container``)
        :type group: `bool`
        :return: component for all containers; a `dict` of lists when
                 :data:`name` is `None`, a `list` otherwise
        :raise NotFoundError: when nothing matches
        """
        if container is not None:
            if group:
                option_name = 'real_container'
            else:
                option_name = 'container'

            args = {'withoption': option_name,
                    'withvalue': container}
        else:
            args = {}

        ret = None
        if name is None:
            path = '/containers'
        else:
            path = '/containers/{0}'.format(name)
        try:
            ret = self.get(path, **args)
        except NotFoundError:
            # Tiramisu has no any meaningful message
            msg = _(u'Unknown container components {0} for container {1}')
            if container is None:
                msg = _(u'Unknown container components {0}')
            else:
                # If not a container, maybe a container's group: retry
                # with the 'container_group' filter.
                args = {'withoption':'container_group',
                        'withvalue':container}
                try:
                    ret = self.get(path, **args)
                except NotFoundError:
                    msg = _(u'Unknown container components {0} for container {1}')
        # ret stays None only when every lookup above failed.
        if ret is None:
            raise NotFoundError(msg.format(str(name), container))
        if name is None:
            # Dispatch flat 'comps.idx.attr' keys into one flat dict
            # per component type, then listify each of them.
            comp_list = _CONTAINER_COMPONENTS[:]
            dico = {}
            ret_comp = {}
            for comp in comp_list:
                dico[comp] = {}
            for path, item in ret.items():
                spath = path.split('.')
                #without 's'
                comp = spath[0][:-1]
                dico[comp]['.'.join(spath[1:])] = item
            for comp in comp_list:
                ret_comp[comp] = self.to_list_of_dict(dico[comp], prefix=comp)

        else:
            ret_comp = self.to_list_of_dict(ret, prefix=name)
        return ret_comp
|
||||
|
||||
@classmethod
|
||||
def to_list_of_dict(cls, flat, prefix=None):
|
||||
"""Convert a flat dictionary to a list of dictionaries.
|
||||
|
||||
Build a list of dictionary ``<name>:<value>`` for each
|
||||
prefix of the form ``<prefix><integer index>.<name>:<value>``
|
||||
|
||||
If list is numerically ordered by ``<integer index>``
|
||||
extracted from each key accordingly to :data:`prefix`.
|
||||
|
||||
If the :data:`prefix` is not specified, a random element of
|
||||
:data:`flat` is extracted to compute it.
|
||||
|
||||
:param flat: absolute attribute variable names and their
|
||||
values
|
||||
:type flat: `dict`
|
||||
:param prefix: alphabetic prefix to extract integer index
|
||||
:type prefix: `str`
|
||||
:return: variables and their attributes values
|
||||
:rtype: `list` of `dict`
|
||||
|
||||
"""
|
||||
reply = {}
|
||||
sorted_items = []
|
||||
sort_key = None
|
||||
|
||||
if prefix is None:
|
||||
# Extract prefix name
|
||||
random_key = flat.iterkeys().next()
|
||||
indexed_prefix = random_key.split('.')[0]
|
||||
re_match = re.match(r'(\D+)\d+', indexed_prefix)
|
||||
prefix = re_match.group(1)
|
||||
|
||||
if prefix is not None:
|
||||
# check for none because maybe regexp match did not work
|
||||
# Extract component index as integer for comparaison
|
||||
sort_key = lambda string: int(string.split('.')[0].lstrip(prefix))
|
||||
|
||||
for key in sorted(flat.keys(), key=sort_key):
|
||||
sid, sattr = cls._split_path_leaf(key)
|
||||
if sid not in reply:
|
||||
sorted_items.append(sid)
|
||||
reply[sid] = {}
|
||||
reply[sid][sattr] = flat[key]
|
||||
return [ reply[item] for item in sorted_items ]
|
||||
|
||||
@staticmethod
|
||||
def strip_full_path(flat):
|
||||
"""Strip full path of flat dictionary keys.
|
||||
|
||||
:param flat: absolute variable names and their value
|
||||
:type flat: `dict`
|
||||
:return: short variable names and their value
|
||||
:rtype: `dict`
|
||||
"""
|
||||
ret = {}
|
||||
for path in flat:
|
||||
parts = path.split('.')[1:]
|
||||
if len(parts) == 1:
|
||||
# Single variable
|
||||
ret[ parts[0] ] = flat[path]
|
||||
elif len(parts) == 2 and parts[0] == parts[1]:
|
||||
# Master variable
|
||||
ret[ parts[0] ] = flat[path]
|
||||
else:
|
||||
# slave variable
|
||||
ret[ '.'.join(parts) ] = flat[path]
|
||||
return ret
|
||||
|
||||
@staticmethod
|
||||
def to_grouped_lists(dict_list, keyname, keyvalue=None):
|
||||
"""Convert a `list` of `dict` to a `dict` :data:`keyvalue`:`list`.
|
||||
|
||||
Build dictionary of ``dictionary[:data:`keyvalue`]:<list of
|
||||
dict>`` to group all items with the same value of a key.
|
||||
|
||||
:param dict_list: dictionaries
|
||||
:type dict_list: `list`
|
||||
:param keyname: name of the key to test
|
||||
:type keyname: `str`
|
||||
:param keyvalue: value to match :data:`keyname`
|
||||
:return: dictionary grouped by a key value
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
reply = {}
|
||||
for key in dict_list:
|
||||
if keyname in key and keyvalue and keyvalue != key[keyname]:
|
||||
continue
|
||||
if keyname not in key:
|
||||
if None not in reply:
|
||||
reply[None] = []
|
||||
reply[None].append(key)
|
||||
else:
|
||||
if key[keyname] not in reply:
|
||||
reply[ key[keyname] ] = []
|
||||
reply[ key[keyname] ].append(key)
|
||||
return reply
|
||||
|
||||
@staticmethod
|
||||
def _split_path_leaf(path, separator='.'):
|
||||
"""Split path in two parts: dirname and basename.
|
||||
|
||||
If :data:`path` does not contains the :data:`separator`, it's
|
||||
considered as leaf and the dirname of :data:`path` is set to
|
||||
`None`.
|
||||
|
||||
:param path: path to the creole resource
|
||||
:type path: `str`
|
||||
:return: dirname and basename of :data:`path`
|
||||
:rtype: `list`
|
||||
|
||||
"""
|
||||
if path.find(separator) == -1:
|
||||
return (None, path)
|
||||
|
||||
splited = path.split(separator)
|
||||
return ( '.'.join(splited[:-1]), splited[-1] )
|
||||
|
||||
|
||||
class TimeoutCreoleClientError(StandardError):
    """Creole server did not answer after all retry attempts."""
    pass
|
||||
|
||||
|
||||
class CreoleClientError(StandardError):
    """Bad use of :class:`CreoleClient`
    """
    pass
|
||||
|
||||
|
||||
class NotFoundError(CreoleClientError):
    """Requested variable not found
    """
    pass
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Basic smoke test: dump the tree served at the default URL.
    try:
        print(CreoleClient().get('/'))
    except Exception as err:
        print(_(u"Error: {0}").format(err))
|
|
@ -0,0 +1,81 @@
|
|||
# -*- coding: utf-8 -*-
"""
Configuration constants for creole

"""
from os.path import join, isfile, isdir

eoledir = '/usr/share/eole'
LOCALKERNEL_FILE = join(eoledir, 'noyau/local')
REBOOT_FILE = '/var/run/reboot-required'

charset = 'UTF8'

# default path for templates, the config.eol file, etc.
configeoldir = '/etc/eole/'
eoleroot = join(eoledir, 'creole')
vareole = '/var/lib/eole'

bareos_restore_root = join(eoledir, 'bareos')
bareos_restore = join(bareos_restore_root, 'restore')

configeol = join(configeoldir, 'config.eol')

# certificates
cert_file = '/etc/ssl/certs/eole.crt'
key_file = '/etc/ssl/certs/eole.key'
# port of the creole_serv server
port_rpc = 4333

# source directory for template files
templatedir = '/var/lib/creole'

dicos_dir = join(eoleroot, 'dicos')
modif_dir = join(eoleroot, 'modif')
distrib_dir = join(eoleroot, 'distrib')
patch_dir = join(eoleroot, 'patch')

# path for data files
datadir = '/usr/share/creole'
# path for additional function modules
func_dir = join(datadir,'funcs')
# directories holding the creole XML dictionaries
eoledirs = [dicos_dir, join(dicos_dir, 'variante'), join(dicos_dir, 'local')]

# extra dictionaries
eoleextradico = join(eoledir, 'creole/extra')
eoleextraconfig = join(configeoldir, 'extra')
forbiddenextra = ['containers', 'creole']

# DTD directory; prefer an in-tree copy when running from sources
dtddir = datadir
if isfile('data/creole.dtd'):
    dtdfilename = 'data/creole.dtd'
elif isfile('../creole/data/creole.dtd'):
    dtdfilename = '../creole/data/creole.dtd'
else:
    dtdfilename = join(dtddir, 'creole.dtd')

# directory holding the lock files
LOCK_PATH = '/var/lock/eole'
LOCK_SYSTEM_PATH = join(LOCK_PATH, 'eole-system')

# Name of the master server
VIRTMASTER = 'root'
VIRTROOT = '/var/lib/lxc'
VIRTBASE = 'rootfs'

container_instance_lockfile = '/etc/eole/.container_instance.lock'
containers_default_network = '192.0.2'
gen_conteneurs_needed = '/etc/eole/.gen_conteneurs'

VIRTENABLED_LOCKFILE = '/etc/eole/.VirtEnabled.lock'
VIRTDISABLED_LOCKFILE = '/etc/eole/.VirtDisabled.lock'
INSTANCE_LOCKFILE = '/etc/eole/.instance'
UPGRADE_LOCKFILE = '/etc/eole/.upgrade-auto'

SSL_LAST_FILE = '/etc/eole/ssl/lastfile.txt'

# Flattened creole configuration storage; fall back to /tmp when the
# state directory does not exist.
FLATTENED_CREOLE_DIR = join(vareole, 'config')
if not isdir(FLATTENED_CREOLE_DIR):
    FLATTENED_CREOLE_DIR = join('/tmp')
|
|
@ -0,0 +1,224 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
##########################################################################
|
||||
# creole.containers - management of LXC containers
|
||||
# Copyright © 2012,2013 Pôle de compétences EOLE <eole@ac-dijon.fr>
|
||||
#
|
||||
# License CeCILL:
|
||||
# * in french: http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# * in english http://www.cecill.info/licences/Licence_CeCILL_V2-en.html
|
||||
##########################################################################
|
||||
|
||||
"""Manage LXC containers
|
||||
|
||||
"""
|
||||
|
||||
from .client import CreoleClient, _CONTAINER_COMPONENTS
|
||||
from .config import VIRTENABLED_LOCKFILE, VIRTDISABLED_LOCKFILE
|
||||
from .error import VirtError
|
||||
from .config import templatedir, VIRTROOT
|
||||
from .template import CreoleTemplateEngine
|
||||
from pyeole.process import system_code, system_out, system_progress_out
|
||||
from pyeole.diagnose import test_tcp
|
||||
from .i18n import _
|
||||
|
||||
from distutils.spawn import find_executable
|
||||
from os.path import isdir
|
||||
from os.path import isfile, islink
|
||||
from os.path import ismount
|
||||
from os.path import join
|
||||
from os.path import dirname
|
||||
from os import access
|
||||
from os import F_OK
|
||||
from os import stat
|
||||
from os import symlink
|
||||
from os import makedirs
|
||||
from os import mknod
|
||||
from os import makedev
|
||||
from os import major
|
||||
from os import minor
|
||||
from os import unlink
|
||||
from stat import S_IFBLK
|
||||
from stat import S_ISBLK
|
||||
from hashlib import md5
|
||||
from glob import glob
|
||||
import cjson
|
||||
|
||||
import logging
|
||||
|
||||
client = CreoleClient()
log = logging.getLogger(__name__)

# Record of the md5 checksums of the watched LXC configuration files.
_LXC_MD5 = '/etc/eole/lxc.md5'
# Log file receiving the output of lxc-create.
_LXC_LOG = '/var/log/isolation.log'

_NOT_REALLY_LXC_CONTAINERS = ['root', 'all']
"""List of container names that are not to be generated.

"""

_LXC_TEMPLATE = {'config': "lxc.config",
                 'fstab': "lxc.fstab",
                 'rootfs/etc/network/interfaces' : "lxc.interfaces",
                 }
"""Creole templates for LXC containers.

"""
|
||||
|
||||
def is_lxc_locked():
    """Check if the LXC virtualization is locked.

    The virtualization is locked after first ``instance`` of the
    server to avoid switching between modes.

    :return: ``enable`` if LXC is enabled, ``disable`` if LXC is
             disabled or ``None`` where there is no lockfile.
    :raise VirtError: when both lock files exist at once.

    """
    enabled = isfile(VIRTENABLED_LOCKFILE)
    disabled = isfile(VIRTDISABLED_LOCKFILE)
    if enabled and disabled:
        raise VirtError(_(u"Invalid LXC lock files state: both are present."))
    if enabled:
        return 'enable'
    if disabled:
        return 'disable'
    return None
|
||||
|
||||
def is_lxc_enabled():
    """Check if LXC controller is enabled.

    Switching between enabled and disabled LXC is refused: after the
    first ``instance`` a lock file records the mode, checked at each
    ``reconfigure``.

    :return: If the LXC container mode is enabled.
    :rtype: `bool`
    :raise VirtError: if state in inconsistent between configuration
                      and lock files.

    """
    enabled = client.get_creole('mode_conteneur_actif', 'non') == 'oui'
    if enabled and not find_executable('lxc-info'):
        raise VirtError(_(u'LXC is enabled but LXC commands not found in PATH.'))

    lock_state = is_lxc_locked()
    if enabled and lock_state == 'disable':
        raise VirtError(_(u"Server already instantiated in no containers mode, attempt to activate containers mode aborted."))
    if not enabled and lock_state == 'enable':
        raise VirtError(_(u"Server already instantiated in containers mode, attempt to activate no containers mode aborted."))

    return enabled
|
||||
|
||||
def generate_lxc_container(name, logger=None):
    """Run creation of a container.

    Check if LXC is enabled and take care of ``root`` and ``all``
    containers.

    :param name: name of the LXC container
    :type name: `str`
    :param logger: progress logger forwarded to system_progress_out
    :raise Exception: when the cacher is unreachable, creation fails
                      or the base packages are missing afterwards

    """
    # 'root' and 'all' are pseudo-containers: nothing to create.
    if name not in _NOT_REALLY_LXC_CONTAINERS:
        if not test_tcp('localhost', client.get_creole('apt_cacher_port')):
            raise Exception(_('cacher not available, please start check log in /var/log/apt-cacher-ng/ and restart it with "service apt-cacher-ng start" command'))
        # Start from a fresh creation log.
        if isfile(_LXC_LOG):
            unlink(_LXC_LOG)
        cmd = ['lxc-create', '-n', name, '-t', 'eole']
        log.debug('Run: {0}'.format(' '.join(cmd)))
        code, stdout, stderr = system_progress_out(cmd, _(u"Managing container {0}").format(name), logger)
        fh = open(_LXC_LOG, 'w')
        fh.write(stdout)
        fh.write(stderr)
        fh.close()
        # NOTE(review): this raises only when the failure output claims
        # the container already exists — the opposite of what one would
        # expect ('>= 0' vs '== -1'); confirm the intended condition.
        if code != 0 and stdout.find(u"'{0}' already exists'".format(name)) >= 0:
            raise Exception(_('error during the process of container creation, more informations in {0}').format(_LXC_LOG))
        # Sanity check: the eole base packages must be present in the
        # freshly created rootfs.
        path_container = client.get_creole('container_path_{0}'.format(name))
        path_apt_eole_conf = join(path_container, 'etc', 'apt', 'apt-eole.conf')
        path_apt_eole = join(path_container, 'usr', 'sbin', 'apt-eole')
        if not isfile(path_apt_eole_conf) or not isfile(path_apt_eole):
            raise Exception(_('eole-common-pkg not installed in container, something goes wrong, more informations in {0}').format(_LXC_LOG))
|
||||
|
||||
|
||||
def is_lxc_running(container):
    """Check if an LXC container is started and reachable.

    Combines the LXC state check with a TCP probe on the SSH port.

    :param container: the container informations
    :type container: `dict`
    :return: if the container is running and reachable
    :rtype: `bool`

    """
    if not is_lxc_started(container):
        return False
    return test_tcp(container[u'ip'], 22)
|
||||
|
||||
|
||||
def is_lxc_started(container):
    """Check if an LXC container is started.

    When LXC is disabled, or the container has an empty ``path``
    (host-level pseudo-container), it is considered started.

    :param container: the container informations
    :type container: `dict`
    :return: if the container is started
    :rtype: `bool`
    :raise ValueError: when the container lacks a name or an IP

    """
    if not is_lxc_enabled() or container.get(u'path', None) == '':
        return True

    if container.get(u'name', None) is None:
        raise ValueError(_(u"Container has no name"))
    if container.get(u'ip', None) is None:
        raise ValueError(_(u"Container {0} has no IP").format(container[u'name']))

    state_cmd = ['lxc-info', '--state', '--name', container[u'name']]
    returncode, out, err = system_out(state_cmd)
    return out.strip().endswith('RUNNING')
|
||||
|
||||
|
||||
def create_mount_point(group):
    """Create mount points in LXC.

    Missing target directories prevent LXC from starting, so every
    fstab entry of the group gets its directory created.

    """
    if 'fstabs' not in group:
        return
    for entry in group['fstabs']:
        target = entry.get('mount_point', entry['name'])
        directory = join(group['path'], target.lstrip('/'))
        if not isdir(directory):
            makedirs(directory)
|
||||
|
||||
|
||||
def lxc_need_restart():
    """Tell whether the LXC configuration changed since last check.

    Compare the md5 of the current LXC configuration files against the
    checksums recorded in ``_LXC_MD5`` and refresh that record when
    they differ (or when it is missing/unreadable).

    :return: if any watched LXC configuration file changed
    :rtype: `bool`
    """
    def md5sum(path):
        # with-statement closes the handle (previously leaked).
        with open(path) as source:
            return md5(source.read()).hexdigest()

    files = ['/etc/lxc/default.conf', '/etc/default/lxc-net']
    files += glob('/opt/lxc/*/config')
    files += glob('/opt/lxc/*/fstab')
    md5s = [md5sum(f) for f in files]

    if not isfile(_LXC_MD5):
        # No record yet: first run, assume a restart is needed.
        ret = True
    else:
        try:
            with open(_LXC_MD5, 'r') as recorded:
                old_md5s = cjson.decode(recorded.read())
        except cjson.DecodeError:
            # Corrupted record: treat as changed and rewrite it below.
            ret = True
        else:
            ret = old_md5s != md5s

    if ret:
        # Refresh the record so the next call compares against the
        # current state.
        with open(_LXC_MD5, 'w') as record:
            record.write(cjson.encode(md5s))
    return ret
|
||||
|
|
@ -0,0 +1,115 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
from .i18n import _
|
||||
|
||||
from tiramisu import option
|
||||
# Map DTD attribute literal strings to their Python values.
CONVERT_VALUE = {'True': True, 'False': False, 'None': None}
# Attribute names rejected by the DTD parser.
forbidden_name = ('level',)
|
||||
|
||||
def parse_dtd(filename):
    """Parse DTD file and return a dict.

    Dict structure:

    - key: name of element
    - values:

      - type: if text, option type
      - options: list of subelements
      - needs: list of mandatory attributes with None or list of possible
        value
      - optionals: tuple:
        - list of optional attributes with None or list of possible
          value
        - default value (None if no default value)

    Example:
    {'container':
        {'type': False,
         'options': ['service', 'interface', 'package', 'file', 'disknod'],
         'needs': {'name': {'values': None, 'type': None},
         'optionals': {'group': {'values': None, 'default': None,
                                 'type': None},
                       'id': {'values': None, 'default': None, 'type': None}}}
    }
    """
    def parse_option(option):
        # Strip the DTD punctuation around a content model, then split on
        # the alternation/sequence separators to get bare element names.
        option = option.replace('(', '').replace('*', '').replace(')', '')
        option = option.replace('>', '').replace(' ', '').replace('+', '')
        option = option.split('|')
        options = []
        for opt in option:
            options.extend(opt.split(','))
        # EMPTY is the DTD way of saying "no children at all"
        if options == ['EMPTY']:
            options = []
        return options

    def parse_comment(comment, options=None):
        # A trailing <!-- ... --> comment encodes the tiramisu option type
        # (either a "...Option" class name or a plain attribute name).
        type_ = None
        if comment.startswith('<!--') and comment.endswith('-->'):
            comment = comment[4:-3]
            if comment.endswith('Option'):
                if comment == 'ChoiceOption':
                    raise ValueError(_(u'Do not write "ChoiceOption" in comments'))
                try:
                    type_ = getattr(option, comment)
                except AttributeError:
                    raise ValueError(_(u"Unvalid comment content: must match a valid attribute name"))
            else:
                # comment is the attribute name, the option type is its value
                type_ = comment
        return type_

    dtd_load = {}
    # The historical code leaked the file handle; the context manager
    # guarantees it is closed even if a parse error is raised.
    with open(filename) as fh:
        for line in fh:
            sline = line.split()
            if sline == []:
                continue
            # element line
            if sline[0] == '<!ELEMENT':
                if sline[-1].startswith('<!--') and sline[-1].endswith('-->'):
                    options = ' '.join(sline[2:-1])
                else:
                    options = ' '.join(sline[2:])
                options = parse_option(options)
                type_ = None
                if '#PCDATA' in options:
                    options.remove('#PCDATA')
                    if sline[-1].startswith('<!--') and sline[-1].endswith('-->'):
                        type_ = parse_comment(sline[-1], options)
                    else:
                        # text element without an explicit type comment
                        type_ = option.UnicodeOption
                dtd_load[sline[1]] = {'type': type_, 'options': options,
                                      'needs': {}, 'optionals': {}}
            # attlist line
            elif sline[0] == '<!ATTLIST':
                if sline[1] in forbidden_name:
                    raise ValueError(_(u'Using name {0} is forbidden in attributes').format(sline[1]))
                # possible values
                if sline[3] == 'CDATA':
                    values = None
                else:
                    if not sline[3].startswith('(') or not sline[3].endswith(')'):
                        raise Exception(_(u'Not a valid list'))
                    sline3 = sline[3][1:-1].split('|')
                    values = []
                    for val in sline3:
                        values.append(CONVERT_VALUE.get(val, val))
                # comment
                type_ = parse_comment(sline[-1])
                # default value or state value (needs or optionals)
                if sline[4].startswith('#REQUIRED'):
                    dtd_load[sline[1]]['needs'][sline[2]] = {'values': values,
                                                             'type': type_}
                elif sline[4].startswith('#IMPLIED'):
                    dtd_load[sline[1]]['optionals'][sline[2]] = {'values': values,
                                                                 'default': None,
                                                                 'type': type_}
                else:
                    default = sline[4].replace('"', '').replace("'", '').replace(
                        '>', '').strip()
                    default = CONVERT_VALUE.get(default, default)
                    dtd_load[sline[1]]['optionals'][sline[2]] = {'values': values,
                                                                 'default': default,
                                                                 'type': type_}
    return dtd_load
|
|
@ -0,0 +1,36 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright © 2014 Pôle de compétences EOLE <eole@ac-dijon.fr>
|
||||
#
|
||||
# License CeCILL:
|
||||
# * in french: http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# * in english http://www.cecill.info/licences/Licence_CeCILL_V2-en.html
|
||||
##########################################################################
|
||||
|
||||
"""Version variables of the EOLE distribution.

"""

UBUNTU_VERSION = u'xenial'
"""Ubuntu version used by EOLE.

"""
EOLE_VERSION = u'2.6'
"""Current stable EOLE distribution.

"""

EOLE_RELEASE = u'{0}.2'.format(EOLE_VERSION)
"""Release version of the current stable EOLE distribution.

"""

ENVOLE_VERSION = u'6'
"""Envole version to use.

"""

LAST_RELEASE = u'2'
"""Last stable release for this version

"""
|
|
@ -0,0 +1,90 @@
|
|||
# -*- coding: utf-8 -*-
"""
Creole errors: exception hierarchy shared by the creole package.
"""

class VirtError(Exception):
    """Inconsistency concerning the containers."""
    pass

#class ValueEoleError(Exception):
#    """Cette valeur n'existe pas"""
#    pass
#
class NoneError(Exception):
    """Empty value."""
    pass

class OutOfRange(Exception):
    """Value outside the permitted range."""
    pass

class TypeEoleError(Exception):
    """Type error."""
    pass

class ConfigError(Exception):
    """Generic configuration error."""
    pass

class NetworkConfigError(Exception):
    """ Network configuration error
    """
    pass

class FileNotFound(ConfigError):
    """A required file is missing."""
    pass

class TemplateError(ConfigError):
    """Error while instantiating a template."""
    pass

class TemplateDisabled(TemplateError):
    """Template is disabled.
    """
    pass

class DependencyError(ConfigError):
    """Unsatisfied dependency between configuration elements."""
    pass

#class ConstraintError(ConfigError):
#    pass
#


class LockError(Exception):
    """ Add lock error
    """
    pass


class UnlockError(Exception):
    """ Remove lock error
    """
    pass


class UserExit(Exception):
    """ User exit(0) signal
    """
    pass


class UserExitError(Exception):
    """ User exit(1) signal
    """
    pass


class CreoleOperationError(Exception):
    """Type error or value Error for Creole variable's type or values
    """


class SpaceObjShallNotBeUpdated(Exception):
    """Specific behavior in case of the presence or not
    of an object in the space object
    """


class CreoleDictConsistencyError(Exception):
    """It's not only that the Creole XML is valid against the Creole DTD
    it's that it is not consistent.
    """
|
|
@ -0,0 +1,280 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
fonctions communes Creole
|
||||
"""
|
||||
import os, time, re
|
||||
from os.path import join, isfile
|
||||
from pyeole.process import system_out, system_code
|
||||
from pyeole.ansiprint import print_orange
|
||||
from pyeole.log import init_logging
|
||||
from pyeole.pkg import EolePkg
|
||||
from pyeole.encode import normalize
|
||||
from .config import LOCALKERNEL_FILE, REBOOT_FILE
|
||||
|
||||
from .i18n import _
|
||||
|
||||
# Lazily-created package manager shared by get_package_depends()
# (used when the creole client is not started).
# NOTE: "global" at module level is a no-op, kept from the original code.
global PkgManager
PkgManager = None
|
||||
|
||||
######################
|
||||
# Gestion des noyaux #
|
||||
######################
|
||||
|
||||
def split_version(version):
    """Return *version* as a list of integer sub-numbers.

    The string is split on both ``-`` and ``.``, so a Debian-style
    kernel version such as ``'4.4.0-101'`` becomes ``[4, 4, 0, 101]``,
    suitable for numeric comparison/sorting.

    :param version: version number string
    :type version: string
    :return: list of ints
    """
    # Raw string avoids the invalid '\.' escape; the explicit list keeps
    # the historical Python 2 behaviour of map() returning a list.
    return [int(part) for part in re.split(r'[-.]', version)]
|
||||
|
||||
def get_version_filtered_pkgs(prefix='linux-image'):
    """Return installed package names matching *prefix*, ordered by
    version number (oldest first).

    :param prefix: package name prefix, e.g. ``'linux-image'``
    :return: list of package names sorted by their embedded version
    """
    vers_pkg_re = re.compile(
        r"{0}-(?P<vers>[0-9]+(?P<upstr_vers>\.[0-9]+)*(-(?P<pkg_vers>[0-9]+))?)".format(prefix))
    vers_pkgs = []
    for pkg in get_installed_kernel(prefix):
        # single search per package (the original ran the regex twice)
        match = vers_pkg_re.search(pkg)
        if match:
            vers_pkgs.append((pkg, split_version(match.group('vers'))))
    return [pkg for pkg, _vers in sorted(vers_pkgs, key=lambda p: p[1])]
|
||||
|
||||
def get_custom_kernel():
    """Return the name of the custom ("pinned") kernel, or None.

    The name is read from ``LOCALKERNEL_FILE`` when that file exists;
    when it does not, the function implicitly returns None.
    """
    if isfile(LOCALKERNEL_FILE):
        # custom kernel detected; the historical code used the Python 2
        # file() builtin and leaked the handle — close it deterministically
        with open(LOCALKERNEL_FILE) as kernel_file:
            return kernel_file.read().strip()
|
||||
|
||||
def get_wanted_kernel():
    """Return the name of the kernel the server is supposed to run.

    A custom ("pinned") kernel wins when one is declared; otherwise the
    version string is derived from the most recent installed kernel image.
    """
    custom_kernel = get_custom_kernel()
    if custom_kernel:
        ret = custom_kernel
    else:
        kernel_images = get_version_filtered_pkgs()
        # Get last kernel version
        last_ver = kernel_images[-1].split('-')
        if len(last_ver) >= 4:
            # e.g. ['linux', 'image', '4.4.0', '101', 'generic']
            last_ver = "{0}-{1}-{2}".format(*last_ver[2:5])
        elif len(last_ver) == 3:
            last_ver = "{0}".format(last_ver[-1])
        # NOTE(review): if the split yields fewer than 3 parts, ret stays a
        # list rather than a string — presumably never happens with real
        # package names; confirm before relying on it
        ret = last_ver
    return ret
|
||||
|
||||
def get_current_kernel():
    """Return the name of the currently running kernel (``uname -r``)."""
    return system_out(['uname', '-r'])[1].strip()
|
||||
|
||||
def get_installed_kernel(kernel):
    """Return the list of installed kernel packages matching *kernel*.

    Parses ``dpkg -l`` output through awk, keeping only packages in the
    installed/held states.
    """
    cmd = """COLUMNS=180 dpkg -l 2>/dev/null | awk -F " " '/^(i|h)i.*%s/ {print $2}'""" % kernel
    pipe = os.popen(cmd)
    return pipe.read().splitlines()
|
||||
|
||||
def get_package_depends(pkg):
    """Return the dependency list of Debian package *pkg*.

    Best effort: the shared PkgManager is created on first use, and any
    failure (package manager unavailable, unknown package, ...) yields
    an empty list instead of raising.
    """
    global PkgManager
    try:
        if PkgManager is None:
            PkgManager = EolePkg('apt')
        return PkgManager.get_depends(pkg)
    except Exception:
        # narrowed from a bare "except:" so SystemExit and
        # KeyboardInterrupt are no longer swallowed
        return []
|
||||
|
||||
def controle_kernel(force_grub=True):
    """Check whether the server is running the desired kernel.

    :param force_grub: when True, refresh the Grub configuration if a
        kernel change is pending
    :return: True when a reboot is required
    """
    need_boot = False
    if isfile(REBOOT_FILE):
        # i.e. /var/run/reboot-required
        need_boot = True

    wanted_kernel = get_wanted_kernel()
    # use the specified kernel
    if wanted_kernel != get_current_kernel():
        need_boot = True
        if force_grub:
            # Update grub does the job since eole-kernel-version 2.3-eole37~2
            print _(u"Updating Grub configuration")
            # LVM_SUPPRESS_FD_WARNINGS added for #10761
            system_code("/usr/sbin/update-grub2", env={'LVM_SUPPRESS_FD_WARNINGS': '1', "LC_ALL": 'fr_FR.UTF-8'})
    # reboot needed?
    return need_boot
|
||||
|
||||
def regen_initrd():
    """Check that the initramfs of the wanted kernel exists and
    generate it when missing.
    """
    noyau = get_wanted_kernel()
    if not isfile("/boot/initrd.img-%s" % noyau):
        print _(u"Initramfs missing, generating :")
        cmd = ["/usr/sbin/update-initramfs", '-c', '-k', noyau]
        system_code(cmd)
|
||||
|
||||
def get_kernel_to_remove():
    """Return the list of kernel packages that may be removed.

    Every installed kernel (and header) package is removable except:

    - the running kernel
    - the two most recent kernels
    - the optional custom ("pinned") kernel

    plus the header packages matching any kept kernel.
    """
    # every installed kernel image
    installed_kernels = get_version_filtered_pkgs()
    # the two most recent kernels
    to_keep = installed_kernels[-2:]
    # every installed header package
    installed_kernels.extend(get_version_filtered_pkgs(prefix='linux-headers'))
    # the running kernel
    to_keep.append('linux-image-{0}'.format(get_current_kernel()))
    # the optional custom kernel
    custom_kernel = get_custom_kernel()
    if custom_kernel:
        to_keep.append('linux-image-{0}'.format(custom_kernel))
    # headers matching the kernels to keep (with and without '-generic')
    headers_to_keep = [k.replace('image', 'headers') for k in to_keep]
    headers_to_keep.extend([h.replace('-generic', '') for h in headers_to_keep])
    to_keep.extend(headers_to_keep)
    # removable = installed minus kept
    to_remove = list(set(installed_kernels) - set(to_keep))
    return to_remove
|
||||
|
||||
def purge_rc():
    """Purge packages left in the "rc" state (removed, configuration
    files remaining).
    """
    cmd = """COLUMNS=180 dpkg -l|grep "^rc"|awk -F " " '{print $2}'"""
    for package in os.popen(cmd).read().splitlines():
        os.system("dpkg -P %s >/dev/null" % package)
|
||||
|
||||
def log(etat, msg, type_proc, console=True):
    """Write a local (syslog) log entry, echoed on the console for errors.

    :param etat: state string, e.g. "ERR"
    :param msg: message to log
    :param type_proc: name of the procedure being logged
    :param console: when True, errors are also displayed on the console
    """
    msg = normalize(msg)
    type_proc = normalize(type_proc)
    display = False
    log_func = 'info'
    if etat == "ERR":
        if console:
            # display on the console
            display = True
        log_func = 'error'

    try:
        z_logger = init_logging(name=u'zephir', syslog=True, level=u'info', console=display)
    except ValueError, err:
        # syslog unavailable: fall back to console-only logging
        z_logger = init_logging(name=u'zephir', level=u'info', console=True)
        z_logger.warn(_(u"Syslog logging is not working properly: {0}".format(err)))
        z_logger.warn(_(u"You may need to start/restart systemd-journald"))

    getattr(z_logger, log_func)("%s => %s : %s " % (type_proc, etat, msg))
|
||||
|
||||
def zephir(etat, msg, type_proc, console=True):
    """Handle Zephir messages: local log plus remote Zephir log when
    the server is registered.

    :param etat: state string ("INIT", "FIN", "ERR", "MSG", ...)
    :param msg: message to log
    :param type_proc: name of the procedure being logged
    :param console: forwarded to log() for console display of errors
    """
    # map the textual state to the numeric code Zephir expects
    etat_zeph = None
    if etat.upper().startswith("INIT"):
        etat_zeph = -1
    elif etat.upper().startswith("FIN"):
        etat_zeph = 0
    elif etat.upper().startswith('ERR'):
        etat_zeph = 1
    elif etat.upper().startswith('MSG'):
        etat_zeph = -2
    # local log when there is a message or an error
    if (len(msg) > 0) or (etat.upper() == "ERR"):
        log(etat, msg, type_proc, console)
    # log to zephir when available
    if etat_zeph is not None:
        try:
            # if the server is registered, send a log entry to Zephir
            from zephir.zephir_conf.zephir_conf import id_serveur
        except:
            # not registered: stay silent
            pass
        else:
            from zephir.lib_zephir import log as zlog
            zlog(type_proc, etat_zeph, msg, str(time.ctime()))
|
||||
|
||||
def init_proc(type_proc):
    """Initialise a procedure (startup log + possible blocking).

    :param type_proc: name of the procedure to check
    :return: True when the procedure is allowed to run

    Thin wrapper around verify_lock(): the historical
    ``if ...: return True / else: return False`` was redundant since
    verify_lock() already returns a boolean.
    """
    return verify_lock(type_proc)
|
||||
|
||||
#def end_proc(etat,msg,type_proc):
|
||||
# """
|
||||
# loggue la fin d'une procédure
|
||||
# """
|
||||
# log(etat,msg,type_proc )
|
||||
|
||||
def verify_lock(name):
    """Check whether procedure *name* is blocked (locked) or allowed.

    The lock list is fetched from the Zephir server when possible and
    cached in LOCK_FILE; when Zephir cannot be reached, the cached copy
    is used instead.

    :return: True when the procedure may run, False when it is locked
    """
    LOCK_FILE = "/usr/share/zephir/zephir_locks"
    if name == "":
        return True
    from zephir.lib_zephir import zephir_path
    try:
        from zephir.lib_zephir import config, zephir, convert
        locks = convert(zephir.serveurs.get_locks(config.id_serveur))
        if locks[0] == 0:
            # error on the zephir side: skip this phase
            raise Exception
        locks = [lock[0] for lock in locks[1]]
    except Exception, mess:
        # no error message when the server is not registered
        zephir_error = False
        if isfile(join(zephir_path, "zephir_conf", "zephir_conf.py")):
            # do not block when the call to zephir fails
            print ""
            print_orange(_(u"Checking permissions on Zéphir for {0} impossible.").format(name))
            print_orange(_(u"Error message: {0}").format(mess))
            zephir_error = True
        # fall back to the last stored state
        if os.path.exists(LOCK_FILE):
            if zephir_error:
                print_orange(_(u"Using stored parameters"))
            file_lock = file(LOCK_FILE)
            locks = file_lock.read().split('\n')
            file_lock.close()
            # block when forbidden
            if name in locks:
                return False
    else:
        # store the fetched lock list for later offline use
        content = "\n".join(locks)
        try:
            file_lock = file(LOCK_FILE, "w")
            file_lock.write(content)
            file_lock.close()
        except:
            print _(u"Updating {0} impossible (insufficient rights).").format(LOCK_FILE)
        # return code
        if name in locks:
            return False
    return True
|
|
@ -0,0 +1,52 @@
|
|||
# -*- coding: UTF-8 -*-
|
||||
# Copyright (C) 2012-2013 Team tiramisu (see AUTHORS for all contributors)
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 3 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
# The original `Config` design model is unproudly borrowed from
|
||||
# the rough gus of pypy: pypy: http://codespeak.net/svn/pypy/dist/pypy/config/
|
||||
# the whole pypy projet is under MIT licence
|
||||
"internationalisation utilities"
import gettext
import os
import sys
import locale

# Application name: the gettext domain under which .mo files are installed.
APP_NAME = 'creole'

# Translation directories
APP_DIR = os.path.join(sys.prefix, 'share')
LOCALE_DIR = os.path.join(APP_DIR, 'locale')

# Default language: honour $LANG, then fall back to English.
DEFAULT_LANG = os.environ.get('LANG', '').split(':')
DEFAULT_LANG += ['en_US']

# Preferred languages: system default locale first, then the defaults.
languages = []
lc, encoding = locale.getdefaultlocale()
if lc:
    languages = [lc]

languages += DEFAULT_LANG
mo_location = LOCALE_DIR

gettext.textdomain(APP_NAME)
if hasattr(gettext, 'bind_textdomain_codeset'):
    # Removed in Python 3.11; only needed on Python 2 to force UTF-8 output.
    gettext.bind_textdomain_codeset(APP_NAME, "UTF-8")

# fallback=True returns a NullTranslations (identity translation) when no
# catalogue is found, so `_` is always safe to call.  The historical code
# also called gettext.find() and an extra gettext.translation() and
# discarded both results; those dead calls were dropped.
t = gettext.translation(APP_NAME, fallback=True)

_ = t.gettext
|
|
@ -0,0 +1,14 @@
|
|||
XXX: Currently a work in progress — NOT IN A WORKING STATE.

The major reason is:
**the implementation is being entirely revamped for scalability**

Additionally, the following modules, used for pretty-printing to the
console, are NOT INTEGRATED YET:
- ansiprint.py
- terminalreport.py
- terminalwriter.py
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,78 @@
|
|||
# -*- coding: utf-8 -*-
"""Simple API for creolelint reports."""
|
||||
import sys
|
||||
from creole.lint.warning import Warn
|
||||
from creole.lint import warnsymb
|
||||
|
||||
def ansi_print(text=None, fhandle=None, newline=True, flush=False):
    """Normalised (ansi) "print to file handle" helper.

    :param text: text to write; nothing is written when None
    :param fhandle: destination file object (defaults to ``sys.stderr``)
    :param newline: when True, append a trailing newline to *text*
    :param flush: when True, flush the handle after writing
    """
    if fhandle is None:
        fhandle = sys.stderr
    # identity test instead of the historical "text != None"
    if text is not None:
        if newline:
            text += '\n'
        fhandle.write(text)
    if flush:
        fhandle.flush()
|
||||
|
||||
class AnsiWriter(object):
    """Writing interface for lint warnings.

    Formats the result of a linter check as an indented tree
    (file / line / variable) on the configured output stream.
    """
    def __init__(self, write_level, output=sys.stdout):
        # write_level: name of a warnsymb level attribute; warnings above
        # this level are skipped by process()
        self.write_level = write_level
        self.output = output

    def process(self, linter):
        """
        parse a result from an item.check() dictionnary
        which is made of {name: TmplVar}
        """
        ident=1
        itemname = linter.name
        warnno = linter.warnno
        warncomment = linter.warncomment
        display = linter.display
        name, level = warnsymb.errorcode[warnno]
        # skip linters above the configured verbosity level
        if level > getattr(warnsymb, self.write_level):
            print "\nLint {0} désactivé (niveau {1})".format(itemname, warnsymb.errorlevel[level])
            return ''
        level = warnsymb.errorlevel[level]
        if not display:
            ansi_print('')
            ansi_print('%s (%s:%s:%s)'%(warncomment, itemname, name, level), self.output)
        checks = linter.check()
        warn = Warn(self.write_level, itemname, warnno, warncomment, checks)
        dico_loc = warn.to_dict()
        if dico_loc != '' and dico_loc != {}:
            ansi_print('')
            ansi_print('%s (%s:%s:%s)'%(warncomment, itemname, name, level), self.output)
            # Python 2 comparator: sort locations by line number
            def compare(x,y):
                return cmp(x[0],y[0])
            for vfile in dico_loc.keys():
                if vfile != 'dictionnaire':
                    ansi_print('%s\-- fichier %s' % (' '*ident, vfile), self.output, newline=False)
                vlines = dico_loc[vfile]
                vlines.sort(compare)
                oldline=0
                for vline, var in vlines:
                    # TmplVar instances print as %%name, others verbatim
                    if hasattr(var, 'name'):
                        vname = '%%%%%s'%str(var.name)
                    else:
                        vname = str(var)
                    if vline != None:
                        # new line number: emit a "ligne N" header once
                        if vline != oldline:
                            ansi_print('', self.output)
                            ansi_print('%s|-- ligne %s' % (' '*(ident+1), vline), self.output, newline=False)
                            pass
                        oldline=vline
                    if vfile != 'dictionnaire':
                        ansi_print(" %s" %vname, self.output, newline=False)
                    else:
                        ansi_print("%s\-- %s" %(' '*ident, vname), self.output)
                if vfile != 'dictionnaire':
                    ansi_print('', self.output)
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
"a system command launcher"
|
||||
|
||||
import os, sys
|
||||
import subprocess
|
||||
from subprocess import Popen, PIPE
|
||||
|
||||
def cmdexec(cmd):
    """Return the output of executing *cmd* in a separate process.

    :param cmd: shell command line to run
    :raises ExecutionFailed: if the command failed; the exception
        provides an 'err' attribute containing the error output from
        the command.
    :return: captured standard output
    """
    process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = process.communicate()
    # communicate() waits for termination, so returncode is already set;
    # no extra poll() round-trip is needed
    status = process.returncode
    if status:
        raise ExecutionFailed(status, status, cmd, out, err)
    return out
|
||||
|
||||
class ExecutionFailed(Exception):
    """Raised by cmdexec() when a command exits with a non-zero status.

    Carries the exit status, the command line and its captured output
    streams so callers can inspect the failure.
    """

    def __init__(self, status, systemstatus, cmd, out, err):
        Exception.__init__(self)
        # keep every piece of context available on the instance
        self.status = status
        self.systemstatus = systemstatus
        self.cmd = cmd
        self.out = out
        self.err = err

    def __str__(self):
        template = "ExecutionFailed: %d %s\n%s"
        return template % (self.status, self.cmd, self.err)
|
||||
|
||||
# export the exception under the name 'Error'
Error = ExecutionFailed
try:
    # masquerade as cmdexec.Error for nicer tracebacks
    ExecutionFailed.__module__ = 'cmdexec'
    ExecutionFailed.__name__ = 'Error'
except (AttributeError, TypeError):
    pass
|
|
@ -0,0 +1,1195 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
creole lint main module
|
||||
"""
|
||||
import types
|
||||
import sys
|
||||
import re
|
||||
from glob import glob
|
||||
from os.path import join, basename, isfile, abspath, normpath, isabs
|
||||
from lxml.etree import parse
|
||||
|
||||
from creole import config, eosfunc
|
||||
from pyeole.process import system_code
|
||||
from creole.lint.parsetemplate import parse_templates
|
||||
from creole.lint.normalize import is_correct
|
||||
from creole.var_loader import parse_dtd
|
||||
from creole.lxml_parser import parse_xml_file
|
||||
from creole.config import FLATTENED_CREOLE_DIR, dtdfilename
|
||||
from creole.loader import PopulateTiramisuObjects
|
||||
|
||||
# internal dictionary variables (test / temporary prefixes)
DICO_TEST_VARS = ['test_', 'tmp_']
# container variables computed dynamically
CONTAINER_VARS = ['container_path_', 'container_ip_', 'container_name_',
                  'adresse_ip_ftp', 'adresse_ip_mysql', 'adresse_ip_dhcp',
                  'adresse_ip_internet', 'adresse_ip_interbase',
                  'adresse_ip_postgresql']

# false positives among the "activation" variables
EXCLUDE_ACTIVATION_VARS = ['activer_cntlm_eth0', 'activer_cntlm_eth1',
                           'activer_cntlm_eth2', 'activer_cntlm_eth3',
                           'activer_cntlm_eth4', 'activer_supp_proxy_eth0',
                           'activer_bash_completion', 'activer_log_martian',
                           'activer_dns_eth0', 'activer_ctrl_alt_suppr',
                           'activer_ipv6', 'activer_courier_commun',
                           'activer_web_valider_ca', 'activer_admin_passfile',
                           'activer_regles_filtrage_port_source',
                           'activer_ftp_anonymous_access', 'activer_ftp_access',
                           'activer_pydio_local', 'activer_courier_imap_sso',
                           'activer_pydio_ftp', 'activer_client_ldap',
                           ]

# templates not to check by default
EXCLUDE_TMPL = ['/usr/share/eole/creole/distrib/named.conf',
                '/usr/share/eole/creole/distrib/common-squid1.conf',
                '/usr/share/eole/creole/distrib/zstats.cfg',
                '/usr/share/eole/creole/distrib/hosts',
                '/usr/share/eole/creole/distrib/active_tags',
                ]

# dictionaries kept for 2.3 compatibility
OLD_DICOS = ['/usr/share/eole/creole/dicos/51_gepi.xml',
             '/usr/share/eole/creole/dicos/51_taskfreak.xml',
             '/usr/share/eole/creole/dicos/51_wordpress.xml',
             '/usr/share/eole/creole/dicos/60_roundcube.xml',
             '/usr/share/eole/creole/dicos/61_ajaxplorer.xml',
             '/usr/share/eole/creole/dicos/61_dokuwiki.xml',
             '/usr/share/eole/creole/dicos/61_fluxbb.xml',
             '/usr/share/eole/creole/dicos/61_piwigo.xml',
             '/usr/share/eole/creole/dicos/51_grr.xml',
             '/usr/share/eole/creole/dicos/51_cdt.xml',
             '/usr/share/eole/creole/dicos/51_piwik.xml',
             '/usr/share/eole/creole/dicos/51_spip.xml',
             ]
|
||||
|
||||
# Cheetah template tokens: directives start with '%', variables with '%%'
starttoken = '%'
varstarttoken = '%%'
# Python builtins that are legitimate names inside templates
builts = [u'ArithmeticError', u'AssertionError', u'AttributeError',
          u'BaseException', u'DeprecationWarning', u'EOFError', u'Ellipsis',
          u'EnvironmentError', u'Exception', u'False', u'FloatingPointError',
          u'FutureWarning', u'GeneratorExit', u'IOError', u'ImportError',
          u'ImportWarning', u'IndentationError', u'IndexError', u'KeyError',
          u'KeyboardInterrupt', u'LookupError', u'MemoryError', u'NameError',
          u'None', u'NotImplemented', u'NotImplementedError', u'OSError',
          u'OverflowError', u'PendingDeprecationWarning', u'ReferenceError',
          u'RuntimeError', u'RuntimeWarning', u'StandardError',
          u'StopIteration', u'SyntaxError', u'SyntaxWarning', u'SystemError',
          u'SystemExit', u'TabError', u'True', u'TypeError',
          u'UnboundLocalError', u'UnicodeDecodeError', u'UnicodeEncodeError',
          u'UnicodeError', u'UnicodeTranslateError', u'UnicodeWarning',
          u'UserWarning', u'ValueError', u'Warning', u'ZeroDivisionError',
          u'_', u'__debug__', u'__doc__', u'__import__', u'__name__', u'abs',
          u'all', u'any', u'apply', u'basestring', u'bool', u'buffer',
          u'callable', u'chr', u'classmethod', u'cmp', u'coerce', u'compile',
          u'complex', u'copyright', u'credits', u'delattr', u'dict', u'dir',
          u'divmod', u'enumerate', u'eval', u'execfile', u'exit', u'file',
          u'filter', u'float', u'frozenset', u'getattr', u'globals',
          u'hasattr', u'hash', u'help', u'hex', u'id', u'input', u'int',
          u'intern', u'isinstance', u'issubclass', u'iter', u'len',
          u'license', u'list', u'locals', u'long', u'map', u'max', u'min',
          u'object', u'oct', u'open', u'ord', u'pow', u'property', u'quit',
          u'range', u'raw_input', u'reduce', u'reload', u'repr', u'reversed',
          u'round', u'set', u'setattr', u'slice', u'sorted', u'staticmethod',
          u'str', u'sum', u'super', u'tuple', u'type', u'unichr', u'unicode',
          u'vars', u'xrange', u'zip']
# creole-specific helpers also allowed in templates
builts.append(u'is_defined')
builts.append(u'split')
builts.append(u'lower')
# creole client calls allowed in templates
cmd_client = (u'creole_client', ('get', 'get_containers'))
# every public eosfunc function is usable from templates as well
for func in dir(eosfunc):
    if not func.startswith('_'):
        builts.append(unicode(func, 'utf-8'))
|
||||
|
||||
# FIXME: je sais pas où la mettre
|
||||
def is_container_var(varname):
    """Return True when *varname* is a dynamically computed container
    variable, i.e. starts with one of the CONTAINER_VARS prefixes.

    :param varname: creole variable name
    :return: bool
    """
    # str.startswith accepts a tuple of prefixes: a single C-level call
    # instead of the hand-rolled loop
    return varname.startswith(tuple(CONTAINER_VARS))
|
||||
|
||||
class TmplVar():
    """A creole variable occurrence collected from template files.

    Tracks every (file basename, 1-based line) location where the
    variable appears.
    """

    def __init__(self, name, fd, line):
        self.name = name
        self.location = []
        self.add_location(fd, line)

    def add_location(self, fd, line):
        # store the basename only, converting to a 1-based line number
        self.location.append((basename(fd), line + 1))

    def set_location(self, location):
        self.location = location

    def get_location(self):
        return self.location
|
||||
|
||||
class Var():
    """Description of a creole variable as declared in the dictionaries.

    Plain value object: one attribute per dictionary property.
    """

    def __init__(self, name, description, separator, help, defaultvalue, is_slave):
        self.name = name
        self.description = description
        self.separator = separator
        # note: parameter name shadows the builtin but is kept for
        # interface compatibility with existing callers
        self.help = help
        self.defaultvalue = defaultvalue
        self.is_slave = is_slave
|
||||
|
||||
class CreoleLinter:
|
||||
"""Base class for linters, collects creole vars and templates
|
||||
**has to be launched once and only once**
|
||||
"""
|
||||
display = True
|
||||
class __impl:
|
||||
warnno = 1
|
||||
warncomment = "Undefined comment"
|
||||
""" Implementation of the singleton interface """
|
||||
def set_config(self, tmpl_dir_or_file=None):
|
||||
if tmpl_dir_or_file != None and type(tmpl_dir_or_file) != str:
|
||||
raise TypeError('tmpl_dir_or_file doit être une string')
|
||||
if self.tmpl_dir_or_file != None:
|
||||
sys.stderr('Tentative de redefinition de tmpl_dir_or_file')
|
||||
if tmpl_dir_or_file == None:
|
||||
self.tmpl_dir_or_file = config.distrib_dir
|
||||
else:
|
||||
if not isabs(tmpl_dir_or_file):
|
||||
tmpl_dir_or_file = normpath(join(config.distrib_dir, tmpl_dir_or_file))
|
||||
if not isfile(tmpl_dir_or_file):
|
||||
raise Exception("template doit etre le nom d'un template valide")
|
||||
self.tmpl_dir_or_file = tmpl_dir_or_file
|
||||
self.eoledirs = config.eoledirs
|
||||
self.dtddir = config.dtddir
|
||||
self.exclude_var = []
|
||||
self.pkgname = None
|
||||
|
||||
def load_dics(self):
|
||||
if self.config is None:
|
||||
self._collect_vars_in_dicos()
|
||||
|
||||
# def load_tmpls(self):
|
||||
# if self.tmplvars == None:
|
||||
# self._collect_set_vars()
|
||||
# self._collect_def_vars()
|
||||
# self._collect_for_vars()
|
||||
# self._collect_define_vars()
|
||||
# self._collect_vars_in_tmplfiles()
|
||||
#
|
||||
def get_dicos_name(self):
|
||||
if self.config is None:
|
||||
raise Exception('Dictionnaire non chargé')
|
||||
dic = self.variables.keys()
|
||||
dic.sort()
|
||||
return dic
|
||||
|
||||
# def get_dicos_files(self):
|
||||
# if self.creoledic == None:
|
||||
# raise Exception('Dictionnaire non chargé')
|
||||
# dic = []
|
||||
# for name in self.creoledic.generic['files']:
|
||||
# dic.append(basename(name['source']))
|
||||
# dic.sort()
|
||||
# return dic
|
||||
#
|
||||
# def get_tmplvars_name(self):
|
||||
# if self.tmplvars == None:
|
||||
# raise Exception('Template non chargé')
|
||||
# tmpl = self.tmplvars.keys()
|
||||
# tmpl.sort()
|
||||
# return tmpl
|
||||
#
|
||||
# def get_defvars_name(self):
|
||||
# if self.defvars == None:
|
||||
# raise Exception('Template non chargé')
|
||||
# defv = self.defvars.keys()
|
||||
# defv.sort()
|
||||
# return defv
|
||||
#
|
||||
# def get_setvars_name(self):
|
||||
# if self.setvars == None:
|
||||
# raise Exception('Fonction non chargé')
|
||||
# var = self.setvars.keys()
|
||||
# var.sort()
|
||||
# return var
|
||||
#
|
||||
# def get_forvars_name(self):
|
||||
# if self.forvars == None:
|
||||
# raise Exception('Fonction non chargé')
|
||||
# var = self.forvars.keys()
|
||||
# var.sort()
|
||||
# return var
|
||||
#
|
||||
# def get_tmplvar(self, name):
|
||||
# return self.tmplvars[name]
|
||||
#
|
||||
# def get_separators(self):
|
||||
# return self.creoledic.get_separators()
|
||||
#
|
||||
def get_dico_file_names(self):
|
||||
if self.eoledirs == None or self.tmpl_dir_or_file == None:
|
||||
raise Exception('Utiliser la methode set_config avant')
|
||||
ret = []
|
||||
for eoledir in self.eoledirs:
|
||||
eoledir = abspath(eoledir)
|
||||
if isfile(eoledir):
|
||||
ret.append(eoledir)
|
||||
else:
|
||||
ret.extend(glob(join(eoledir, '*.xml')))
|
||||
return ret
|
||||
|
||||
def get_dtd(self):
|
||||
if self.dtddir == None:
|
||||
raise Exception('Utiliser la methode set_config avant')
|
||||
return join(self.dtddir, 'creole.dtd')
|
||||
|
||||
def _collect_vars_in_dicos(self):
|
||||
if self.eoledirs == None or self.tmpl_dir_or_file == None:
|
||||
raise Exception('Utiliser la methode set_config avant')
|
||||
|
||||
flattened = join(FLATTENED_CREOLE_DIR, 'flattened_creole.xml')
|
||||
with file(flattened, 'r') as fhd:
|
||||
xmlroot = parse(fhd).getroot()
|
||||
tiramisu_objects = PopulateTiramisuObjects()
|
||||
tiramisu_objects.parse_dtd(dtdfilename)
|
||||
tiramisu_objects.make_tiramisu_objects(xmlroot)
|
||||
self.config = tiramisu_objects.build()
|
||||
|
||||
self.config.read_write()
|
||||
self.config.cfgimpl_get_settings().remove('hidden')
|
||||
self.config.cfgimpl_get_settings().remove('validator')
|
||||
self.config.cfgimpl_get_settings().remove('disabled')
|
||||
for path in self.config.creole.make_dict():
|
||||
spath = path.split('.')
|
||||
vname = spath[-1]
|
||||
fname = spath[0]
|
||||
is_slave = False
|
||||
if len(spath) == 3:
|
||||
master = spath[1]
|
||||
if master != vname:
|
||||
is_slave = True
|
||||
option = self.config.unwrap_from_path('creole.' + path)
|
||||
self.variables[vname] = Var(vname, option.impl_get_information('doc', None),
|
||||
option.impl_get_information('separator', ''),
|
||||
option.impl_get_information('help', None),
|
||||
option.impl_getdefault(),
|
||||
is_slave)
|
||||
if fname not in self.families:
|
||||
self.families.append(fname)
|
||||
|
||||
def _parse_tabs_in_dicos(self):
|
||||
if self.eoledirs == None or self.tmpl_dir_or_file == None:
|
||||
raise Exception('Utiliser la methode set_config avant')
|
||||
tabs_in_dics = []
|
||||
fnames = []
|
||||
for directory in self.eoledirs:
|
||||
if isfile(directory):
|
||||
fnames.append(directory)
|
||||
else:
|
||||
fnames.extend(glob(join(directory, "*.xml")))
|
||||
for fname in fnames:
|
||||
fh = file(fname, 'r')
|
||||
content = fh.read()
|
||||
if '\t' in content:
|
||||
tabs_in_dics.append(fname)
|
||||
fh.close()
|
||||
return tabs_in_dics
|
||||
|
||||
def _list_tmpl_files(self):
|
||||
if isfile(self.tmpl_dir_or_file):
|
||||
return [self.tmpl_dir_or_file]
|
||||
ret = []
|
||||
for filename in glob(join(self.tmpl_dir_or_file, '*')):
|
||||
if filename.startswith('.') or filename.endswith('~'):
|
||||
continue
|
||||
if filename in EXCLUDE_TMPL:
|
||||
print " \\-- template desactivé : {0}".format(filename)
|
||||
continue
|
||||
ret.append(filename)
|
||||
return ret
|
||||
|
||||
# def _add_collected_var(self, dvar, var, fd, linenb):
|
||||
# if dvar.has_key(var):
|
||||
# dvar[var].add_location(fd=fd, line=linenb)
|
||||
# else:
|
||||
# dvar[var] = TmplVar(name=var, fd=fd, line=linenb)
|
||||
#
|
||||
# def _collect_var_in(self, dvar, var, fd, linenb, exists=False):
|
||||
# not_added=True
|
||||
# if exists == True:
|
||||
# if self.forvars.has_key(var):
|
||||
# #si deja en memoire
|
||||
# if (basename(fd), linenb+1) in self.forvars[var].location:
|
||||
# return
|
||||
# self._add_collected_var(self.forvars, var, fd, linenb)
|
||||
# not_added=False
|
||||
# if self.setvars.has_key(var):
|
||||
# self._add_collected_var(self.setvars, var, fd, linenb)
|
||||
# not_added=False
|
||||
# if self.defvars.has_key(var):
|
||||
# self._add_collected_var(self.defvars, var, fd, linenb)
|
||||
# not_added=False
|
||||
# #test les builtsin seulement si variable
|
||||
# if not_added == True and unicode(var, 'utf-8') in builts:
|
||||
# #self.builtsvar.append(var)
|
||||
# return
|
||||
# if not_added == True:
|
||||
# self._add_collected_var(dvar, var, fd, linenb)
|
||||
#
|
||||
# def _collect_vars_in(self, expr, dvar, fd, linenb, tvarstarttoken,
|
||||
# exists=False):
|
||||
# if self.unknown_client == None:
|
||||
# self.unknown_client = []
|
||||
# varcreole = re.compile(tvarstarttoken+'([a-zA-Z0-9_\.{}]+)')
|
||||
# varcreole2 = re.compile(tvarstarttoken+'(\w+)')
|
||||
# varcreolebr = re.compile(tvarstarttoken+'{(\w+)}')
|
||||
# varmulti = re.compile('(\w+)\.(\w+)')
|
||||
# for var in varcreole.findall(expr):
|
||||
# ret = varmulti.match(var)
|
||||
# if ret != None:
|
||||
# if ret.group(1) == cmd_client[0]:
|
||||
# if ret.group(2) not in cmd_client[1]:
|
||||
# self.unknown_client.append(TmplVar(name=ret.group(2), fd=fd, line=linenb))
|
||||
# else:
|
||||
# #%%var.sousvar
|
||||
# self._collect_var_in(dvar, ret.group(1), fd, linenb, exists)
|
||||
# self._collect_var_in(dvar, ret.group(2), fd, linenb, exists)
|
||||
# else:
|
||||
# #%%var
|
||||
# for var2 in varcreole2.findall(tvarstarttoken+var):
|
||||
# self._collect_var_in(dvar, var2, fd, linenb, exists)
|
||||
# #%%{var}
|
||||
# for var2 in varcreolebr.findall(tvarstarttoken+var):
|
||||
# self._collect_var_in(dvar, var2, fd, linenb, exists)
|
||||
#
|
||||
# def _collect_vars(self, tvars, tpattern, all_char=False, with_var=False, with_vars=True, broken=None):
|
||||
# """
|
||||
# collect vars in template for a specified pattern
|
||||
#
|
||||
# :tvars: all collected var are store in this variable
|
||||
# :tpattern: re pattern
|
||||
# :broken: if set, store broken variable
|
||||
# """
|
||||
# if tvars == None:
|
||||
# tvars = {}
|
||||
# tstarttoken = ''
|
||||
# tvarstarttoken = ''
|
||||
# for tmplfd in self._list_tmpl_files():
|
||||
# fh = open(tmplfd, 'r')
|
||||
# lines = fh.readlines()
|
||||
# length = len(lines)
|
||||
# settings = False
|
||||
# if tstarttoken != starttoken or \
|
||||
# tvarstarttoken != varstarttoken:
|
||||
# if all_char:
|
||||
# char = '(.*)'
|
||||
# else:
|
||||
# char = '( *)'
|
||||
# pattern = re.compile(char+starttoken+tpattern)
|
||||
# tstarttoken = starttoken
|
||||
# tvarstarttoken = varstarttoken
|
||||
# for linenb in range(length):
|
||||
# line = lines[linenb]
|
||||
# if line.strip() == '%compiler-settings':
|
||||
# settings = True
|
||||
# if settings and line.strip() == \
|
||||
# '%end compiler-settings'.strip():
|
||||
# settings = False
|
||||
# if not settings:
|
||||
# ret = pattern.match(line.strip())
|
||||
# if ret != None:
|
||||
# expr = ret.group(2).strip()
|
||||
# if with_var:
|
||||
# self._collect_var_in(tvars, expr, tmplfd, linenb)
|
||||
# if with_vars:
|
||||
# self._collect_vars_in(expr,
|
||||
# tvars, tmplfd, linenb,
|
||||
# tvarstarttoken)
|
||||
# len_token = len(varstarttoken)
|
||||
# if broken is not None and expr.strip()[:len_token] != tvarstarttoken:
|
||||
# broken.append(TmplVar(
|
||||
# name=line.strip(),
|
||||
# fd=tmplfd, line=linenb))
|
||||
# else:
|
||||
# tline = line.split('=')
|
||||
# tkey = tline[0].strip()
|
||||
# if tkey == 'cheetahVarStartToken':
|
||||
# tvarstarttoken = tline[1].strip()
|
||||
# elif tkey == 'directiveStartToken':
|
||||
# tstarttoken = tline[1].strip()
|
||||
# pattern = re.compile(tstarttoken+tpattern)
|
||||
# fh.close()
|
||||
# return tvars
|
||||
#
|
||||
# def _collect_for_vars(self):
|
||||
# """
|
||||
# collect all vars generate in 'for'
|
||||
# """
|
||||
# self.brokenfor = []
|
||||
# self.forvars = self._collect_vars(self.forvars, 'for (.+) in (.*)', broken=self.brokenfor)
|
||||
#
|
||||
# def _collect_def_vars(self):
|
||||
# """
|
||||
# collect all vars generate in 'def'
|
||||
# """
|
||||
# self.defvars = self._collect_vars(self.defvars, 'def (.*)\((.*)\)', with_var=True)
|
||||
#
|
||||
# def _collect_set_vars(self):
|
||||
# """
|
||||
# collect all vars generate in 'set'
|
||||
# """
|
||||
# self.setvars = self._collect_vars(self.setvars, 'set (.*)=.*')
|
||||
#
|
||||
# def _collect_define_vars(self):
|
||||
# """
|
||||
# collect all vars generate in 'def'
|
||||
# """
|
||||
# self.var_with_is_defined = self._collect_vars(
|
||||
# self.var_with_is_defined,
|
||||
# "is_defined\(\'(\w+)\'\)",
|
||||
# all_char=True,
|
||||
# with_var=True, with_vars=False)
|
||||
# ##FIXME pas de support de cheetahVarStartToken, ...
|
||||
# #if self.var_with_is_defined == None:
|
||||
# # self.var_with_is_defined = {}
|
||||
# # pattern = re.compile('(.*) %sis_defined\(\'(\w+)\'\)'%varstarttoken)
|
||||
# # for tmplfd in self._list_tmpl_files():
|
||||
# # fh = open(tmplfd, 'r')
|
||||
# # lines = fh.readlines()
|
||||
# # length = len(lines)
|
||||
# # for linenb in range(length):
|
||||
# # line = lines[linenb]
|
||||
# # ret = pattern.match(line)
|
||||
# # if ret != None:
|
||||
# # self._collect_var_in(self.var_with_is_defined, ret.group(2), tmplfd, linenb)
|
||||
# # fh.close()
|
||||
#
|
||||
# def _collect_vars_in_tmplfiles(self):
|
||||
# if self.eoledirs == None or self.tmpl_dir_or_file == None:
|
||||
# raise Exception('Utiliser la methode set_config avant')
|
||||
# # XXX ".eolvars" is a good placeholder for var names to be kept in touch
|
||||
# if self.tmplvars == None:
|
||||
# self.tmplvars = {}
|
||||
# for tmplfd in self._list_tmpl_files():
|
||||
# fh = open(tmplfd, 'r')
|
||||
# lines = fh.readlines()
|
||||
# length = len(lines)
|
||||
# settings = False
|
||||
# tvarstarttoken = varstarttoken
|
||||
# for linenb in range(length):
|
||||
# line = lines[linenb]
|
||||
# if line.strip() == '%compiler-settings':
|
||||
# settings = True
|
||||
# if settings and line.strip() == \
|
||||
# '%end compiler-settings'.strip():
|
||||
# settings = False
|
||||
# if not settings:
|
||||
# self._collect_vars_in(line, self.tmplvars, tmplfd,
|
||||
# linenb, tvarstarttoken, True)
|
||||
# else:
|
||||
# tline = line.split('=')
|
||||
# tkey = tline[0].strip()
|
||||
# if tkey == 'cheetahVarStartToken':
|
||||
# tvarstarttoken = tline[1].strip()
|
||||
#
|
||||
# fh.close()
|
||||
#
|
||||
# storage for the instance reference
# (class attribute holding the single shared implementation object;
# name-mangled to _CreoleLinter__instance)
__instance = None

def __init__(self):
    """ Create singleton instance """
    # Check whether we already have an instance

    if CreoleLinter.__instance is None:
        # Create and remember instance
        # NOTE(review): CreoleLinter.__impl is presumably an inner
        # implementation class defined earlier in this file -- confirm.
        CreoleLinter.__instance = CreoleLinter.__impl()
        # Default, unconfigured state: set_config() must be called
        # before the load/check methods become usable.
        self.tmpl_dir_or_file = None
        self.eoledirs = None
        self.config = None              # Tiramisu config built by load_dics()
        self.variables = {}             # variable name -> Var
        self.families = []              # family names seen in the dictionaries
        self.tmplvars = None            # vars collected from templates
        self.forvars = None             # vars bound by '#for' loops
        self.defvars = None             # vars bound by '#def' functions
        self.setvars = None             # vars bound by '#set'
        self.unknown_client = None      # unrecognised client options
        self.brokenfor = None           # malformed '#for' lines
        self.var_with_is_defined = None # vars guarded by is_defined()
        self.exclude_var = None         # vars excluded from the checks
        self.skip_var = {}              # check name -> vars/locations to skip
        self.conflevel = 'eole'         # configuration level (dico name prefix)

    # Store instance reference as the only member in the handle
    self.__dict__['_CreoleLinter__instance'] = CreoleLinter.__instance
|
||||
|
||||
def __getattr__(self, attr):
    """ Delegate access to implementation """
    # Forward attribute reads to the shared implementation object
    # (self.__instance, set up by __init__); standard singleton-handle
    # delegation.
    return getattr(self.__instance, attr)
|
||||
|
||||
def __setattr__(self, attr, value):
    """ Delegate access to implementation """
    # Forward attribute writes to the shared implementation object so
    # all handles observe the same state.
    return setattr(self.__instance, attr, value)
|
||||
|
||||
#class SaveItem(CreoleLinter):
|
||||
# """
|
||||
# eolvars not present in the dicos
|
||||
# """
|
||||
# name = 'save'
|
||||
# warnno = 1
|
||||
# warncomment = "Ne pas utiliser la fonction SaveItem comme un test"
|
||||
#
|
||||
# def process(self):
|
||||
# self.load_dics()
|
||||
# if self.pkgname == None:
|
||||
# raise Exception('fichier creolelint.conf incomplet (name)')
|
||||
# filename = join(expanduser('~/.creolelint'), self.pkgname+'.conf')
|
||||
# fh = open(filename, 'w')
|
||||
# fh.write('vardico = '+str(self.get_dicos_name())+'\n')
|
||||
# fh.close()
|
||||
# print('fichier sauvegardé')
|
||||
#
|
||||
# def check(self):
|
||||
# return []
|
||||
#
|
||||
#class OrphansInDicosItem(CreoleLinter):
|
||||
# """
|
||||
# eolvars not present in the dicos
|
||||
# """
|
||||
# name = 'orphans_in_dicos'
|
||||
# warnno = 8
|
||||
# warncomment = "Variable dictionnaire non utilisée dans un template"
|
||||
#
|
||||
# def check(self):
|
||||
# self.load_dics()
|
||||
# self.load_tmpls()
|
||||
#
|
||||
# vars_name = set(self.get_dicos_name())
|
||||
# tmplvars_name = set(self.get_tmplvars_name())
|
||||
# only_in_dicos = vars_name - tmplvars_name
|
||||
# ret = []
|
||||
# skip_var = self.skip_var.get(self.name, {})
|
||||
# for var in only_in_dicos:
|
||||
# if skip_var.has_key(var):
|
||||
# continue
|
||||
# test_start = False
|
||||
# for start in DICO_TEST_VARS:
|
||||
# if var.startswith(start):
|
||||
# test_start = True
|
||||
# break
|
||||
# if not test_start:
|
||||
# ret.append(self.variables[var])
|
||||
# return ret
|
||||
#
|
||||
#class OrphansInTmplItem(CreoleLinter):
|
||||
# """
|
||||
# eolvars not present in the templates
|
||||
# """
|
||||
# name = 'orphans_in_tmpl'
|
||||
# warnno = 8
|
||||
# warncomment = "Variable template non présente dans le dictionnaire"
|
||||
#
|
||||
# def check(self):
|
||||
# self.load_dics()
|
||||
# self.load_tmpls()
|
||||
#
|
||||
# vars_name = self.get_dicos_name()
|
||||
# if self.exclude_var != None:
|
||||
# vars_name.extend(self.exclude_var)
|
||||
# vars_name=set(vars_name)
|
||||
# tmplvars_name = set(self.get_tmplvars_name())
|
||||
# only_in_tmpl = tmplvars_name - vars_name
|
||||
# #remove is_defined
|
||||
# is_defined_name = set(self.var_with_is_defined.keys())
|
||||
# only_in_tmpl = only_in_tmpl - is_defined_name
|
||||
# set_var = set(self.get_setvars_name())
|
||||
# only_in_tmpl = only_in_tmpl - set_var
|
||||
# ret = []
|
||||
# for var in only_in_tmpl:
|
||||
# skipped_location = []
|
||||
# for location in self.tmplvars[var].location:
|
||||
# if self.skip_var.has_key(self.name) and self.skip_var[self.name].has_key(var):
|
||||
# if not location in self.skip_var[self.name][var]:
|
||||
# skipped_location.append(location)
|
||||
# else:
|
||||
# skipped_location.append(location)
|
||||
# if skipped_location != []:
|
||||
# tmplvar = TmplVar(var, '', 0)
|
||||
# tmplvar.set_location(skipped_location)
|
||||
# ret.append(tmplvar)
|
||||
# #results.append(self.tmplvars[var])
|
||||
# return ret
|
||||
#
|
||||
#class OrphansDefItem(CreoleLinter):
|
||||
# name = 'orphans_def'
|
||||
# warnno = 7
|
||||
# warncomment = "Fonction définie mais non utilisée"
|
||||
#
|
||||
# def check(self):
|
||||
# self.load_tmpls()
|
||||
# results = []
|
||||
# for defvar in self.get_defvars_name():
|
||||
# defname = {}
|
||||
# for filedesc, linenb in self.defvars[defvar].location:
|
||||
# if not defname.has_key((defvar, filedesc)):
|
||||
# defname[(defvar, filedesc)]=linenb
|
||||
# else:
|
||||
# defname[(defvar, filedesc)]="exists"
|
||||
# for defvar, filedesc in defname.keys():
|
||||
# if defname[(defvar, filedesc)] != "exists":
|
||||
# results.append(TmplVar(name=defvar, fd=filedesc, line=defname[(defvar, filedesc)]))
|
||||
#
|
||||
# return results
|
||||
#
|
||||
#class OrphansSetItem(CreoleLinter):
|
||||
# name = 'orphans_set'
|
||||
# warnno = 7
|
||||
# warncomment = "Variable définie dans le template mais non utilisée"
|
||||
#
|
||||
# def check(self):
|
||||
# self.load_tmpls()
|
||||
# results = []
|
||||
# tmpl_vars = set(self.get_tmplvars_name())
|
||||
# for setvar in self.get_setvars_name():
|
||||
# setname = {}
|
||||
# for filedesc, linenb in self.setvars[setvar].location:
|
||||
# if setname.has_key((setvar, filedesc)) == False:
|
||||
# setname[(setvar, filedesc)]=linenb
|
||||
# else:
|
||||
# setname[(setvar, filedesc)]="exists"
|
||||
#
|
||||
# for setvar, filedesc in setname.keys():
|
||||
# if setname[(setvar, filedesc)] != "exists":
|
||||
# results.append(TmplVar(name=setvar, fd=filedesc, line=setname[(setvar, filedesc)]))
|
||||
#
|
||||
# return results
|
||||
#
|
||||
#class OrphansForItem(CreoleLinter):
|
||||
# name = 'orphans_for'
|
||||
# warnno = 7
|
||||
# warncomment = "Variable définie dans une boucle mais non utilisée"
|
||||
#
|
||||
# def check(self):
|
||||
# self.load_tmpls()
|
||||
# results = []
|
||||
# for forvar in self.get_forvars_name():
|
||||
# forname = {}
|
||||
# for filedesc, linenb in self.forvars[forvar].location:
|
||||
# if forname.has_key((forvar, filedesc)) == False:
|
||||
# forname[(forvar, filedesc)]=linenb
|
||||
# else:
|
||||
# forname[(forvar, filedesc)]="exists"
|
||||
#
|
||||
# for forvar, filedesc in forname.keys():
|
||||
# if forname[(forvar, filedesc)] != "exists":
|
||||
# results.append(TmplVar(name=forvar, fd=filedesc, line=forname[(forvar, filedesc)]))
|
||||
# return results
|
||||
#
|
||||
#class OrphansDicosFilesItem(CreoleLinter):
|
||||
# """
|
||||
# """
|
||||
# name = 'orphans_dicos_files'
|
||||
# warnno = 1
|
||||
# warncomment = "Template déclaré dans le dicos inexistant"
|
||||
#
|
||||
# def check(self):
|
||||
# self.load_dics()
|
||||
#
|
||||
# dicos_files = []
|
||||
# for filen in self.get_dicos_files():
|
||||
# dicos_files.append(basename(filen))
|
||||
# dicos_files = set(dicos_files)
|
||||
# tmpl_files = []
|
||||
# for filen in self._list_tmpl_files():
|
||||
# tmpl_files.append(unicode(basename(filen), 'utf-8'))
|
||||
# tmpl_files=set(tmpl_files)
|
||||
# orphans = dicos_files - tmpl_files
|
||||
# ret = []
|
||||
# for var in orphans:
|
||||
# if self.skip_var.has_key(self.name) and not self.skip_var[self.name].has_key(var):
|
||||
# ret.append(var)
|
||||
# else:
|
||||
# ret.append(var)
|
||||
#
|
||||
# return ret
|
||||
#
|
||||
#class OrphansTmplFilesItem(CreoleLinter):
|
||||
# """
|
||||
# """
|
||||
# name = 'orphans_tmpl_files'
|
||||
# warnno = 1
|
||||
# warncomment = "Template non déclaré dans le dicos"
|
||||
#
|
||||
# def check(self):
|
||||
# self.load_dics()
|
||||
#
|
||||
# dicos_files = []
|
||||
# for filen in self.get_dicos_files():
|
||||
# dicos_files.append(basename(filen))
|
||||
# dicos_files = set(dicos_files)
|
||||
# tmpl_files = []
|
||||
# for filen in self._list_tmpl_files():
|
||||
# tmpl_files.append(unicode(basename(filen), 'utf-8'))
|
||||
# tmpl_files=set(tmpl_files)
|
||||
# return tmpl_files - dicos_files
|
||||
#
|
||||
class WrongDicosNameItem(CreoleLinter):
    """Check that dictionary file names follow the EOLE naming scheme.

    Expected form: two digits, an underscore, then lowercase
    alphanumerics/underscores (e.g. ``20_foo.xml``).  The first digit
    encodes the configuration level (0: common, 1: conf, 2: eole,
    3-9: anything else).
    """
    name = 'wrong_dicos_name'
    warnno = 6
    warncomment = "Dictionnaire avec un nom invalide"

    # First regexp fragment expected for each configuration level;
    # replaces the original if/elif chain.
    _LEVEL_PREFIX = {'common': '(0', 'conf': '(1', 'eole': '(2'}

    def check(self):
        """Return the basenames of dictionaries whose name is invalid."""
        pattern = self._LEVEL_PREFIX.get(self.conflevel, '([3-9]')
        pattern += '[0-9]_[a-z0-9_]+)'
        cpattern = re.compile(pattern)
        ret = []
        for filename in self.get_dico_file_names():
            fname = basename(filename)
            # 'is None' instead of '== None' (idiom; same behaviour).
            if cpattern.match(fname) is None:
                ret.append(fname)
        return ret
|
||||
|
||||
class HiddenIfInDicosItem(CreoleLinter):
    """Report dictionaries that still use a ``hidden_if_*`` condition."""
    name = 'hidden_if_in_dicos'
    warnno = 5
    warncomment = "Dictionnaire contenant un hidden_if_*"

    def check(self):
        """Return the dictionary files declaring a hidden_if_* condition."""
        ret = []
        dtd = parse_dtd(self.get_dtd())
        for filename in self.get_dico_file_names():
            # Legacy dictionaries are exempt from this check.
            if filename in OLD_DICOS:
                continue
            # 'parsed' (not 'parse'): the original local shadowed the
            # module-level XML parse() function.
            parsed = parse_xml_file(filename, dtd, parse_all=False)
            for cond in parsed['conditions'].values():
                if cond[0]['name'].startswith('hidden_if_'):
                    ret.append(filename)
                    break   # one report per file is enough
        return ret
|
||||
|
||||
class ConditionWithoutTarget(CreoleLinter):
    """Report dictionaries containing a condition with no target.

    A condition is target-less when its 'family', 'list' and 'variable'
    target lists are all empty.
    """
    name = 'condition_without_target'
    warnno = 5
    warncomment = "Dictionnaire contenant une condition sans target"

    def check(self):
        """Return the dictionary files with at least one target-less
        condition (each file reported once)."""
        ret = []

        dtd = parse_dtd(self.get_dtd())
        for filename in self.get_dico_file_names():
            # Legacy dictionaries are exempt from this check.
            if filename in OLD_DICOS:
                continue
            # 'parsed' (not 'parse'): avoid shadowing the module-level
            # XML parse() function.
            parsed = parse_xml_file(filename, dtd, parse_all=False)
            for cond in parsed['conditions'].values():
                # Bug fix: the original 'break' only exited the inner
                # loop, so one file could be appended multiple times.
                if any(con['family'] == con['list'] == con['variable'] == []
                       for con in cond):
                    ret.append(filename)
                    break
        return ret
|
||||
|
||||
class ObligatoireInDicosItem(CreoleLinter):
    """Report dictionaries using the deprecated "obligatoire" check."""
    name = 'obligatoire_in_dicos'
    warnno = 5
    warncomment = "Dictionnaire contenant un check \"obligatoire\""

    def check(self):
        """Return the dictionary files declaring an "obligatoire" check."""
        ret = []
        dtd = parse_dtd(self.get_dtd())
        for filename in self.get_dico_file_names():
            # Legacy dictionaries are exempt from this check.
            if filename in OLD_DICOS:
                continue
            # 'parsed' (not 'parse'): the original local shadowed the
            # module-level XML parse() function.
            parsed = parse_xml_file(filename, dtd, parse_all=False)
            for chk in parsed['checks'].values():
                if chk[0][0] == 'obligatoire':
                    ret.append(filename)
                    break   # one report per file is enough
        return ret
|
||||
|
||||
|
||||
class FamilyWithoutHelp(CreoleLinter):
    """Report families that carry no help tag in the dictionaries."""
    name = 'family_without_help'
    warnno = 5
    warncomment = "Famille sans balise d'aide"

    def check(self):
        """Return the names of families whose 'help' information is unset."""
        self.load_dics()
        missing = []
        for grp in self.config.creole.iter_groups():
            description = grp[1].cfgimpl_get_description()
            if description.impl_get_information('help', None) is None:
                missing.append(grp[0])
        return missing
|
||||
|
||||
class FamilyWithoutIcon(CreoleLinter):
    """Report families that do not define a specific icon."""
    name = 'family_without_icon'
    warnno = 5
    warncomment = "Famille sans icône spécifique"

    def check(self):
        """Return the names of families with no 'icon' information."""
        self.load_dics()
        missing = []
        for grp in self.config.creole.iter_groups():
            icon = grp[1].cfgimpl_get_description().impl_get_information('icon')
            if icon is None:
                missing.append(grp[0])
        return missing
|
||||
|
||||
#class DefineItem(CreoleLinter):
|
||||
# """
|
||||
# check for syntaxes
|
||||
# """
|
||||
# name = 'define'
|
||||
# warnno = 4
|
||||
# warncomment = "Redéfinition d'un variable d'un dictionnaire"
|
||||
#
|
||||
# def check(self):
|
||||
# """
|
||||
# verifie si une variable définie est une variable du dictionnaire
|
||||
# """
|
||||
# self.load_dics()
|
||||
# self.load_tmpls()
|
||||
# dicos = set(self.get_dicos_name())
|
||||
# defv = set(self.get_defvars_name())
|
||||
# ret=[]
|
||||
# for var in defv & dicos:
|
||||
# ret.append(self.defvars[var])
|
||||
# return ret
|
||||
#
|
||||
class BuiltinsItem(CreoleLinter):
    """
    Verify that no dictionary variable shadows a Python builtin name.
    """
    name = 'builtins'
    warnno = 4
    # typo fixed in the user-facing message: 'identitique' -> 'identique'
    warncomment = "Variable identique à une fonction python"

    def check(self):
        """Return the Var objects whose name collides with a builtin.

        Only dictionary variables are checked; the historical checks on
        template/for/set/def names are disabled.
        """
        self.load_dics()
        return [self.variables[var]
                for var in set(builts) & set(self.get_dicos_name())]
|
||||
|
||||
#class SyntaxForItem(CreoleLinter):
|
||||
# """
|
||||
# verifie la syntaxe de la ligne for
|
||||
# """
|
||||
# name = 'syntax_for'
|
||||
# warnno = 1
|
||||
# warncomment = "Syntaxe de la ligne for incorrect"
|
||||
# def check(self):
|
||||
# self.load_tmpls()
|
||||
# return self.brokenfor
|
||||
#
|
||||
#class SyntaxVarItem(CreoleLinter):
|
||||
# """
|
||||
# verifie les variables suivant la syntaxe de pattern
|
||||
# """
|
||||
# name = 'syntax_var'
|
||||
# pattern = '([a-z0-9][a-z0-9]+_[a-z0-9_]+)'
|
||||
# warnno = 6
|
||||
# warncomment = "La variable ne respecte pas la regexp %s" % pattern
|
||||
#
|
||||
# def check(self):
|
||||
# cpattern = re.compile(self.pattern)
|
||||
# self.load_dics()
|
||||
# self.load_tmpls()
|
||||
# ret=[]
|
||||
# #dans le dictionnaire
|
||||
# for var in self.get_dicos_name():
|
||||
# if cpattern.match(var) == None:
|
||||
# ret.append(self.variables[var])
|
||||
# #dans les variables de template
|
||||
# for var in self.get_tmplvars_name():
|
||||
# if cpattern.match(var) == None:
|
||||
# skipped_location = []
|
||||
# for location in self.tmplvars[var].location:
|
||||
# if self.skip_var.has_key(self.name) and self.skip_var[self.name].has_key(var):
|
||||
# if not location in self.skip_var[self.name][var]:
|
||||
# skipped_location.append(location)
|
||||
# else:
|
||||
# skipped_location.append(location)
|
||||
# if skipped_location != []:
|
||||
# tmplvar = TmplVar(var, '', 0)
|
||||
# tmplvar.set_location(skipped_location)
|
||||
# ret.append(tmplvar)
|
||||
# #ret.append(self.tmplvars[var])
|
||||
# #dans les boucles for
|
||||
# for var in self.get_forvars_name():
|
||||
# if cpattern.match(var) == None:
|
||||
# ret.append(self.forvars[var])
|
||||
# #dans la definition de variable dans un template
|
||||
# for var in self.get_setvars_name():
|
||||
# if cpattern.match(var) == None:
|
||||
# ret.append(self.setvars[var])
|
||||
# return ret
|
||||
#
|
||||
#class ForbiddenTemplateVarItem(CreoleLinter):
|
||||
# """
|
||||
# vérifie la présence des noms de variable interdits dans les templates
|
||||
# """
|
||||
# name = 'syntax_var2'
|
||||
# warnno = 6
|
||||
# warncomment = "Nom de variable interdit dans un template"
|
||||
#
|
||||
# def check(self):
|
||||
# #self.load_dics()
|
||||
# self.load_tmpls()
|
||||
# ret=[]
|
||||
# #dans les variables de template
|
||||
# for var in self.get_tmplvars_name():
|
||||
# for start in DICO_TEST_VARS:
|
||||
# if var.startswith(start):
|
||||
# ret.append(var)
|
||||
# break
|
||||
# return ret
|
||||
#
|
||||
#class SyntaxFunctionItem(CreoleLinter):
|
||||
# """
|
||||
# verifie les fonctions suivant la syntaxe de pattern
|
||||
# """
|
||||
# name = 'syntax_function'
|
||||
# pattern = '([a-z0-9][a-z0-9]+_[a-z0-9_]+)'
|
||||
# warnno = 6
|
||||
# warncomment = "La fonction ne respecte pas la regexp %s" % pattern
|
||||
#
|
||||
# def check(self):
|
||||
# cpattern = re.compile(self.pattern)
|
||||
# self.load_tmpls()
|
||||
# ret=[]
|
||||
# #dans les noms de fonction
|
||||
# for var in self.get_defvars_name():
|
||||
# if cpattern.match(var) == None:
|
||||
# ret.append(self.defvars[var])
|
||||
# return ret
|
||||
#
|
||||
#class OrphansVarHelpItem(CreoleLinter):
|
||||
# name = 'orphans_var_help'
|
||||
# warnno = 3
|
||||
# warncomment = "Aide définie dans le dictionnaire pour une variable inexistante"
|
||||
#
|
||||
# def check(self):
|
||||
# self.load_dics()
|
||||
# vars_name = set(self.get_dicos_name())
|
||||
# vars_help = set(self.creoledic.get_helps()['variables'].keys())
|
||||
# #print vars_help
|
||||
# only_in_help = vars_help - vars_name
|
||||
# results = []
|
||||
# for tmpl in only_in_help:
|
||||
# results.append(tmpl)
|
||||
# return results
|
||||
|
||||
#class OrphansFamHelpItem(CreoleLinter):
|
||||
# name = 'orphans_fam_help'
|
||||
# warnno = 3
|
||||
# warncomment = "Aide définie dans le dictionnaire pour une famille inexistante"
|
||||
#
|
||||
# def check(self):
|
||||
# self.load_dics()
|
||||
# #FIXME
|
||||
# vars_name = set(self.families)
|
||||
# vars_help = set(self.creoledic.get_helps()['families'].keys())
|
||||
# only_in_help = vars_help - vars_name
|
||||
# results = []
|
||||
# for tmpl in only_in_help:
|
||||
# results.append(tmpl)
|
||||
# return results
|
||||
#
|
||||
class ValidVarLabelItem(CreoleLinter):
    """Validate the label (description) of every dictionary variable."""
    name = 'valid_var_label'
    warnno = 5
    warncomment = "Libellé de variable non valide dans un dictionnaire"

    def check(self):
        """Return the problems reported by is_correct() on each label,
        skipping container variables."""
        self.load_dics()
        problems = []
        for variable in self.variables.values():
            if is_container_var(variable.name):
                continue
            problems.extend(is_correct(variable.description, variable.name))
        return problems
|
||||
|
||||
class ActivationVarWithoutHelp(CreoleLinter):
    """Report activation variables (activer_*) lacking a help tag."""
    name = 'activation_var_without_help'
    warnno = 5
    warncomment = "Variable d'activation sans balise d'aide"

    def check(self):
        """Return the names of 'activer_*' variables without help text."""
        self.load_dics()
        return [vname
                for vname, variable in self.variables.items()
                if vname.startswith('activer_')
                and vname not in EXCLUDE_ACTIVATION_VARS
                and variable.help is None]
|
||||
|
||||
class ValidSeparatorLabelItem(CreoleLinter):
    # Validates separator labels declared in the dictionaries.
    name = 'valid_separator_label'
    warnno = 5
    warncomment = "Libellé de séparateur non valide dans un dictionnaire"

    def check(self):
        """Return the label problems found on variable separators."""
        self.load_dics()
        ret = []

        for var, var_obj in self.variables.items():
            # '' is the default set when no separator was declared.
            if var_obj.separator == '':
                #FIXME: dynamic container variables
                continue
            # NOTE(review): indexing [0] suggests 'separator' is a
            # sequence whose first element is the label text -- confirm
            # against impl_get_information('separator') / Var.
            ret.extend(is_correct(var_obj.separator[0], var))
        return ret
|
||||
|
||||
class ValidHelpLabelItem(CreoleLinter):
    """Validate the help labels of variables and families."""
    name = 'valid_help_label'
    warnno = 5
    warncomment = "Libellé d'aide non valide dans un dictionnaire"

    def check(self):
        """Return the label problems found on variable and family help."""
        self.load_dics()
        problems = []
        # help attached to variables
        for vname, variable in self.variables.items():
            problems.extend(is_correct(variable.help, vname))
        # help attached to families
        for grp in self.config.creole.iter_groups():
            family_help = grp[1].cfgimpl_get_description() \
                                .impl_get_information('help', '')
            problems.extend(is_correct(family_help, grp[0], family=True))
        return problems
|
||||
|
||||
class ValidSlaveValue(CreoleLinter):
    """Report slave variables whose default value is a multi-item list."""
    name = 'valid_slave_value'
    warnno = 5
    warncomment = "Variable esclave avec une liste en valeur défaut"

    def check(self):
        """Return the names of slaves with more than one default value."""
        self.load_dics()
        return [vname
                for vname, variable in self.variables.items()
                if variable.is_slave and len(variable.defaultvalue) > 1]
|
||||
|
||||
##class ValidCheckEnumOuiNon(CreoleLinter):
|
||||
## name = 'valid_check_enum_ouinon'
|
||||
## warnno = 6
|
||||
## warncomment = "Variable avec un valid_enum à oui/non au lieu du type oui/non"
|
||||
##
|
||||
## def check(self):
|
||||
## ret = []
|
||||
## for var, content in self.creoledic.variables.items():
|
||||
## print content
|
||||
## if str(type(content)) != "<class 'creole.typeole.OuiNon'>":
|
||||
## for check in content.choices:
|
||||
## if check[0] == u'valid_enum':
|
||||
## for valid_enum in check[1]:
|
||||
## if valid_enum['value'].startswith('['):
|
||||
## eval_valid_enum = eval(valid_enum['value'])
|
||||
## if set(eval_valid_enum) == set(['non', 'oui']):
|
||||
## ret.append(var)
|
||||
## return ret
|
||||
#
|
||||
##class ValidCheckEnumOnOff(CreoleLinter):
|
||||
## name = 'valid_check_enum_onoff'
|
||||
## warnno = 6
|
||||
## warncomment = "Variable avec un valid_enum à on/off au lieu du type on/off"
|
||||
##
|
||||
## def check(self):
|
||||
## ret = []
|
||||
## for var, content in self.creoledic.variables.items():
|
||||
## if str(type(content)) != "<class 'creole.typeole.OnOff'>":
|
||||
## for check in content.checks:
|
||||
## if check[0] == u'valid_enum':
|
||||
## for valid_enum in check[1]:
|
||||
## if valid_enum['value'].startswith('['):
|
||||
## eval_valid_enum = eval(valid_enum['value'])
|
||||
## if set(eval_valid_enum) == set(['on', 'off']):
|
||||
## ret.append(var)
|
||||
## return ret
|
||||
#
|
||||
class TabsInDicosItem(CreoleLinter):
    """Report dictionaries indented with tabs instead of four spaces."""
    name = 'tabs_in_dicos'
    warnno = 5
    warncomment = "Tabulation dans le dictionnaire au lieu de 4 espaces"

    def check(self):
        """Delegate to the shared tab scanner."""
        return self._parse_tabs_in_dicos()
|
||||
|
||||
#class ValidClientOption(CreoleLinter):
|
||||
# name = 'valid_client_option'
|
||||
# warnno = 6
|
||||
# warncomment = "Option inconnu pour %s" % cmd_client[0]
|
||||
#
|
||||
# def check(self):
|
||||
# self.load_tmpls()
|
||||
# return self.unknown_client
|
||||
|
||||
class ValidParseTmpl(CreoleLinter):
    """Ensure every template can be parsed by the Creole engine."""
    name = 'valid_parse_tmpl'
    warnno = 1
    warncomment = "Template Non valide"
    # parse_templates reports errors itself, so nothing is displayed
    # through the usual warning list.
    display = False

    def check(self):
        """Parse every template; always return an empty warning list."""
        parse_templates(self._list_tmpl_files())
        return []
|
||||
|
||||
|
||||
class ValidDTDItem(CreoleLinter):
    """Validate every dictionary against the creole DTD using xmllint."""
    name = 'valid_dtd'
    warnno = 1
    warncomment = "DTD Non valide"
    # xmllint prints its own diagnostics; nothing to display here.
    display = False

    def check(self):
        """Run xmllint --dtdvalid on each dictionary; return no warnings."""
        dtd = self.get_dtd()
        for dico in self.get_dico_file_names():
            system_code(['xmllint', '--noout', '--dtdvalid', dtd, dico])
        return []
|
||||
|
||||
class OldFwFile(CreoleLinter):
    """Detect leftover eole-firewall files from previous releases."""
    name = 'old_fw_file'
    warnno = 5
    warncomment = "Ancien fichier eole-firewall présent sur le serveur"

    def check(self):
        """Return any old *.fw files still present on the server."""
        patterns = ('/usr/share/eole/firewall/*.fw',
                    '/usr/share/eole/creole/distrib/*.fw')
        found = []
        for pattern in patterns:
            found.extend(glob(pattern))
        return found
|
||||
|
||||
def validate(keyword, ansi, tmpl):
    """Run the linter check whose ``name`` attribute equals *keyword*.

    :param keyword: name of the CreoleLinter check to run
    :param ansi: reporter; its process() method is called with the
                 instantiated check
    :param tmpl: template directory or file passed to set_config()
    :raises Exception: if no check matches *keyword*
    """
    globs = globals()
    classitem = None
    for cls in globs:
        # Skip private names and the base helpers.  Bug fix: the
        # original chained these tests with 'and', which made the
        # condition always false (cls cannot equal two strings at
        # once); 'or' is the intended logic.  Class-ness is effectively
        # enforced by the hasattr(..., 'name') test below, so the
        # Python-2-only types.ClassType test is no longer needed.
        if cls.startswith('_') or cls in ('CreoleLinter', 'TmplVar'):
            continue
        if hasattr(globs[cls], 'name'):
            if globs[cls].name == keyword:
                classitem = globs[cls]
                break
    if classitem is None:
        raise Exception('test %s inconnu' % keyword)
    cl = classitem()
    if cl.eoledirs is None:
        cl.set_config(tmpl_dir_or_file=tmpl)
    ansi.process(cl)
|
||||
|
|
@ -0,0 +1,132 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
modules = [
|
||||
'Zéphir',
|
||||
'Eolebase',
|
||||
'Amon',
|
||||
'Sentinelle',
|
||||
'Sphynx',
|
||||
'Scribe',
|
||||
'Eclair',
|
||||
'Horus',
|
||||
'ZéphirLog',
|
||||
'PreludeManager',
|
||||
'AmonEcole',
|
||||
'EoleVZ',
|
||||
'Seshat',
|
||||
'ClientScribe',
|
||||
'ClientHorus']
|
||||
|
||||
projets = [
|
||||
' EOLE', # confusion avec eole-annuaire
|
||||
'EAD',
|
||||
'ead-web',
|
||||
'ead-server',
|
||||
'frontend',
|
||||
'backend',
|
||||
'Era',
|
||||
'ESU',
|
||||
'AGRIATES',
|
||||
'RACINE-AGRIATES',
|
||||
'Samba',
|
||||
'Creole',
|
||||
'GenConfig',
|
||||
'EoleDB',
|
||||
'EoleSSO',
|
||||
'Zéphir',
|
||||
"application Zéphir",
|
||||
"Zéphir-web",
|
||||
]
|
||||
|
||||
os_logiciels_protocoles = [
|
||||
'Linux',
|
||||
'GNU/Linux',
|
||||
'Ubuntu',
|
||||
'Unix',
|
||||
'Windows',
|
||||
'Microsoft',
|
||||
# 'ALIAS',
|
||||
'BlockInput',
|
||||
'Epreuve@SSR',
|
||||
'SSH',
|
||||
'OpenSSH',
|
||||
'DHCP',
|
||||
'DHCPd',
|
||||
'ClamAV',
|
||||
'NuFW',
|
||||
'NuWinC',
|
||||
'Nuauth',
|
||||
'DansGuardian',
|
||||
'Bacula',
|
||||
'Bareos',
|
||||
'TCP',
|
||||
'UDP',
|
||||
'ICMP',
|
||||
'IP',
|
||||
' IPsec', # confusion avec la commande ipsec
|
||||
'strongSwan',
|
||||
'DMZ',
|
||||
'FTP',
|
||||
'SMB',
|
||||
'XML',
|
||||
'XML-RPC',
|
||||
' SSO',
|
||||
# 'CAS',
|
||||
'SAML',
|
||||
'Sympa',
|
||||
'MySQL',
|
||||
'SpamAssassin',
|
||||
'web',
|
||||
'phpMyAdmin',
|
||||
'Grr',
|
||||
'Gibii',
|
||||
'Gepi',
|
||||
'SPIP-EVA',
|
||||
'Envole',
|
||||
'Envole 2',
|
||||
'WebShare',
|
||||
' CSS', # confusion avec .css
|
||||
'CUPS',
|
||||
'OpenOffice.org',
|
||||
'GDM',
|
||||
'client léger',
|
||||
'client lourd',
|
||||
'OpenLDAP',
|
||||
'ProFTPD',
|
||||
'Vim',
|
||||
'Controle-vnc',
|
||||
'BE1D',
|
||||
'RVP',
|
||||
'PostgreSQL',
|
||||
'Squid',
|
||||
'NUT',
|
||||
'PPPoE',
|
||||
'VLAN',
|
||||
'SSL',
|
||||
'Nginx',
|
||||
'reverse proxy',
|
||||
'SquirrelMail',
|
||||
'LDAP',
|
||||
'FreeRADIUS',
|
||||
'LightSquid',
|
||||
'SARG',
|
||||
'iptables',
|
||||
'Netfilter',
|
||||
'POSH',
|
||||
'InterBase',
|
||||
'OCS',
|
||||
]
|
||||
|
||||
divers = [
|
||||
'Éducation nationale',
|
||||
'Internet',
|
||||
'intranet',
|
||||
'pare-feu',
|
||||
'anti-virus',
|
||||
'anti-spam',
|
||||
'USB',
|
||||
'relai',
|
||||
]
|
||||
|
||||
# Flat lookup list consumed by the linter; concatenation order matters
# because lookups are index-based (see is_in_entities / is_correct).
entities = modules + projets + os_logiciels_protocoles + divers
|
||||
|
|
@ -0,0 +1,82 @@
|
|||
"""errno-specific classes"""
|
||||
|
||||
import sys, os, errornb
|
||||
|
||||
class Error(EnvironmentError):
|
||||
def __repr__(self):
|
||||
return "%s.%s %r: %s " %(self.__class__.__module__,
|
||||
self.__class__.__name__,
|
||||
self.__class__.__doc__,
|
||||
" ".join(map(str, self.args)),
|
||||
#repr(self.args)
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
s = "[%s]: %s" %(self.__class__.__doc__,
|
||||
" ".join(map(str, self.args)),
|
||||
)
|
||||
return s
|
||||
|
||||
#FIXME set the different error better suited than errno
|
||||
_winerrnomap = {
|
||||
2: errsymb.ENOENT,
|
||||
3: errsymb.ENOENT,
|
||||
17: errsymb.EEXIST,
|
||||
22: errsymb.ENOTDIR,
|
||||
267: errsymb.ENOTDIR,
|
||||
5: errsymb.EACCES, # anything better?
|
||||
}
|
||||
|
||||
class ErrorMaker(object):
    """ lazily provides Exception classes for each possible POSIX errno
        (as defined per the 'errno' module). All such instances
        subclass EnvironmentError.
    """
    Error = Error
    # Cache: errno number -> generated Error subclass (shared by all
    # instances on purpose; the classes are stateless).
    _errno2class = {}

    def __getattr__(self, name):
        # ``error.ENOENT`` etc.: resolve the symbolic name through the
        # errno module, build (or fetch) the matching class, then cache it
        # as a real attribute so __getattr__ is hit only once per name.
        eno = getattr(errno, name)
        cls = self._geterrnoclass(eno)
        setattr(self, name, cls)
        return cls

    def _geterrnoclass(self, eno):
        # Return the Error subclass for numeric errno ``eno``, creating it
        # on first use with os.strerror() as its docstring (which Error's
        # __str__/__repr__ display).
        try:
            return self._errno2class[eno]
        except KeyError:
            clsname = errno.errorcode.get(eno, "UnknownErrno%d" %(eno,))
            errorcls = type(Error)(clsname, (Error,),
                                   {'__module__':'py.error',
                                    '__doc__': os.strerror(eno)})
            self._errno2class[eno] = errorcls
            return errorcls

    def checked_call(self, func, *args):
        """ call a function and raise an errno-exception if applicable. """
        __tracebackhide__ = True
        try:
            return func(*args)
        except self.Error:
            # Already one of our generated classes: propagate unchanged.
            raise
        except EnvironmentError:
            cls, value, tb = sys.exc_info()
            if not hasattr(value, 'errno'):
                raise
            __tracebackhide__ = False
            # NOTE: local ``errno`` deliberately shadows the errno module
            # from here on -- only the numeric code is needed below.
            errno = value.errno
            try:
                # On platforms without WindowsError this name lookup raises
                # NameError, which the except below treats as "not Windows".
                if not isinstance(value, WindowsError):
                    raise NameError
            except NameError:
                # we are not on Windows, or we got a proper OSError
                cls = self._geterrnoclass(errno)
            else:
                # Windows-specific code: translate via _winerrnomap; codes
                # with no POSIX equivalent re-raise the original error.
                try:
                    cls = self._geterrnoclass(_winerrnomap[errno])
                except KeyError:
                    raise value
            raise cls("%s%r" % (func.__name__, args))
        __tracebackhide__ = True

# Module-level singleton: ``error.ENOENT``, ``error.checked_call(...)``.
error = ErrorMaker()
|
|
@ -0,0 +1,76 @@
|
|||
# coding: utf-8
|
||||
import re
|
||||
import unicodedata
|
||||
from entities import entities
|
||||
|
||||
# ______________________________________________________________________________
|
||||
|
||||
ENCODING = 'utf-8'
|
||||
|
||||
def strip_accents(string):
    # Decompose accented characters (NFKD), then drop the combining marks by
    # encoding to ASCII with errors ignored.  ``string`` is a byte string in
    # the module-wide ENCODING; the result is an ASCII byte string.
    # NOTE(review): relies on Python 2 ``unicode`` -- not Python 3 ready.
    return unicodedata.normalize('NFKD', unicode(string, ENCODING)
                                 ).encode('ASCII', 'ignore')
|
||||
|
||||
def normalize_entities():
    """
    Strip accents from the entity list and lowercase every entry.

    :return: normalised entities, in the same order as ``entities`` --
             the one-to-one index mapping is relied upon by
             is_in_entities()/is_correct().
    """
    return [strip_accents(entity).lower() for entity in entities]

# Computed once at import time; kept parallel to ``entities``.
NORM_ENTITIES = normalize_entities()
|
||||
|
||||
# ______________________________________________________________________________
|
||||
|
||||
def parse_string(text):
    """
    Extract the word tokens of ``text`` (runs of letters, including common
    French accented characters, and underscores).

    NOTE(review): the historical docstring claimed this stripped accents;
    it only tokenises -- the strip_accents call is commented out below.
    """
    # libelle = strip_accents(text)
    words = re.findall('([a-zA-Zéèàùêôëö_]+)', text)
    return words
|
||||
|
||||
def is_in_entities(text):
    """Return the index of ``text`` in the entity list, or None.

    The lookup is case-insensitive: it matches against the normalised
    (accent-free, lowercase) NORM_ENTITIES list, whose indexes are shared
    with ``entities``.
    """
    lowered = text.lower()
    try:
        return NORM_ENTITIES.index(lowered)
    except ValueError:
        return None
|
||||
|
||||
def is_correct(libelle, name, family=False):
    """Spell/style-check a label against the known entity list.

    :param libelle: label to check (unicode or byte string, may be None)
    :param name: variable or family name used in the warning messages
    :param family: True when checking a family label (changes message format)
    :return: list of warning strings (empty when nothing is wrong)
    """
    # Normalise to a byte string in the module ENCODING (Python 2 only).
    if libelle is not None and type(libelle) != str:
        libelle = unicode.encode(libelle, ENCODING)
    ret = []
    if libelle == '' or libelle is None:
        return ret
    # A label should start with an uppercase letter ...
    if libelle[0].islower():
        #FIXME: known false positives
        # ... except for product names that are legitimately lowercase.
        if not libelle.startswith('ejabberd') and \
           not libelle.startswith('phpMyAdmin'):
            ret.append('%%%%%s : phrase sans majuscule'%name)
    # Flag words that match a known entity but with different casing/accents.
    for text in parse_string(libelle):
        text_index = is_in_entities(text)
        if not text_index == None:
            if str(text) != str(entities[text_index]):
                #FIXME: known false positives
                # Skip labels quoting file names / commands that merely look
                # like entity names.
                if 'ipsec.conf' in libelle or 'test-rvp' in libelle \
                   or 'bareos-' in libelle \
                   or 'bacula-' in libelle \
                   or '/var/log/zephir' in libelle \
                   or 'exemple : eolebase' in libelle:
                    continue
                # Canonical spelling, re-encoded for the message.
                ent = str(unicode.encode((unicode(entities[text_index], ENCODING)), ENCODING))
                if family:
                    ret.append('famille [%s] : %s => %s' % (str(name), text, ent))
                else:
                    ret.append('%%%%%s : %s => %s' % (str(name), text, ent))
    return ret
|
||||
# ______________________________________________________________________________
|
||||
|
|
@ -0,0 +1,660 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import sys
|
||||
from os.path import basename
|
||||
from creole.loader import creole_loader
|
||||
from creole.client import CreoleClient
|
||||
from creole.template import CreoleGet, IsDefined, CreoleTemplateEngine, CreoleMaster
|
||||
from creole import eosfunc
|
||||
from tiramisu.option import *
|
||||
from tiramisu import Config
|
||||
from tiramisu.error import ConfigError, PropertiesOptionError, \
|
||||
RequirementError, ValueWarning
|
||||
from Cheetah import Parser, Compiler
|
||||
from Cheetah.Template import Template
|
||||
from Cheetah.NameMapper import NotFound
|
||||
from pyeole.ansiprint import print_red
|
||||
from creole.eosfunc import valid_regexp
|
||||
from Cheetah.Unspecified import Unspecified
|
||||
import warnings
|
||||
|
||||
|
||||
DEBUG = False
|
||||
#DEBUG = True
|
||||
|
||||
|
||||
client = CreoleClient()
|
||||
compilerSettings = {'directiveStartToken' : u'%',
|
||||
'cheetahVarStartToken' : u'%%', 'EOLSlurpToken' : u'%',
|
||||
'PSPStartToken' : u'µ' * 10, 'PSPEndToken' : u'µ' * 10,
|
||||
'commentStartToken' : u'µ' * 10, 'commentEndToken' : u'µ' * 10,
|
||||
'multiLineCommentStartToken' : u'µ' * 10,
|
||||
'multiLineCommentEndToken' : u'µ' * 10}
|
||||
|
||||
#======================= CHEETAH =======================
|
||||
# This class is used to retrieve all template vars
|
||||
#true_HighLevelParser = Parser._HighLevelParser
|
||||
global cl_chunks, cl_vars
|
||||
cl_chunks = set()
|
||||
cl_vars = set()
|
||||
class cl_Parser(Parser.Parser):
|
||||
|
||||
def getCheetahVarNameChunks(self, *args, **kwargs):
|
||||
global cl_chunks
|
||||
chunks = super(cl_Parser, self).getCheetahVarNameChunks(*args, **kwargs)
|
||||
for chunk in chunks:
|
||||
#if false, it's internal variable
|
||||
if chunk[1]:
|
||||
name = chunk[0]
|
||||
#remove master if master/slave and add force adding master
|
||||
if '.' in name:
|
||||
cl_chunks.add(name.split('.')[-1])
|
||||
cl_chunks.add(name.split('.')[0])
|
||||
else:
|
||||
cl_chunks.add(name)
|
||||
return chunks
|
||||
|
||||
def getCheetahVar(self, *args, **kwargs):
|
||||
global cl_vars
|
||||
var = super(cl_Parser, self).getCheetahVar(*args, **kwargs)
|
||||
if not var.startswith(u'VFFSL('):
|
||||
cl_vars.add(var)
|
||||
return var
|
||||
|
||||
def getVars():
    """Return the template variables collected by cl_Parser and reset state.

    The parser accumulated every variable-name chunk in ``cl_chunks`` and
    every resolved Cheetah var in ``cl_vars``; the difference is the set of
    names still to be provided (i.e. the "calculated" variables).
    """
    global cl_chunks, cl_vars
    #retrieve all calculated vars
    ret = list(cl_chunks - cl_vars)
    # Reset the module-level accumulators for the next compilation.
    cl_chunks = set()
    cl_vars = set()
    return ret
|
||||
|
||||
class CompilerGetVars(Compiler.ModuleCompiler):
|
||||
parserClass = cl_Parser
|
||||
|
||||
|
||||
true_compile = Template.compile
|
||||
@classmethod
|
||||
def cl_compile(kls, *args, **kwargs):
|
||||
kwargs['compilerClass'] = CompilerGetVars
|
||||
kwargs['useCache'] = False
|
||||
return true_compile(*args, **kwargs)
|
||||
Template.compile = cl_compile
|
||||
|
||||
def CompilerGetVar(varName, default=Unspecified):
|
||||
#remplace Cheetah's getVar function
|
||||
#this function permite to known variable if getVar is used
|
||||
if varName.startswith('%%'):
|
||||
raise Exception('varname should not start with %% {0}'.format(varName))
|
||||
global extra_vars, config
|
||||
config.read_only()
|
||||
try:
|
||||
option = config.creole.find_first(byname=varName)
|
||||
path = config.cfgimpl_get_description().impl_get_path_by_opt(option)
|
||||
value = getattr(config, path)
|
||||
except (AttributeError, ConfigError):
|
||||
try:
|
||||
option = config.creole.find_first(byname=varName, check_properties=False)
|
||||
path = config.cfgimpl_get_description().impl_get_path_by_opt(option)
|
||||
#populate_mandatory(config, option, path, raise_propertyerror=True)
|
||||
config.read_write()
|
||||
populate_mandatories()
|
||||
config.read_only()
|
||||
value = getattr(config, path)
|
||||
except (AttributeError, RequirementError), err:
|
||||
config.read_only()
|
||||
#support default value
|
||||
if default != Unspecified:
|
||||
return default
|
||||
else:
|
||||
raise AttributeError('option:', varName, ':', err)
|
||||
except PropertiesOptionError as err:
|
||||
if default != Unspecified:
|
||||
return default
|
||||
else:
|
||||
raise err
|
||||
except Exception as err:
|
||||
config.read_only()
|
||||
raise err
|
||||
except Exception as err:
|
||||
config.read_only()
|
||||
raise err
|
||||
lpath = '.'.join(path.split('.')[2:])
|
||||
dico = {lpath: value}
|
||||
engine = CreoleTemplateEngine(force_values=dico)
|
||||
name = path.split('.')[-1]
|
||||
extra_vars[option] = name
|
||||
if "." in lpath:
|
||||
spath = lpath.split('.')
|
||||
if spath[0] == spath[1]:
|
||||
ret = engine.creole_variables_dict[name]
|
||||
else:
|
||||
ret = engine.creole_variables_dict[spath[0]].slave[spath[1]]
|
||||
else:
|
||||
ret = engine.creole_variables_dict[name]
|
||||
return ret
|
||||
|
||||
def CompilerGetattr(creolemaster, name, default=None):
|
||||
if not isinstance(creolemaster, CreoleMaster):
|
||||
raise Exception('creolemaster must be CreoleMaster, not {0}'.format(type(creolemaster)))
|
||||
if name not in creolemaster.slave:
|
||||
#FIXME assume name is slave?
|
||||
value = CompilerGetVar(name, default)
|
||||
if creolemaster._index is not None:
|
||||
value = value[creolemaster._index]
|
||||
creolemaster.add_slave(name, value)
|
||||
return getattr(creolemaster, name, default)
|
||||
|
||||
#======================= EOSFUNC =======================
|
||||
eos = {}
|
||||
for func in dir(eosfunc):
|
||||
if not func.startswith('_'):
|
||||
eos[func] = getattr(eosfunc, func)
|
||||
|
||||
#======================= CONFIG =======================
|
||||
def populate_mandatory(config, option, path, raise_propertyerror=False):
|
||||
def _build_network(path):
|
||||
for num in range(0, 4):
|
||||
if path.startswith('creole.interface_{0}'.format(num)):
|
||||
return num
|
||||
#si il y a un test de consistence de type _cons_in_network (l'IP doit être dans un network défini)
|
||||
#on utilise le réseau de ce network #10714
|
||||
if getattr(option, '_consistencies', None) is not None:
|
||||
for const in option._consistencies:
|
||||
if const[0] == '_cons_in_network':
|
||||
try:
|
||||
opt = const[1][1]
|
||||
path = config.cfgimpl_get_description().impl_get_path_by_opt(opt)
|
||||
val = config.getattr(path, force_permissive=True)
|
||||
if isinstance(val, list):
|
||||
val = val[0]
|
||||
return val.split('.')[2]
|
||||
except IndexError:
|
||||
pass
|
||||
return 5
|
||||
def _build_ip(path):
|
||||
if path.endswith('_fichier_link'):
|
||||
return 3
|
||||
elif path.endswith('_proxy_link'):
|
||||
return 2
|
||||
else:
|
||||
#ne pas retourner la même valeur si elle est censé être différente
|
||||
if getattr(option, '_consistencies', None) is not None:
|
||||
for const in option._consistencies:
|
||||
if const[0] == '_cons_not_equal':
|
||||
return 4
|
||||
|
||||
return 1
|
||||
if option.impl_getname().startswith('nom_carte_eth'):
|
||||
value = unicode(option.impl_getname())
|
||||
elif isinstance(option, UnicodeOption):
|
||||
value = u'value'
|
||||
elif isinstance(option, IPOption):
|
||||
value = u'192.168.{0}.{1}'.format(_build_network(path), _build_ip(path))
|
||||
elif isinstance(option, NetworkOption):
|
||||
value = u'192.168.{0}.0'.format(_build_network(path))
|
||||
elif isinstance(option, NetmaskOption):
|
||||
value = u'255.255.255.0'
|
||||
elif isinstance(option, BroadcastOption):
|
||||
value = u'192.168.{0}.255'.format(_build_network(path))
|
||||
elif isinstance(option, EmailOption):
|
||||
value = u'foo@bar.com'
|
||||
elif isinstance(option, URLOption):
|
||||
value = u'http://foo.com/bar'
|
||||
elif isinstance(option, DomainnameOption):
|
||||
allow_without_dot = option._get_extra('_allow_without_dot')
|
||||
o_type = option._get_extra('_dom_type')
|
||||
if option._name == 'smb_workgroup':
|
||||
value = u'othervalue'
|
||||
elif o_type in ['netbios', 'hostname']:
|
||||
value = u'value'
|
||||
else:
|
||||
value = u'value.lan'
|
||||
elif isinstance(option, FilenameOption):
|
||||
value = u'/tmp/foo'
|
||||
elif isinstance(option, ChoiceOption):
|
||||
#FIXME devrait le faire tout seul non ?
|
||||
value = option.impl_get_values(config)[0]
|
||||
elif isinstance(option, IntOption):
|
||||
value = 1
|
||||
elif isinstance(option, PortOption):
|
||||
value = 80
|
||||
elif isinstance(option, DomainnameOption):
|
||||
value = 'foo.com'
|
||||
elif isinstance(option, UsernameOption):
|
||||
value = 'toto'
|
||||
elif isinstance(option, PasswordOption):
|
||||
value = 'P@ssWord'
|
||||
else:
|
||||
raise Exception('the Tiramisu type {0} is not supported by CreoleLint (variable : {1})'.format(type(option), path))
|
||||
validator = option.impl_get_validator()
|
||||
if validator is not None and validator[0] == valid_regexp:
|
||||
regexp = validator[1][''][0]
|
||||
# génération d'une "value" valide
|
||||
# en cas de valid_regexp sans valeur par défaut
|
||||
if regexp == u'^[A-Z][0-9]$':
|
||||
value = u'A1'
|
||||
elif option._name == 'additional_repository_source':
|
||||
# variable avec expression (très) spécifique #20291
|
||||
value = u"deb http://test dist"
|
||||
elif not regexp.startswith(u'^[a-z0-9]') and regexp.startswith('^'):
|
||||
value = regexp[1:]
|
||||
if option.impl_is_multi():
|
||||
if option.impl_is_master_slaves('slave'):
|
||||
#slave should have same length as master
|
||||
masterpath = '.'.join(path.split('.')[:-1]+[path.split('.')[-2]])
|
||||
try:
|
||||
len_master = len(getattr(config, masterpath))
|
||||
val = []
|
||||
for i in range(0, len_master):
|
||||
val.append(value)
|
||||
value = val
|
||||
except:
|
||||
value = [value]
|
||||
else:
|
||||
value = [value]
|
||||
try:
|
||||
config.setattr(path, value, force_permissive=True)
|
||||
except ValueError, err:
|
||||
msg = str('error for {0} type {1}: {2}'.format(path, type(option), err))
|
||||
raise Exception(msg)
|
||||
except PropertiesOptionError, err:
|
||||
if 'frozen' not in err.proptype:
|
||||
if raise_propertyerror:
|
||||
raise err
|
||||
msg = str('error for {0} type {1}: {2}'.format(path, type(option), err))
|
||||
raise Exception(msg)
|
||||
|
||||
|
||||
class Reload(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class Check_Template:
|
||||
|
||||
def __init__(self, template_name):
|
||||
self.all_requires = {}
|
||||
self.current_opt = {}
|
||||
self.od_list = {}
|
||||
global extra_vars
|
||||
#reinit extra_vars
|
||||
extra_vars = {}
|
||||
self.old_dico = []
|
||||
self.current_var = []
|
||||
self.ori_options = []
|
||||
self.file_path = None
|
||||
self.template_name = template_name
|
||||
self.current_container = client.get_container_infos('mail')
|
||||
self.tmpl = None
|
||||
self.is_tmpl = False
|
||||
self.filename_ok = False
|
||||
|
||||
|
||||
def populate_requires(self, option, path, force=False):
|
||||
def _parse_requires(_option):
|
||||
o_requires = _option.impl_getrequires()
|
||||
if o_requires is not None:
|
||||
for requires in o_requires:
|
||||
for require in requires:
|
||||
opt_ = require[0]
|
||||
path_ = config.cfgimpl_get_description().impl_get_path_by_opt(opt_)
|
||||
self.populate_requires(opt_, path_, force=True)
|
||||
if not force and not path.startswith('creole.'):
|
||||
return
|
||||
if option in self.current_opt:
|
||||
return
|
||||
o_requires = option.impl_getrequires()
|
||||
if o_requires is not None:
|
||||
for requires in o_requires:
|
||||
for require in requires:
|
||||
if require[0].impl_is_master_slaves('slave'):
|
||||
path_ = config.cfgimpl_get_description().impl_get_path_by_opt(require[0])
|
||||
s_path = path_.split('.')
|
||||
master_path = 'creole.' + s_path[1] + '.' + s_path[2] + '.' + s_path[2]
|
||||
try:
|
||||
opt_master = config.unwrap_from_path(master_path)
|
||||
config.cfgimpl_get_settings().remove('everything_frozen')
|
||||
populate_mandatory(config, opt_master, master_path)
|
||||
except:
|
||||
pass
|
||||
self.all_requires.setdefault(option, []).append(require[0])
|
||||
if isinstance(option, OptionDescription):
|
||||
self.od_list[path] = option
|
||||
if force and not option._name in self.current_var:
|
||||
self.current_var.append(option._name)
|
||||
if option._name in self.current_var or not path.startswith('creole.'):
|
||||
if not isinstance(option, OptionDescription):
|
||||
if path.startswith('creole.'):
|
||||
self.current_opt[option] = '.'.join(path.split('.')[1:])
|
||||
else:
|
||||
self.current_opt[option] = None
|
||||
_parse_requires(option)
|
||||
#requires could be in parent's too
|
||||
opath = ''
|
||||
for parent in path.split('.')[:-1]:
|
||||
opath += parent
|
||||
if opath in self.od_list:
|
||||
desc = self.od_list[opath]
|
||||
self.current_opt[desc] = None
|
||||
_parse_requires(desc)
|
||||
opath += '.'
|
||||
try:
|
||||
if option._callback is not None:
|
||||
for params in option._callback[1].values():
|
||||
for param in params:
|
||||
if isinstance(param, tuple):
|
||||
opt = param[0]
|
||||
path = config.cfgimpl_get_description().impl_get_path_by_opt(opt)
|
||||
self.populate_requires(opt, path, force=True)
|
||||
except (AttributeError, KeyError):
|
||||
pass
|
||||
|
||||
def read_write(self):
|
||||
config.read_write()
|
||||
config.cfgimpl_get_settings().remove('disabled')
|
||||
config.cfgimpl_get_settings().remove('hidden')
|
||||
config.cfgimpl_get_settings().remove('frozen')
|
||||
|
||||
def change_value(self, path, value, multi, parse_message, option):
|
||||
self.read_write()
|
||||
config.cfgimpl_get_settings()[option].remove('force_default_on_freeze')
|
||||
if multi:
|
||||
if option.impl_is_master_slaves('slave'):
|
||||
s_path = path.split('.')
|
||||
master_path = s_path[0] + '.' + s_path[1] + '.' + s_path[2] + '.' + s_path[2]
|
||||
master_option = config.cfgimpl_get_description().impl_get_opt_by_path(master_path)
|
||||
if getattr(config, master_path) == []:
|
||||
populate_mandatory(config, master_option, master_path)
|
||||
value = [value]
|
||||
if parse_message:
|
||||
print parse_message, value
|
||||
setattr(config, path, value)
|
||||
config.read_only()
|
||||
|
||||
def template(self):
|
||||
self.last_notfound = []
|
||||
def get_value(opt_, path_):
|
||||
try:
|
||||
return getattr(config.creole, path_)
|
||||
except PropertiesOptionError, err:
|
||||
if err.proptype == ['mandatory']:
|
||||
self.read_write()
|
||||
config.cfgimpl_get_settings().remove('mandatory')
|
||||
s_path = path_.split('.')
|
||||
#set value to master
|
||||
if len(s_path) == 3 and s_path[1] != s_path[2]:
|
||||
master_path = 'creole.' + s_path[0] + '.' + s_path[1] + '.' + s_path[1]
|
||||
opt_master = config.unwrap_from_path(master_path)
|
||||
populate_mandatory(config, opt_master, master_path)
|
||||
populate_mandatory(config, opt_, 'creole.' + path_)
|
||||
config.read_only()
|
||||
config.cfgimpl_get_settings().remove('mandatory')
|
||||
try:
|
||||
ret = getattr(config.creole, path_)
|
||||
config.cfgimpl_get_settings().append('mandatory')
|
||||
return ret
|
||||
except PropertiesOptionError:
|
||||
pass
|
||||
raise NotFound('no value')
|
||||
except ConfigError:
|
||||
self.read_write()
|
||||
populate_mandatory(config, opt_, 'creole.' + path_)
|
||||
config.read_only()
|
||||
try:
|
||||
return getattr(config.creole, path_)
|
||||
except ConfigError, err:
|
||||
raise err
|
||||
except PropertiesOptionError, err:
|
||||
raise NotFound('no value')
|
||||
try:
|
||||
is_gen_file = getattr(config, self.file_path)
|
||||
except PropertiesOptionError, err:
|
||||
is_gen_file = False
|
||||
if not is_gen_file:
|
||||
return
|
||||
try:
|
||||
config.read_write()
|
||||
populate_mandatories()
|
||||
config.read_only()
|
||||
dico = {}
|
||||
for opt_, path_ in self.current_opt.items():
|
||||
#path_ is None if it's an OptionDescription
|
||||
if path_ is None:
|
||||
continue
|
||||
try:
|
||||
dico[path_] = get_value(opt_, path_)
|
||||
except NotFound:
|
||||
pass
|
||||
#FIXME revoir le strip_full_path
|
||||
ndico = {}
|
||||
for path_, value in dico.items():
|
||||
sdico = path_.split('.')
|
||||
if len(sdico) == 2:
|
||||
ndico[sdico[1]] = value
|
||||
elif len(sdico) == 3:
|
||||
if sdico[1] == sdico[2]:
|
||||
ndico[sdico[1]] = value
|
||||
else:
|
||||
ndico['.'.join(sdico[1:])] = value
|
||||
else:
|
||||
raise Exception('chemin de longueur inconnu {}'.format(path_))
|
||||
engine = CreoleTemplateEngine(force_values=ndico)
|
||||
dico = engine.creole_variables_dict
|
||||
self.read_write()
|
||||
except ConfigError, err:
|
||||
msg = 'erreur de templating', err
|
||||
raise ValueError(msg)
|
||||
diff = True
|
||||
for old in self.old_dico:
|
||||
if dico.keys() == old.keys():
|
||||
for key in old.keys():
|
||||
if old[key] != dico[key]:
|
||||
diff = False
|
||||
break
|
||||
if not diff:
|
||||
break
|
||||
if not diff:
|
||||
return
|
||||
try:
|
||||
self.old_dico.append(dico)
|
||||
searchlist = [dico, eos, {'is_defined' : IsDefined(dico),
|
||||
'creole_client' : CreoleClient(),
|
||||
'current_container': CreoleGet(self.current_container),
|
||||
}]
|
||||
rtmpl = self.tmpl(searchList=searchlist)
|
||||
rtmpl.getVar = CompilerGetVar
|
||||
rtmpl.getattr = CompilerGetattr
|
||||
rtmpl = str(rtmpl)
|
||||
#print rtmpl
|
||||
self.is_tmpl = True
|
||||
except NotFound, err:
|
||||
lst = getVars()
|
||||
if lst == []:
|
||||
raise Exception("Il manque une option", err, 'avec le dictionnaire', dico)
|
||||
for ls in lst:
|
||||
try:
|
||||
CompilerGetVar(ls)
|
||||
except AttributeError:
|
||||
self.last_notfound.append(ls)
|
||||
raise Reload('')
|
||||
except Exception, err:
|
||||
raise Exception("Il y a une erreur", err, 'avec le dictionnaire', dico)
|
||||
|
||||
def check_reload_with_extra(self):
|
||||
#if extra_vars has value, check if not already in current_opt
|
||||
global extra_vars
|
||||
if extra_vars != {}:
|
||||
oret = set(extra_vars.keys())
|
||||
opt_requires = oret & set(self.all_requires.keys())
|
||||
for opt_ in opt_requires:
|
||||
oret.update(self.all_requires[opt_])
|
||||
dont_exists = set(oret) - set(self.current_opt.keys())
|
||||
ret = []
|
||||
for opt_ in dont_exists:
|
||||
try:
|
||||
ret.append(extra_vars[opt_])
|
||||
except KeyError:
|
||||
ret.append(opt_._name)
|
||||
extra_vars = {}
|
||||
if ret == []:
|
||||
return None
|
||||
return ret
|
||||
|
||||
def test_all_values_for(self, options, cpt):
|
||||
option = options[0]
|
||||
parse_message = None
|
||||
if DEBUG:
|
||||
parse_message = '*' * cpt + '>' + option._name
|
||||
|
||||
if not isinstance(option, ChoiceOption):
|
||||
msg = str('pas simple la... ' + option._name)
|
||||
raise NotImplementedError(msg)
|
||||
multi = option.impl_is_multi()
|
||||
path = config.cfgimpl_get_description().impl_get_path_by_opt(option)
|
||||
for value in option.impl_get_values(config):
|
||||
self.change_value(path, value, multi, parse_message, option)
|
||||
if options[1:] != []:
|
||||
#if already value to test, restart test_all_values_for
|
||||
ret = self.test_all_values_for(options[1:], cpt + 1)
|
||||
if ret != None:
|
||||
return ret
|
||||
else:
|
||||
need_reload = False
|
||||
try:
|
||||
self.template()
|
||||
except Reload:
|
||||
need_reload = True
|
||||
ret = self.check_reload_with_extra()
|
||||
if need_reload and ret is None:
|
||||
notfound = []
|
||||
paths = config.cfgimpl_get_description()._cache_paths[1]
|
||||
for ls in self.last_notfound:
|
||||
#if variable is locale (means template) variable, not config's one
|
||||
for path in paths:
|
||||
if path.endswith('.' + ls):
|
||||
notfound.append(ls)
|
||||
break
|
||||
if notfound != []:
|
||||
raise Exception('variable not found after reload {0}'.format(notfound))
|
||||
if ret is not None:
|
||||
return ret
|
||||
|
||||
|
||||
def open_file(self, force_var):
|
||||
# Open template and compile it
|
||||
# retrieve template vars (add force_var if needed)
|
||||
filecontent = open(self.template_name).read()
|
||||
#try to convert content in unicode
|
||||
self.tmpl = Template.compile(filecontent, compilerSettings=compilerSettings) # ,
|
||||
#compilerClass=CompilerGetVars)
|
||||
self.current_var = getVars()
|
||||
if force_var:
|
||||
self.current_var.extend(force_var)
|
||||
|
||||
def populate_file(self, path, option):
|
||||
if path.startswith('containers.files.file'):
|
||||
if path.endswith('.source') and option.impl_getdefault().endswith('/{0}'.format(self.template_name.split('/')[-1])):
|
||||
self.filename_ok = True
|
||||
if self.filename_ok and path.endswith('.activate'):
|
||||
self.file_path = path
|
||||
self.filename_ok = False
|
||||
self.populate_requires(option, path, force=True)
|
||||
|
||||
def test_all_values(self):
|
||||
try:
|
||||
options = list(set(self.all_requires.keys())&set(self.current_opt.keys()))
|
||||
need_tmpl = False
|
||||
if options != []:
|
||||
requires_options = set()
|
||||
for opt in options:
|
||||
for op in self.all_requires[opt]:
|
||||
if 'frozen' not in config.cfgimpl_get_settings()[op]:
|
||||
requires_options.add(op)
|
||||
if requires_options == set([]):
|
||||
need_tmpl = True
|
||||
else:
|
||||
self.ori_options = requires_options
|
||||
ret = self.test_all_values_for(list(requires_options), 0)
|
||||
if ret is not None:
|
||||
if DEBUG:
|
||||
print "reload with", ret
|
||||
self.check_template(ret, already_load=True)
|
||||
else:
|
||||
need_tmpl = True
|
||||
|
||||
if need_tmpl is True:
|
||||
try:
|
||||
self.template()
|
||||
except:
|
||||
self.test_all_values()
|
||||
except Exception, err:
|
||||
if DEBUG:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
msg = self.template_name, ':', err
|
||||
raise Exception(msg)
|
||||
|
||||
def check_template(self, force_var=None, already_load=False):
|
||||
#remove all modification (value, properties, ...)
|
||||
open_error = None
|
||||
try:
|
||||
self.open_file(force_var)
|
||||
except Exception, err:
|
||||
open_error = "problème à l'ouverture du fichier {}".format(self.template_name)
|
||||
|
||||
config.read_only()
|
||||
for index, option in enumerate(config.cfgimpl_get_description()._cache_paths[0]):
|
||||
path = config.cfgimpl_get_description()._cache_paths[1][index]
|
||||
self.populate_file(path, option)
|
||||
self.populate_requires(option, path)
|
||||
if self.file_path is None:
|
||||
if open_error is not None:
|
||||
print "le fichier {0} non présent dans un dictionnaire a un problème : {1}".format(basename(self.template_name),
|
||||
open_error)
|
||||
else:
|
||||
print " \\-- fichier non présent dans un dictionnaire {0}".format(self.template_name)
|
||||
return
|
||||
if open_error is not None:
|
||||
raise Exception(open_error)
|
||||
|
||||
if not already_load:
|
||||
print " \\--", self.template_name
|
||||
self.test_all_values()
|
||||
if not self.is_tmpl:
|
||||
print "pas de templating !"
|
||||
|
||||
|
||||
def populate_mandatories():
    """Fill every unset mandatory creole option with a plausible test value.

    Iterates the config's mandatory warnings and delegates value synthesis
    to populate_mandatory(); only 'creole.' paths are handled.
    """
    for path in config.cfgimpl_get_values().mandatory_warnings(config):
        if path.startswith('creole.'):
            option = config.cfgimpl_get_description().impl_get_opt_by_path(path)
            try:
                populate_mandatory(config, option, path)
            except PropertiesOptionError:
                # Option currently masked by its properties (disabled,
                # hidden, ...): deliberately best-effort, skip it.
                pass
|
||||
|
||||
|
||||
def parse_templates(templates_name):
    """Check every template in ``templates_name`` against a fresh config.

    For each template the pristine loaded config is duplicated so that the
    value/property mutations performed by one check cannot leak into the
    next.  On any check failure the error is printed in red and the whole
    process exits with status 1.
    """
    global config, cl_chunks, cl_vars, extra_vars
    config = creole_loader(load_values=False, load_extra=True)
    config.read_write()
    populate_mandatories()
    # Keep the pristine config; each template works on a duplicate.
    cfg = config
    for template_name in templates_name:
        # Reset the module-level parser/engine accumulators per template.
        cl_chunks = set()
        cl_vars = set()
        extra_vars = {}
        config = cfg.duplicate()
        config.read_write()
        populate_mandatories()
        ctmpl = Check_Template(template_name)
        try:
            ctmpl.check_template()
        except Exception, err:
            if DEBUG:
                import traceback
                traceback.print_exc()
            print_red(str(err))
            # Fail fast: one bad template aborts the whole run.
            sys.exit(1)
|
||||
|
|
@ -0,0 +1,125 @@
|
|||
"""
|
||||
Implements terminal reporting of the full validation process.
|
||||
|
||||
Implements the various reporting hooks.
|
||||
XXX: Currently in progress, NOT IN WORKING STATE.
|
||||
|
||||
"""
|
||||
import sys
|
||||
|
||||
def pytest_addoption(parser):
    """Register terminal-reporting command line options with pytest.

    NOTE(review): the trailing commas after two of the calls create
    discarded one-element tuples -- harmless but presumably unintended.
    The ``_addoption`` calls use a private API; verify against the pytest
    version in use.
    """
    group = parser.getgroup("terminal reporting", after="general")
    group._addoption('-v', '--verbose', action="count",
               dest="verbose", default=0, help="increase verbosity."),
    group.addoption('--report',
        action="store", dest="report", default=None, metavar="opts",
        help="comma separated options, valid: skipped,xfailed")
    group._addoption('--fulltrace',
               action="store_true", dest="fulltrace", default=False,
               help="don't cut any tracebacks (default is to cut).")

    group.addoption('--traceconfig',
        action="store_true", dest="traceconfig", default=False,
        help="trace considerations of conftest.py files."),
|
||||
|
||||
class TerminalReporter:
    """Console reporter: writes per-test progress and end-of-session
    summaries through a TerminalWriter.

    NOTE(review): some attributes referenced below (_sessionstarttime,
    _getfailureheadline, write_platinfo) are not defined in this class;
    presumably supplied elsewhere or still missing — confirm before use.
    """

    def __init__(self, config, file=None):
        out = sys.stdout if file is None else file
        self.config = config
        self.stats = {}
        self.curdir = py.path.local()
        self._tw = TerminalWriter(out)
        self.currentfspath = None
        self._reportopt = getreportopt(config.getvalue('report'))

    def hasopt(self, name):
        """True when the report option *name* was requested on the CLI."""
        return self._reportopt.get(name, False)

    def write_fspath_result(self, fspath, res):
        """Append *res* to the progress line for *fspath*, opening a new
        line whenever the file under test changes."""
        rel = self.curdir.bestrelpath(fspath)
        if rel != self.currentfspath:
            self._tw.line()
            # NOTE(review): bestrelpath is applied a second time to an
            # already-relativized path here — looks redundant; confirm.
            self._tw.write(self.curdir.bestrelpath(rel) + " ")
            self.currentfspath = rel
        self._tw.write(res)

    def write_ensure_prefix(self, prefix, extra="", **kwargs):
        """Make sure *prefix* starts the current line, then write *extra*."""
        if prefix != self.currentfspath:
            self._tw.line()
            self.currentfspath = prefix
            self._tw.write(prefix)
        if extra:
            self._tw.write(extra, **kwargs)
            # sentinel: force a fresh line on the next prefixed write
            self.currentfspath = -2

    def ensure_newline(self):
        """Terminate any in-progress line before block output."""
        if not self.currentfspath:
            return
        self._tw.line()
        self.currentfspath = None

    def write_line(self, line, **markup):
        """Write *line* (coerced to str) on its own line."""
        self.ensure_newline()
        self._tw.line(str(line), **markup)

    def write_sep(self, sep, title=None, **markup):
        """Write a full-width separator, optionally titled."""
        self.ensure_newline()
        self._tw.sep(sep, title, **markup)

    def getoutcomeword(self, rep):
        """Map a test report to its display word plus markup.

        NOTE(review): the skipped branch returns a bare string while every
        other branch returns a (word, markup) pair — callers apparently
        tolerate this asymmetry; confirm before normalizing.
        """
        if rep.passed:
            return "PASS", dict(green=True)
        if rep.failed:
            return "FAIL", dict(red=True)
        if rep.skipped:
            return "SKIP"
        return "???", dict(red=True)

    #
    # summaries for sessionfinish
    #

    def summary_failures(self):
        """Print one titled section per failed test report."""
        if 'failed' not in self.stats or self.config.option.tbstyle == "no":
            return
        self.write_sep("=", "FAILURES")
        for rep in self.stats['failed']:
            self.write_sep("_", self._getfailureheadline(rep))
            self.write_platinfo(rep)
            rep.toterminal(self._tw)

    def summary_errors(self):
        """Print one titled section per error report, annotated with the
        phase (collection / setup / teardown) the error occurred in."""
        if 'error' not in self.stats or self.config.option.tbstyle == "no":
            return
        self.write_sep("=", "ERRORS")
        for rep in self.stats['error']:
            msg = self._getfailureheadline(rep)
            if not hasattr(rep, 'when'):
                # a report without `when` comes from collection
                msg = "ERROR during collection " + msg
            elif rep.when == "setup":
                msg = "ERROR at setup of " + msg
            elif rep.when == "teardown":
                msg = "ERROR at teardown of " + msg
            self.write_sep("_", msg)
            self.write_platinfo(rep)
            rep.toterminal(self._tw)

    def summary_stats(self):
        """Print the closing '... in N seconds' statistics line."""
        session_duration = py.std.time.time() - self._sessionstarttime

        keys = "failed passed skipped deselected".split()
        keys.extend(k for k in self.stats.keys() if k not in keys)
        parts = []
        for key in keys:
            entries = self.stats.get(key, None)
            if entries:
                parts.append("%d %s" % (len(entries), key))
        line = ", ".join(parts)
        # XXX coloring
        self.write_sep("=", "%s in %.2f seconds" % (line, session_duration))
|
||||
|
|
@ -0,0 +1,139 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
|
||||
Helper functions for writing to terminals and files.
|
||||
XXX: Currently in progress, NOT IN WORKING STATE.
|
||||
|
||||
"""
|
||||
|
||||
import sys, os
|
||||
|
||||
def _getdimensions():
    """Query the controlling terminal (fd 0) for its size via the
    TIOCGWINSZ ioctl; returns (height, width) in character cells."""
    import termios, fcntl, struct
    winsize = fcntl.ioctl(0, termios.TIOCGWINSZ, "\000" * 8)
    rows, cols, _xpixel, _ypixel = struct.unpack("hhhh", winsize)
    return rows, cols
|
||||
|
||||
def get_terminal_width():
    """Best-effort terminal width in columns, never less than 40.

    Falls back to $COLUMNS - 1 (default 79) when the tty size cannot be
    queried.  The original used a bare ``except:`` here; it is narrowed to
    ``Exception`` so exit-type signals still propagate cleanly.
    """
    try:
        _height, width = _getdimensions()
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        # FALLBACK: no tty / non-POSIX platform / ioctl refused
        width = int(os.environ.get('COLUMNS', 80)) - 1
    # XXX the windows getdimensions may be bogus, let's sanify a bit
    width = max(width, 40)  # we always need at least 40 chars
    return width


# module-level cache of the width at import time
terminal_width = get_terminal_width()
|
||||
|
||||
# XXX unify with _escaped func below
def ansi_print(text, file=None, newline=True, flush=False):
    """Write stripped *text* to *file* (default: sys.stderr).

    :param newline: append a trailing newline
    :param flush: flush the stream after writing

    Fix: the original unconditionally called ``file.close()`` after every
    write, which closed ``sys.stderr``/``sys.stdout`` on first use and made
    any further output impossible.  The stream is now left open; closing is
    the caller's responsibility.
    """
    if file is None:
        file = sys.stderr
    text = text.strip()
    if newline:
        text += '\n'
    file.write(text)
    if flush:
        file.flush()
|
||||
|
||||
def should_do_markup(file):
    """True when *file* reports being an interactive terminal and the
    TERM environment variable does not declare a dumb terminal."""
    if not hasattr(file, 'isatty'):
        return False
    return file.isatty() and os.environ.get('TERM') != 'dumb'
|
||||
|
||||
class TerminalWriter(object):
    """Writes text to a terminal-like stream, with optional ANSI markup
    and full-width separator lines.

    Markup is only emitted when the target stream looks like a real,
    non-dumb tty (see should_do_markup).
    """

    # markup keyword -> ANSI SGR code: lowercase names are foreground
    # colors, capitalized names background colors, the rest font attributes
    _esctable = dict(black=30, red=31, green=32, yellow=33,
                     blue=34, purple=35, cyan=36, white=37,
                     Black=40, Red=41, Green=42, Yellow=43,
                     Blue=44, Purple=45, Cyan=46, White=47,
                     bold=1, light=2, blink=5, invert=7)

    def __init__(self, file=None, encoding=None):
        # encoding used by _getbytestring on python 2 unicode input
        self.encoding = encoding

        if file is None:
            file = sys.stdout

        elif hasattr(file, '__call__'):
            # a bare callable is adapted to a file-like object
            file = WriteFile(file, encoding=encoding)
        self._file = file
        self.fullwidth = get_terminal_width()
        self.hasmarkup = should_do_markup(file)

    def _escaped(self, text, esc):
        # wrap *text* in the given SGR codes (and a reset), only when
        # markup is enabled for this writer
        if esc and self.hasmarkup:
            text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
                    text +'\x1b[0m')
        return text

    def markup(self, text, **kw):
        """Return *text* decorated per keyword flags (e.g. red=True,
        bold=True); an unknown keyword raises ValueError."""
        esc = []
        for name in kw:
            if name not in self._esctable:
                raise ValueError("unknown markup: %r" %(name,))
            if kw[name]:
                esc.append(self._esctable[name])
        return self._escaped(text, tuple(esc))

    def sep(self, sepchar, title=None, fullwidth=None, **kw):
        """Write a separator line of *sepchar*, optionally centering
        *title*, as wide as possible without exceeding *fullwidth*."""
        if fullwidth is None:
            fullwidth = self.fullwidth
        # the goal is to have the line be as long as possible
        # under the condition that len(line) <= fullwidth
        if title is not None:
            # we want 2 + 2*len(fill) + len(title) <= fullwidth
            # i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth
            # 2*len(sepchar)*N <= fullwidth - len(title) - 2
            # N <= (fullwidth - len(title) - 2) // (2*len(sepchar))
            N = (fullwidth - len(title) - 2) // (2*len(sepchar))
            fill = sepchar * N
            line = "%s %s %s" % (fill, title, fill)
        else:
            # we want len(sepchar)*N <= fullwidth
            # i.e. N <= fullwidth // len(sepchar)
            line = sepchar * (fullwidth // len(sepchar))
        # in some situations there is room for an extra sepchar at the right,
        # in particular if we consider that with a sepchar like "_ " the
        # trailing space is not important at the end of the line
        if len(line) + len(sepchar.rstrip()) <= fullwidth:
            line += sepchar.rstrip()

        self.line(line, **kw)

    def write(self, s, **kw):
        """Write *s* (converted via _getbytestring, marked up when
        requested) and flush; empty/falsy input is a no-op."""
        if s:
            s = self._getbytestring(s)
            if self.hasmarkup and kw:
                s = self.markup(s, **kw)
            self._file.write(s)
            self._file.flush()

    def _getbytestring(self, s):
        # XXX review this and the whole logic
        # python2-only encoding branch: on python 3 the version check
        # short-circuits before the (py2-only) `unicode` name is evaluated
        if self.encoding and sys.version_info < (3,0) and isinstance(s, unicode):
            return s.encode(self.encoding)
        elif not isinstance(s, str):
            return str(s)
        return s

    def line(self, s='', **kw):
        """Write *s* followed by a newline."""
        self.write(s, **kw)
        self.write('\n')
|
||||
|
||||
class WriteFile(object):
    """File-like adapter around a bare write callable.

    Lets TerminalWriter accept a function instead of a stream; when an
    *encoding* is given, data is encoded before being handed over.
    """

    def __init__(self, writemethod, encoding=None):
        self.encoding = encoding
        self._writemethod = writemethod

    def write(self, data):
        payload = data.encode(self.encoding) if self.encoding else data
        self._writemethod(payload)

    def flush(self):
        # nothing is buffered locally; writes go straight to the callable
        return
|
||||
|
|
@ -0,0 +1,34 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import sys
|
||||
from creole.lint import warnsymb
|
||||
|
||||
|
||||
class Warn:
    """One lint warning category together with the variables it flagged.

    Fix: ``dict.has_key()`` (removed in Python 3) replaced with
    ``dict.setdefault`` grouping; behavior is otherwise unchanged.
    """

    def __init__(self, write_level, itemname, warnno, comment, checks):
        # NOTE(review): `itemname` is accepted but never stored; kept in the
        # signature for compatibility with existing callers.
        self.warnno = warnno
        self.comment = comment
        self.checks = checks
        # resolve the symbolic level name (e.g. 'error') to its number
        self.write_level = getattr(warnsymb, write_level)

    def to_dict(self):
        """Group the flagged variables by source file.

        :returns: dict mapping file name -> list of (line, variable);
                  variables without a `location` attribute are grouped
                  under the reserved key 'dictionnaire'.
        :raises Exception: if a variable claims to be located in a file
                  named 'dictionnaire' (reserved).
        """
        dico_loc = {}
        for var in self.checks:
            if hasattr(var, 'location'):
                for vfile, vline in var.location:
                    if vfile == 'dictionnaire':
                        raise Exception('vfile ne doit pas se nommer dictionnaire !!!')
                    dico_loc.setdefault(vfile, []).append((vline, var))
            else:
                dico_loc.setdefault('dictionnaire', []).append((None, var))
        return dico_loc
|
|
@ -0,0 +1,33 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Standard errno symbols
|
||||
"""
|
||||
|
||||
"""Dictionary providing a mapping from the errno value to the string
|
||||
name in the underlying waring. For instance,
|
||||
errno.errorcode[errno.EPERM] maps to 'EPERM'."""
|
||||
|
||||
# numeric severity -> symbolic level name
errorlevel = {1: 'error', 2: 'warning', 3: 'info'}

# warning number -> (category label, severity)
errorcode = {
    1: ('ERROR', 1), 2: ('WARNING', 2), 3: ('INFO', 3),
    4: ('NAME', 1), 5: ('NAME', 2), 6: ('NAME', 3),
    7: ('UNUSED', 1), 8: ('UNUSED', 2), 9: ('UNUSED', 3),
}

globs = globals()

# export each level name (error, warning, info) as a module-level constant
for key, value in errorlevel.items():
    globs[value] = key
|
||||
|
||||
|
|
@ -0,0 +1,873 @@
|
|||
"""creole loader
|
||||
flattened XML specific
|
||||
"""
|
||||
from os.path import join, isfile, isdir
|
||||
from os import listdir
|
||||
#from ast import literal_eval
|
||||
from lxml.etree import parse, DTD
|
||||
|
||||
from tiramisu.option import (UnicodeOption, OptionDescription, PortOption,
|
||||
IntOption, ChoiceOption, BoolOption, SymLinkOption, IPOption,
|
||||
NetworkOption, NetmaskOption, DomainnameOption, BroadcastOption,
|
||||
URLOption, EmailOption, FilenameOption, UsernameOption, DateOption,
|
||||
PasswordOption, BoolOption, Leadership)
|
||||
from tiramisu import Config, MetaConfig, MixConfig
|
||||
from tiramisu.setting import groups
|
||||
from tiramisu.error import ConfigError
|
||||
from tiramisu.setting import owners
|
||||
from tiramisu import Params, ParamOption, ParamValue, ParamContext
|
||||
|
||||
from .config import (FLATTENED_CREOLE_DIR, dtdfilename, eoledirs, eoleextradico, forbiddenextra,
|
||||
configeol, eoleextraconfig)
|
||||
from .i18n import _
|
||||
from .var_loader import convert_tiramisu_value, modes_level, MACOption # FIXME YO
|
||||
from .loader1 import load_config_eol, load_extras, _list_extras
|
||||
#For compatibility
|
||||
from .loader1 import config_save_values, config_load_store, config_get_values, add_eol_version
|
||||
from .loader1 import load_store, load_config_store, load_values
|
||||
from .xmlreflector import HIGH_COMPATIBILITY
|
||||
#from . import eosfunc
|
||||
from .objspace import CreoleObjSpace
|
||||
import imp
|
||||
|
||||
|
||||
class CreoleLoaderError(Exception):
    # Raised for any structural problem met while loading the flattened
    # Creole XML (unknown tag, duplicate path, invalid boolean value, ...).
    pass
|
||||
|
||||
# Creole XML "type" attribute -> tiramisu option class to instantiate,
# plus optional extra constructor keyword arguments ('initkwargs').
CONVERT_OPTION = {'number': dict(opttype=IntOption),
                  'choice': dict(opttype=ChoiceOption),
                  'string': dict(opttype=UnicodeOption),
                  'password': dict(opttype=PasswordOption),
                  'mail': dict(opttype=EmailOption),
                  'boolean': dict(opttype=BoolOption),
                  'symlink': dict(opttype=SymLinkOption),
                  'filename': dict(opttype=FilenameOption),
                  'date': dict(opttype=DateOption),
                  'unix_user': dict(opttype=UsernameOption),
                  'ip': dict(opttype=IPOption, initkwargs={'allow_reserved': True}),
                  'local_ip': dict(opttype=IPOption, initkwargs={'private_only': True, 'warnings_only': True}),
                  'netmask': dict(opttype=NetmaskOption),
                  'network': dict(opttype=NetworkOption),
                  'broadcast': dict(opttype=BroadcastOption),
                  'netbios': dict(opttype=DomainnameOption, initkwargs={'type_': 'netbios', 'warnings_only': True}),
                  'domain': dict(opttype=DomainnameOption, initkwargs={'type_': 'domainname', 'allow_ip': True, 'allow_without_dot': True}),
                  'domain_strict': dict(opttype=DomainnameOption, initkwargs={'type_': 'domainname', 'allow_ip': False}),
                  'hostname': dict(opttype=DomainnameOption, initkwargs={'type_': 'hostname', 'allow_ip': True}),
                  'hostname_strict': dict(opttype=DomainnameOption, initkwargs={'type_': 'hostname', 'allow_ip': False}),
                  'web_address': dict(opttype=URLOption, initkwargs={'allow_ip': True, 'allow_without_dot': True}),
                  'port': dict(opttype=PortOption, initkwargs={'allow_private': True}),
                  'mac': dict(opttype=MACOption) # FIXME YO
                  }


# XML attributes consumed structurally by the loader, never copied onto
# the created option.
REMOVED_ATTRIB = ['path', 'type']
|
||||
|
||||
|
||||
class Elt(object):
    # Minimal stand-in for an XML element: only carries an attribute
    # mapping.  Used to create the synthetic root family 'baseoption'.
    def __init__(self, attrib):
        self.attrib = attrib
|
||||
|
||||
|
||||
class PopulateTiramisuObjects(object):
    """Builds a tiramisu option tree from the flattened Creole XML.

    Expected workflow: parse_dtd() first (to learn which XML attributes
    are booleans), then make_tiramisu_objects() on the XML root, finally
    build() to obtain the resulting Config/MetaConfig.
    """

    def __init__(self):
        # path -> Variable/Family wrapper, shared by every parsing step
        self.storage = ElementStorage()
        # names of <variable> attributes whose DTD values are True/False
        self.booleans = []
        # paths carrying force_store_value / auto_freeze properties
        self.force_store_values = set()
        # element name -> (text, never_hidden) separator information
        self.separators = {}
        # master variable name -> list of its slave variable names
        self.groups = {}

    def parse_dtd(self, dtdfilename):
        """Loads the Creole DTD

        :raises IOError: if the DTD is not found

        :param dtdfilename: the full filename of the Creole DTD
        """
        if not isfile(dtdfilename):
            raise IOError(_("no such DTD file: {}").format(dtdfilename))
        with open(dtdfilename, 'r') as dtdfd:
            dtd = DTD(dtdfd)
        # collect the <variable> attributes declared as True/False enums so
        # their string values can be converted to real booleans later on
        for elt in dtd.iterelements():
            if elt.name == 'variable':
                for attr in elt.iterattributes():
                    if set(attr.itervalues()) == set(['True', 'False']):
                        self.booleans.append(attr.name)

    def make_tiramisu_objects(self, xmlroot, creolefunc_file, load_extra=True):
        """Walk *xmlroot* and populate the storage with families, variables
        and constraints.

        :param creolefunc_file: python file providing the eole functions
                                referenced by fills/checks (loaded as
                                module `eosfunc`)
        :param load_extra: when False, only `creole`/`containers` content
                           is kept
        """
        elt = Elt({'name': 'baseoption'})
        family = Family(elt, self.booleans)
        self.storage.add('.', family)
        # NOTE(review): `imp` is deprecated (importlib is the python 3
        # replacement) — confirm the targeted interpreter version.
        self.eosfunc = imp.load_source('eosfunc', creolefunc_file)

        # group the root's children by tag
        elts = {}
        for elt in xmlroot:
            elts.setdefault(elt.tag, []).append(elt)
        # families must be processed before help/constraints entries
        list_elts = list(elts.keys())
        if 'family' in list_elts:
            list_elts.remove('family')
            list_elts.insert(0, 'family')
        for elt in list_elts:
            xmlelts_ = elts[elt]
            if elt == 'family':
                xmlelts = []
                actions = None
                # `creole` family has to be loaded before any other family
                # because `extra` family could use `creole` variables.
                # `actions` family has to be loaded at the very end
                # because it may use `creole` or `extra` variables
                for xml in xmlelts_:
                    if not load_extra and xml.attrib['name'] not in ['creole', 'containers']:
                        continue
                    if xml.attrib['name'] == 'creole':
                        xmlelts.insert(0, xml)
                    elif xml.attrib['name'] == 'actions':
                        actions = xml
                    else:
                        xmlelts.append(xml)
                if actions is not None:
                    xmlelts.append(actions)
            else:
                xmlelts = xmlelts_
            for xmlelt in xmlelts:
                if xmlelt.tag == 'family':
                    self._iter_family(xmlelt, family=family)
                elif xmlelt.tag == 'help':
                    self._iter_help(xmlelt)
                elif xmlelt.tag == 'constraints':
                    self._iter_constraints(xmlelt, load_extra)
                else:
                    raise CreoleLoaderError(_('unknown tag {}').format(xmlelt.tag))

    def _populate_variable(self, elt, subpath, is_slave, is_master):
        """Create a Variable wrapper for *elt*, register it under its
        dotted path and remember force-store paths."""
        variable = Variable(elt, self.booleans, self.storage, is_slave, is_master, self.eosfunc)
        path = self._build_path(subpath, elt)
        properties = variable.attrib.get('properties', [])
        if 'force_store_value' in properties or "auto_freeze" in properties:
            self.force_store_values.add(path)
        self.storage.add(path, variable)
        return variable

    def _populate_family(self, elt, subpath):
        """Create and register a Family wrapper for *elt*; families outside
        `containers`/`actions` get a forced icon."""
        if subpath is None:
            force_icon = False
        else:
            force_icon = not subpath.startswith('containers') and not subpath.startswith('actions')
        family = Family(elt, self.booleans, force_icon)
        path = self._build_path(subpath, elt)
        self.storage.add(path, family)
        return family

    def _build_path(self, subpath, elt):
        """Append *elt*'s name to the dotted *subpath* (or start one)."""
        if subpath is None:
            subpath = elt.attrib['name']
        else:
            subpath += '.' + elt.attrib['name']
        return subpath

    def _iter_constraints(self, xmlelt, load_extra):
        """Dispatch each <constraints> child to its dedicated parser."""
        for elt in xmlelt:
            if elt.tag == 'fill':
                self._parse_fill(elt, load_extra)
            elif elt.tag == 'condition':
                self._parse_condition(elt, load_extra)
            elif elt.tag == 'check':
                self._parse_check(elt, load_extra)
            else:
                raise CreoleLoaderError(_('unknown constraint {}').format(elt.tag))

    def _check_extra(self, variable, load_extra):
        """True when the *variable* path may be used: always with
        load_extra, otherwise only creole/containers paths."""
        if load_extra:
            return True
        return variable.startswith('creole.') or variable.startswith('containers.')


    def _parse_fill(self, elt, load_extra):
        """Register a <fill>: an eosfunc callback computing the target's
        value, with its parameters converted per their declared type."""
        if not self._check_extra(elt.attrib['target'], load_extra):
            return
        callback = getattr(self.eosfunc, elt.attrib['name'])
        callback_params = {}
        for param in elt:
            name = param.attrib.get('name', '')
            if param.attrib['type'] == 'string':
                value = str(param.text)
            elif param.attrib['type'] == 'eole':
                # 'hidden' flag is inverted here: XML True -> python False
                # NOTE(review): looks deliberate (it feeds ParamOption's
                # second argument) but confirm the intended semantics.
                hidden = param.attrib['hidden']
                if hidden == 'True':
                    hidden = False
                elif hidden == 'False':
                    hidden = True
                else:
                    raise CreoleLoaderError(_('unknown hidden boolean {}').format(hidden))
                if not self._check_extra(param.text, load_extra):
                    return
                value = [self.storage.get(param.text), hidden]
            elif param.attrib['type'] == 'number':
                value = int(param.text)
            elif param.attrib['type'] == 'context':
                value = (None,)
            else:
                raise CreoleLoaderError(_('unknown param type {} in fill to {}').format(param.attrib['type'], elt.attrib['target']))
            callback_params.setdefault(name, []).append(value)
        if callback_params == {}:
            callback_params = None
        self.storage.add_callback(elt.attrib['target'], callback, callback_params)

    def _parse_check(self, elt, load_extra):
        """Register a <check>: either a tiramisu consistency (valid_differ,
        *_netmask, broadcast, in_network with eole params) or a plain
        validator function looked up in eosfunc."""
        if not self._check_extra(elt.attrib['target'], load_extra):
            return
        all_param_eole = True
        for param in elt:
            if param.attrib.get('type') != 'eole':
                all_param_eole = False
                break
        if elt.attrib['name'] == 'valid_enum':
            # only for valid_enum with checkval to True
            if len(elt) != 1:
                raise CreoleLoaderError(_('valid_enum cannot have more than one param for {}').format(elt.attrib['target']))
            if elt.attrib['probe'] == 'True':
                proposed = elt[0].text
                type_ = 'string'
            elif elt[0].attrib['type'] == 'eole':
                proposed = elt[0].text
                type_ = 'eole'
            else:
                #proposed_value = literal_eval(elt[0].text)
                # NOTE(review): eval() on dictionary-provided text — input is
                # assumed trusted; literal_eval would be safer.
                proposed_value = eval(elt[0].text)
                proposed = tuple(proposed_value)
                type_ = 'string'
            self.storage.add_information(elt.attrib['target'], 'proposed_value', {'value': proposed, 'type': type_})

            # NOTE(review): this validator appears to never be registered
            # (add_validator is only reached via the final else branch) —
            # confirm whether valid_enum should also register a validator.
            validator = getattr(self.eosfunc, elt.attrib['name'])
        elif elt.attrib['name'] == 'valid_differ' and all_param_eole:
            if (HIGH_COMPATIBILITY and len(elt) not in [0, 1]) or (not HIGH_COMPATIBILITY and len(elt) != 1):
                raise CreoleLoaderError(_('valid_differ length should be 1'))
            if HIGH_COMPATIBILITY and len(elt) == 1:
                if not self._check_extra(elt[0].text, load_extra):
                    return
                variables = [self.storage.get(elt[0].text)]
            else:
                variables = []
            self.storage.add_consistency(elt.attrib['target'],
                                         'not_equal',
                                         variables,
                                         elt.attrib['warnings_only'],
                                         elt.attrib['transitive'])
        elif elt.attrib['name'] == 'valid_networknetmask':
            if len(elt) != 1:
                raise CreoleLoaderError(_('valid_networknetmask length should be 1'))
            if not all_param_eole:
                raise CreoleLoaderError(_('valid_networknetmask must have only eole variable'))
            variables = [self.storage.get(elt[0].text)]
            self.storage.add_consistency(elt.attrib['target'],
                                         'network_netmask',
                                         variables,
                                         elt.attrib['warnings_only'],
                                         elt.attrib['transitive'])
        elif elt.attrib['name'] == 'valid_ipnetmask':
            if len(elt) != 1:
                raise CreoleLoaderError(_('valid_ipnetmask length should be 1'))
            if not all_param_eole:
                raise CreoleLoaderError(_('valid_ipnetmask must have only eole variable'))
            if not self._check_extra(elt[0].text, load_extra):
                return
            variables = [self.storage.get(elt[0].text)]
            self.storage.add_consistency(elt.attrib['target'],
                                         'ip_netmask',
                                         variables,
                                         elt.attrib['warnings_only'],
                                         elt.attrib['transitive'])
        elif elt.attrib['name'] == 'valid_broadcast':
            if len(elt) != 2:
                raise CreoleLoaderError(_('valid_broadcast length should be 2'))
            if not all_param_eole:
                raise CreoleLoaderError(_('valid_broadcast must have only eole variable'))
            if not self._check_extra(elt[0].text, load_extra):
                return
            variables = [self.storage.get(elt[0].text)]
            if not self._check_extra(elt[1].text, load_extra):
                return
            variables.append(self.storage.get(elt[1].text))
            self.storage.add_consistency(elt.attrib['target'],
                                         'broadcast',
                                         variables,
                                         elt.attrib['warnings_only'],
                                         elt.attrib['transitive'])
        elif elt.attrib['name'] == 'valid_in_network':
            if len(elt) != 2:
                raise CreoleLoaderError(_('valid_in_network length should be 2'))
            if not all_param_eole:
                raise CreoleLoaderError(_('valid_in_network must have only eole variable'))
            if not self._check_extra(elt[0].text, load_extra):
                return
            variables = [self.storage.get(elt[0].text)]
            if not self._check_extra(elt[1].text, load_extra):
                return
            variables.append(self.storage.get(elt[1].text))
            self.storage.add_consistency(elt.attrib['target'],
                                         'in_network',
                                         variables,
                                         elt.attrib['warnings_only'],
                                         elt.attrib['transitive'])
        else:
            # generic check: an eosfunc validator with converted parameters
            validator = getattr(self.eosfunc, elt.attrib['name'])
            validator_params = {}
            for param in elt:
                text = param.text
                if param.attrib['type'] == 'eole':
                    # same inverted 'hidden' convention as in _parse_fill
                    hidden = param.attrib.get('hidden', 'True')
                    if hidden == 'True':
                        hidden = False
                    elif hidden == 'False':
                        hidden = True
                    else:
                        raise CreoleLoaderError(_('unknown hidden boolean {}').format(hidden))
                    if not self._check_extra(text, load_extra):
                        return
                    text = [self.storage.get(text), hidden]
                validator_params.setdefault(param.attrib.get('name', ''), []).append(text)
            self.storage.add_validator(elt.attrib['target'], validator, validator_params)

    def _parse_condition(self, elt, load_extra):
        """Register a <condition>: turns *_if_in / *_if_not_in rules into
        tiramisu `requires` entries on each target option/family."""
        if not self._check_extra(elt.attrib['source'], load_extra):
            return
        if elt.attrib['name'] == 'disabled_if_in':
            actions = ['disabled']
            inverse = False
        elif elt.attrib['name'] == 'disabled_if_not_in':
            actions = ['disabled']
            inverse = True
        elif elt.attrib['name'] == 'auto_frozen_if_in':
            actions = ['frozen']
            inverse = False
        elif elt.attrib['name'] == 'frozen_if_in':
            actions = ['frozen', 'hidden', 'force_default_on_freeze']
            inverse = False
        elif elt.attrib['name'] == 'frozen_if_not_in':
            actions = ['frozen', 'hidden', 'force_default_on_freeze']
            inverse = True
        elif elt.attrib['name'] == 'mandatory_if_in':
            actions = ['mandatory']
            inverse = False
        elif elt.attrib['name'] == 'mandatory_if_not_in':
            actions = ['mandatory']
            inverse = True
        else:
            raise CreoleLoaderError(_('unknown condition type {} for {}').format(elt.attrib['name'], elt.attrib['source']))
        expected_values = []
        options = []
        for param in elt:
            if param.tag == 'param':
                expected_values.append(param.text)
            elif param.tag == 'target':
                if param.attrib['type'] in ['variable', 'family']:
                    if not self._check_extra(param.text, load_extra):
                        return
                    option = self.storage.get(param.text)
                    # NOTE(review): option_actions aliases the shared
                    # `actions` list, so remove() below also affects later
                    # targets of this same condition — confirm intent.
                    option_actions = actions
                    if 'force_store_value' in option.attrib.get('properties', []) and \
                            'force_default_on_freeze' in option_actions:
                        option_actions.remove('force_default_on_freeze')
                    options.append((param.text, option_actions))
        source = self.storage.get(elt.attrib['source'])
        # one requires entry per (action, expected value) pair
        for option, actions in options:
            conditions = []
            for action in actions:
                for expected in expected_values:
                    conditions.append({'option': source, 'expected': expected,
                                       'action': action, 'inverse': inverse})
            self.storage.add_requires(option, conditions)

    def _iter_help(self, xmlelt):
        """Attach each <help> entry's text to its named element."""
        for elt in xmlelt:
            self.storage.add_help(elt.attrib['name'], elt.text)

    def _iter_master(self, master, subpath):
        """Build a master/slaves group: the first child variable is the
        master, the following ones are its slaves."""
        subpath = self._build_path(subpath, master)
        family = Family(master, self.booleans)
        family.set_master()
        self.storage.add(subpath, family)
        master_name = None
        for var in master:
            if master_name is None:
                master_name = var.attrib['name']
                self.groups[master_name] = []
            else:
                self.groups[master_name].append(var.attrib['name'])
            self._iter_family(var, subpath=subpath, family=family)
        return family

    def _iter_family(self, child, subpath=None, family=None):
        """Recursive walk over family/variable/master/separators nodes,
        registering each element under its dotted path."""
        if child.tag not in ['family', 'variable', 'separators', 'master']:
            raise CreoleLoaderError(_('unknown tag {}').format(child.tag))
        if child.tag == 'family':
            old_family = family
            family = self._populate_family(child, subpath)
            if old_family is not None:
                old_family.add(family)
        if child.tag == 'master':
            master = self._iter_master(child, subpath)
            family.add(master)
        elif child.tag == 'separators':
            self._parse_separators(child)
        elif child.tag == 'variable':
            if family is None:
                raise CreoleLoaderError(_('variable without family'))

            # inside a master family: the variable named like the family is
            # the master, every other one is a slave
            is_slave = False
            is_master = False
            if family.is_master:
                if child.attrib['name'] != family.attrib['name']:
                    is_slave = True
                else:
                    is_master = True
            variable = self._populate_variable(child, subpath, is_slave, is_master)
            family.add(variable)
        elif len(child) != 0:
            # plain family node: recurse into its children
            subpath = self._build_path(subpath, child)
            for c in child:
                self._iter_family(c, subpath, family)

    def _parse_separators(self, separators):
        """Attach (text, never_hidden) separator info to each named element."""
        for separator in separators:
            elt = self.storage.get(separator.attrib['name'])
            never_hidden = separator.attrib.get('never_hidden')
            if never_hidden == 'True':
                never_hidden = True
            else:
                never_hidden = None
            info = (separator.text, never_hidden)
            self.separators[separator.attrib['name']] = info
            elt.add_information('separator', info)

    def build(self, persistent=False, session_id=None, meta_config=False):
        """Instantiate the tiramisu Config (or MetaConfig + MixConfig when
        *meta_config* is True) from the populated tree, remove the
        force_store_value property from the rw/ro modes and switch the
        config to read-only with the creole permissive modes."""
        if meta_config:
            optiondescription = self.storage.paths['.'].get()
            config = MetaConfig([],
                                optiondescription=optiondescription,
                                persistent=persistent,
                                session_id=session_id)
            mixconfig = MixConfig(children=[],
                                  optiondescription=optiondescription,
                                  persistent=persistent,
                                  session_id='m_' + session_id)
            config.config.add(mixconfig)
        else:
            config = Config(self.storage.paths['.'].get(),
                            persistent=persistent,
                            session_id=session_id)
        config.information.set('force_store_vars', self.force_store_values)
        config.information.set('force_store_values', list(self.force_store_values))
        # XXX really usefull?
        ro_append = frozenset(config.property.getdefault('read_only', 'append') - {'force_store_value'})
        rw_append = frozenset(config.property.getdefault('read_write', 'append') - {'force_store_value'})
        config.property.setdefault(ro_append, 'read_only', 'append')
        config.property.setdefault(rw_append, 'read_write', 'append')

        config.property.read_only()
        _modes = list(modes_level)
        _modes.append('hidden')
        config.permissive.set(frozenset(_modes))
        return config
|
||||
|
||||
|
||||
class ElementStorage(object):
    """Registry mapping dotted option paths to their Variable/Family
    wrappers; every add_* helper forwards to the element at that path.

    Fix: add_help fetched the element and then bypassed it with a second
    dict lookup — it now uses the fetched element, consistent with every
    sibling method.
    """

    def __init__(self):
        # path (str) -> Variable or Family wrapper
        self.paths = {}

    def add(self, path, elt):
        """Register *elt* under *path*; a path may only be added once."""
        if path in self.paths:
            raise CreoleLoaderError(_('path already loaded {}').format(path))
        self.paths[path] = elt

    def add_help(self, path, text):
        """Attach a help string to the element at *path*."""
        elt = self.get(path)
        elt.add_information('help', text)

    def add_callback(self, path, callback, callback_params):
        """Attach a value-computing callback to the element at *path*."""
        elt = self.get(path)
        elt.add_callback(callback, callback_params)

    def add_information(self, path, name, information):
        """Attach arbitrary named information to the element at *path*."""
        elt = self.get(path)
        elt.add_information(name, information)

    def add_validator(self, path, validator, validator_params):
        """Attach a validator function to the element at *path*."""
        elt = self.get(path)
        elt.add_validator(validator, validator_params)

    def add_consistency(self, path, consistence, variables, warnings_only, transitive):
        """Attach a tiramisu consistency rule to the element at *path*."""
        elt = self.get(path)
        elt.add_consistency(consistence, variables, warnings_only, transitive)

    def add_requires(self, path, requires):
        """Attach requires (conditional properties) to the element at *path*."""
        elt = self.get(path)
        elt.add_requires(requires)

    def get(self, path):
        """Return the element registered at *path*; raise when unknown."""
        if path not in self.paths:
            raise CreoleLoaderError(_('there is no element for path {}').format(path))
        return self.paths[path]
|
||||
|
||||
|
||||
class Variable(object):
|
||||
def __init__(self, elt, booleans, storage, is_slave, is_master, eosfunc):
|
||||
self.option = None
|
||||
self.informations = {}
|
||||
self.attrib = {}
|
||||
self.callbacks = []
|
||||
self.requires = []
|
||||
self.validator = None
|
||||
self.consistencies = []
|
||||
self.attrib['properties'] = []
|
||||
self.eosfunc = eosfunc
|
||||
for key, value in elt.attrib.items():
|
||||
if key in REMOVED_ATTRIB:
|
||||
continue
|
||||
#if key != 'name':
|
||||
# value = unicode(value)
|
||||
|
||||
if key in booleans:
|
||||
if value == 'True':
|
||||
value = True
|
||||
elif value == 'False':
|
||||
value = False
|
||||
else:
|
||||
raise CreoleLoaderError(_('unknown value {} for {}').format(value, key))
|
||||
self.attrib[key] = value
|
||||
convert_option = CONVERT_OPTION[elt.attrib['type']]
|
||||
self.object_type = convert_option['opttype']
|
||||
if elt.attrib['type'] == 'choice':
|
||||
if self.attrib.get('choice'):
|
||||
self.attrib['values'] = getattr(self.eosfunc, self.attrib.get('choice'))
|
||||
else:
|
||||
self.attrib['values'] = []
|
||||
for child in elt:
|
||||
if child.tag == 'choice':
|
||||
value = child.text
|
||||
if 'type' in child.attrib and child.attrib['type'] == 'number':
|
||||
value = int(value)
|
||||
if value is None:
|
||||
value = u''
|
||||
self.attrib['values'].append(value)
|
||||
self.attrib['values'] = tuple(self.attrib['values'])
|
||||
for child in elt:
|
||||
if "type" in child.attrib:
|
||||
type_ = CONVERT_OPTION[child.attrib['type']]['opttype']
|
||||
else:
|
||||
type_ = self.object_type
|
||||
if child.tag == 'property':
|
||||
self.attrib['properties'].append(child.text)
|
||||
elif child.tag == 'value':
|
||||
if self.attrib['multi'] and not is_slave:
|
||||
if 'default' not in self.attrib:
|
||||
self.attrib['default'] = []
|
||||
value = convert_tiramisu_value(child.text, type_)
|
||||
self.attrib['default'].append(value)
|
||||
if 'default_multi' not in self.attrib and not is_master:
|
||||
self.attrib['default_multi'] = value
|
||||
else:
|
||||
if 'default' in self.attrib:
|
||||
raise CreoleLoaderError(_('default value already set for {}'
|
||||
'').format(self.attrib['path']))
|
||||
value = convert_tiramisu_value(child.text, type_)
|
||||
if value is None: # and (elt.attrib['type'] != 'choice' or value not in self.attrib['values']):
|
||||
value = u''
|
||||
if is_slave:
|
||||
self.attrib['default_multi'] = value
|
||||
else:
|
||||
self.attrib['default'] = value
|
||||
if 'initkwargs' in convert_option:
|
||||
self.attrib.update(convert_option['initkwargs'])
|
||||
self.attrib['properties'] = tuple(self.attrib['properties'])
|
||||
if elt.attrib['type'] == 'symlink':
|
||||
del self.attrib['properties']
|
||||
del self.attrib['multi']
|
||||
self.attrib['opt'] = storage.get(self.attrib['opt'])
|
||||
|
||||
def add_information(self, key, value):
    """Record *value* under *key* in the option's informations.

    A key may only be set once; re-registering an existing key raises
    :class:`CreoleLoaderError`.
    """
    if key not in self.informations:
        self.informations[key] = value
    else:
        raise CreoleLoaderError(_('key already exists in information {}').format(key))
|
||||
|
||||
def add_callback(self, callback, callback_params):
    """Queue a (callback, params) pair to be applied when the option is built."""
    entry = (callback, callback_params)
    self.callbacks += [entry]
|
||||
|
||||
def add_requires(self, requires):
    """Append every entry of *requires* to this option's requirements list."""
    self.requires += list(requires)
|
||||
|
||||
def add_validator(self, validator, validator_params):
    """Install the validation function and its parameters (last call wins)."""
    pair = (validator, validator_params)
    self.validator = pair
|
||||
|
||||
def add_consistency(self, consistence, variables, warnings_only, transitive):
    """Queue a consistency rule (name, linked variables, severity, transitivity)."""
    rule = (consistence, variables, warnings_only, transitive)
    self.consistencies.append(rule)
|
||||
|
||||
def build_params(self, params):
    """Convert raw callback/validator parameters into a tiramisu ``Params`` object.

    *params* maps a keyword name (or ``''`` for positional arguments) to a
    sequence of raw values.  Each raw value is converted as follows:

    - a ``list`` is ``[variable, param_name]`` and becomes a ``ParamOption``
      built from the resolved tiramisu option;
    - the sentinel ``(None,)`` becomes a ``ParamContext``;
    - anything else is wrapped in a ``ParamValue``.

    Returns ``None`` unchanged when *params* is ``None``.
    """
    # BUG FIX: was `params != None` (PEP 8: compare to None with `is`);
    # also dropped the unused `new_values` accumulator.
    if params is not None:
        new_params = Params()
        for key, values in params.items():
            for value in values:
                if isinstance(value, list):
                    # [variable, param_name]: resolve the Variable to its
                    # concrete tiramisu option
                    value = ParamOption(value[0].get(), value[1])
                elif value == (None,):
                    value = ParamContext()
                else:
                    value = ParamValue(value)
                if key == '':
                    # positional argument: extend the args tuple
                    args = list(new_params.args)
                    args.append(value)
                    new_params.args = tuple(args)
                else:
                    new_params.kwargs[key] = value
        return new_params
    return params
|
||||
|
||||
def get(self):
    """Build (once) and return the tiramisu option for this variable.

    The option is created lazily from ``self.attrib`` and cached in
    ``self.option``; subsequent calls return the cached object.

    Raises:
        CreoleLoaderError: if the option or one of its consistencies
            cannot be created.
    """
    if self.option is None:
        if self.object_type is SymLinkOption:
            # a symlink stores another Variable in 'opt': resolve it to the
            # real tiramisu option before construction
            self.attrib['opt'] = self.attrib['opt'].get()
        for callback, callback_params in self.callbacks:
            # NOTE(review): only the last queued callback survives — each
            # iteration overwrites 'callback'/'callback_params'
            self.attrib['callback'] = callback
            self.attrib['callback_params'] = self.build_params(callback_params)
        for require in self.requires:
            if isinstance(require['option'], Variable):
                # resolve requirement targets to concrete tiramisu options
                require['option'] = require['option'].get()
        if self.requires != []:
            self.attrib['requires'] = self.requires
        if self.validator:
            self.attrib['validator'] = self.validator[0]
            self.attrib['validator_params'] = self.build_params(self.validator[1])
        try:
            # instantiate the tiramisu option class chosen from the XML type
            option = self.object_type(**self.attrib)
        except Exception as err:
            import traceback
            traceback.print_exc()
            name = self.attrib['name']
            raise CreoleLoaderError(_('cannot create option {}: {}').format(name, err))
        for key, value in self.informations.items():
            option.impl_set_information(key, value)
        for consistency in self.consistencies:
            # consistency = (name, variables, warnings_only, transitive);
            # the last two are XML string flags, hence the 'True'/'False' tests
            options = []
            for variable in consistency[1]:
                options.append(variable.get())
            try:
                kwargs = {}
                if consistency[2] == 'True':
                    kwargs['warnings_only'] = True
                if consistency[3] == 'False':
                    kwargs['transitive'] = False
                option.impl_add_consistency(consistency[0], *options, **kwargs)
            except ConfigError as err:
                name = self.attrib['name']
                raise CreoleLoaderError(_('cannot load consistency for {}: {}').format(name, err))
        self.option = option
    return self.option
|
||||
|
||||
|
||||
class Family(object):
    """A creole family (group of variables) mirrored onto a tiramisu
    OptionDescription (or Leadership for master/slave groups).

    Built from a flattened-XML <family> element; children (Variable or
    nested Family objects) are attached with :meth:`add` and the tiramisu
    object is created lazily by :meth:`get`.
    """

    def __init__(self, elt, booleans, force_icon=False):
        """Parse the XML element's attributes.

        :param elt: the XML <family> element
        :param booleans: attribute names whose 'True'/'False' strings must
            be converted to real booleans
        :param force_icon: pre-register an empty 'icon' information slot so
            a later add_information('icon', ...) is accepted
        """
        self.requires = []
        self.option = None
        self.attrib = {}
        self.is_master = False
        if force_icon:
            self.informations = {'icon': None}
        else:
            self.informations = {}
        self.children = []
        self.attrib['properties'] = []
        for key, value in elt.attrib.items():
            if key in REMOVED_ATTRIB:
                continue
            if key in booleans:
                if value == 'True':
                    value = True
                elif value == 'False':
                    value = False
                else:
                    raise CreoleLoaderError(_('unknown value {} for {}').format(value, key))
            if key == 'icon':
                # stored as an information, not an option attribute
                self.add_information('icon', value)
                continue
            elif key == 'hidden':
                if value:
                    self.attrib['properties'].append(key)
            elif key == 'mode':
                # the mode ('basic'/'normal'/'expert') becomes a property
                self.attrib['properties'].append(value)
            else:
                self.attrib[key] = value
        if 'doc' not in self.attrib:
            self.attrib['doc'] = u''
        self.attrib['properties'] = tuple(self.attrib['properties'])

    def add(self, child):
        """Attach a child (Variable or Family) to this family."""
        self.children.append(child)

    def add_information(self, key, value):
        """Record *value* under *key*; duplicates are refused, except for the
        'icon' placeholder created by force_icon (still None)."""
        if key in self.informations and not (key == 'icon' and self.informations[key] is None):
            raise CreoleLoaderError(_('key already exists in information {}').format(key))
        self.informations[key] = value

    def set_master(self):
        """Mark this family as a master/slave (leadership) group."""
        self.is_master = True

    def add_requires(self, requires):
        """Append requirement descriptions for this family."""
        self.requires.extend(requires)

    def get(self):
        """Build (once) and return the tiramisu OptionDescription/Leadership.

        Raises:
            CreoleLoaderError: if the tiramisu object cannot be created.
        """
        if self.option is None:
            self.attrib['children'] = []
            for child in self.children:
                self.attrib['children'].append(child.get())
            for require in self.requires:
                if isinstance(require['option'], Variable):
                    # resolve requirement targets to concrete tiramisu options
                    require['option'] = require['option'].get()
            if self.requires != []:
                self.attrib['requires'] = self.requires
            try:
                if not self.is_master:
                    option = OptionDescription(**self.attrib)
                else:
                    option = Leadership(**self.attrib)
                    #option = OptionDescription(**self.attrib)
            except Exception as err:
                raise CreoleLoaderError(_('cannot create optiondescription {}: {}').format(self.attrib['name'], err))
            for key, value in self.informations.items():
                option.impl_set_information(key, value)
            self.option = option
            #if self.is_master:
            #    self.option.impl_set_group_type(groups.master)

        return self.option
|
||||
|
||||
|
||||
def _gen_eol_file(namespace):
    """Return the path of the values file (config.eol) for *namespace*.

    The 'creole' namespace uses the main configuration file; any other
    namespace lives under the extra-configuration tree.
    """
    if namespace != 'creole':
        return join(eoleextraconfig, namespace, 'config.eol')
    return configeol
|
||||
|
||||
|
||||
def creole_loader(load_values=True, rw=False, namespace='creole',
                  load_extra=False, reload_config=True, owner=None,
                  disable_mandatory=False, force_configeol=None,
                  try_upgrade=True, force_load_creole_owner=None,
                  force_dirs=None, warnings=None, force_instanciate=None,
                  force_dtdfile=None, force_flattened=None,
                  mandatory_permissive=True, from_zephir=None,
                  force_no_save=False, force_eoleextradico=None,
                  force_eoleextraconfig=None, only_load_flattened=False):
    """
    Loads the Creole XML dictionnary files and return a tiramisu config object

    :param bool load_values: Loads (or not) the :file:`config.eol` file
    :param bool rw: Config's read/write flag
    :param str namespace: Root's namespace for the config (example: "creole", "bacula", ...)
    :param bool load_extra: Loads (or not) the extra dictionnaries (if `namespace='creole'`)
    :param bool reload_config: This parameter is kept for compatibility reasons
    :param str owner: forces the owner on a modified variable
    :param bool disable_mandatory: disables the mandatory variables
    :param str force_configeol: Forces the used configuration file
    :param bool try_upgrade: tries to upgrade
    :param force_load_creole_owner: Forces the owner for the loaded variables
    :param str force_dirs: Forces the folder's name containing the dictionnaries
    :param warnings: Shows the validation's warnings
    :param bool force_instanciate: tells us if the server is already instanciated or not
    :param force_dtdfile: None or dtd filename
    :param force_flattened: None or flatened filename's name
    :param only_load_flattened: boolean to desactivate generated of flattened file
    """
    # BUG FIX: was `namespace is not 'creole'` — identity comparison with a
    # string literal only works by CPython interning accident; use equality.
    if namespace != 'creole':
        raise CreoleLoaderError(_('Only creole namespace is supported'))
    #if reload_config is not True:
    #    raise CreoleLoaderError(_('Cannot reload the configuration'))
    if force_flattened is None:
        force_flattened = join(FLATTENED_CREOLE_DIR, 'flattened_creole.xml')
    if force_dtdfile is None:
        force_dtdfile = dtdfilename
    if force_configeol is not None:
        if not isfile(force_configeol):
            raise CreoleLoaderError(_("Configuration file unexistent : {0}").format(
                force_configeol))
        if load_extra and force_eoleextraconfig is None:
            # if force_configeol, cannot calculate extra configfile name
            raise CreoleLoaderError(_('Unable to force_configeol with load_extra.'))
    if force_dirs is not None and load_extra is True and force_eoleextradico is None:
        raise CreoleLoaderError(_('If force_dirs is defined, namespace must be set to creole and '
                                  'load_extra must be set to False.'))
    if not only_load_flattened:
        # should not load values now because that would create a Config
        eolobj = CreoleObjSpace(force_dtdfile)
        if force_dirs is not None:
            dirs = force_dirs
        else:
            dirs = eoledirs
        # BUG FIX (idiom): `type(dirs) != list` -> isinstance
        if from_zephir is not None and not isinstance(dirs, list):
            # if dirs is not a list, add subdirectories 'local' and 'variante'
            orig_dir = dirs
            dirs = [dirs]
            for tdir in [join(orig_dir, 'local'),
                         join(orig_dir, 'variante')]:
                if isdir(tdir):
                    dirs.append(tdir)
        eolobj.create_or_populate_from_xml('creole', dirs, from_zephir=from_zephir)

        if load_extra:
            if force_eoleextradico is None:
                force_eoleextradico = eoleextradico
            extranames = _list_extras(force_eoleextradico)
            extranames.sort()
            if isdir(force_eoleextradico):
                for directory in extranames:
                    if directory in forbiddenextra:
                        raise CreoleLoaderError(
                            _('Namespace {} for extra dictionary not allowed').format(directory))
                    dirname = join(force_eoleextradico, directory)
                    eolobj.create_or_populate_from_xml(directory, [dirname], from_zephir)
        eolobj.space_visitor()
        xmlroot = eolobj.save(force_flattened, force_no_save)
    else:
        # reuse a previously generated flattened dictionary
        with open(force_flattened, 'r') as fhd:
            xmlroot = parse(fhd).getroot()
    tiramisu_objects = PopulateTiramisuObjects()
    tiramisu_objects.parse_dtd(force_dtdfile)
    tiramisu_objects.make_tiramisu_objects(xmlroot)
    config = tiramisu_objects.build()
    if warnings is None:
        # warnings is disabled in read-only mode and enabled in read-write mode by default
        warnings = rw
    if warnings is False:
        config.cfgimpl_get_settings().remove('warnings')
    if owner is not None:
        if owner not in dir(owners):
            owners.addowner(owner)
        config.cfgimpl_get_settings().setowner(getattr(owners, owner))
    # load values
    if force_configeol is not None:
        configfile = force_configeol
    else:
        configfile = _gen_eol_file(namespace)
    if load_values and isfile(configfile):
        disable_mandatory = False
        load_config_eol(config, configfile=configfile, try_upgrade=try_upgrade,
                        force_load_owner=force_load_creole_owner,
                        force_instanciate=force_instanciate)
    else:
        # remember where values would be stored for a later save
        config.impl_set_information(namespace, configfile)
    if load_extra:
        load_extras(config, load_values=load_values, mandatory_permissive=mandatory_permissive,
                    extradico=force_eoleextradico, force_eoleextraconfig=force_eoleextraconfig)
    if rw:
        config.read_write()
    elif rw is False:
        config.read_only()

    if disable_mandatory:
        config.cfgimpl_get_settings().remove('mandatory')
        config.cfgimpl_get_settings().remove('empty')
    if from_zephir is not None:
        return tiramisu_objects.groups, tiramisu_objects.separators, config
    else:
        return config
|
|
@ -0,0 +1,769 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
#import cjson
|
||||
import json
|
||||
import fcntl
|
||||
import stat
|
||||
import logging
|
||||
|
||||
from os.path import isdir, isfile, join, basename, dirname, splitext
|
||||
from os import listdir, makedirs, major, minor
|
||||
from os import stat as os_stat
|
||||
from distutils.version import StrictVersion
|
||||
try:
|
||||
from collections import OrderedDict
|
||||
except:
|
||||
from pyeole.odict import OrderedDict
|
||||
|
||||
from tiramisu.option import UnicodeOption, OptionDescription, \
|
||||
IntOption, ChoiceOption, BoolOption, SymLinkOption, IPOption, \
|
||||
NetworkOption, NetmaskOption
|
||||
from tiramisu.error import PropertiesOptionError, LeadershipError
|
||||
from tiramisu.setting import owners
|
||||
|
||||
from .config import configeol, eoledirs, dtdfilename, eoleextradico, \
|
||||
eoleextraconfig, forbiddenextra, VIRTROOT, \
|
||||
VIRTBASE, VIRTMASTER, templatedir
|
||||
from .error import ConfigError
|
||||
from .var_loader import modes_level, CreoleFamily, CreoleConstraint, \
|
||||
CreoleVarLoader
|
||||
try:
|
||||
from .client import CreoleClient, CreoleClientError
|
||||
client = CreoleClient()
|
||||
except:
|
||||
client = None
|
||||
from pyeole.encode import normalize
|
||||
try:
|
||||
from .eosfunc import is_instanciate, get_version
|
||||
except:
|
||||
pass
|
||||
|
||||
from .i18n import _
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
class CreoleContainer():
    """
    Loads containers, files, packages, services, interfaces and disknods
    into the tiramisu tree.
    """
    def gen_containers(self, paths):
        """
        Generate Containers information in tiramisu tree

        :paths: paths variables (for added new option in paths's dictionnary)
        """
        containers = []
        for name, container in self._get_containers().items():
            # 'path' and 'ip' become symlinks to the per-container variables
            container['path'] = 'container_path_{0}'.format(name)
            container['ip'] = 'container_ip_{0}'.format(name)
            containers.append(container)

        key_type = {'id': IntOption, 'group': UnicodeOption,
                    'ip': SymLinkOption, 'path': SymLinkOption,
                    'level': UnicodeOption}

        return self._gen_tiramisu_config(paths, "container", containers,
                                         key_type)

    def gen_networks(self, paths):
        """Expose the br0 bridge addressing variables as symlinks in a
        dedicated 'network' OptionDescription."""
        var = []
        descr = None
        # namespace is the first path component of the br0 IP variable
        namespace = paths['adresse_ip_br0'].split('.')[0]
        for descr_ in self.space:
            if descr_._name == namespace:
                descr = descr_
                break
        if descr == None:
            raise Exception(_(u'Unable to find namespace: {0}').format(
                namespace))
        for name in ['adresse_ip_br0', 'adresse_netmask_br0',
                     'adresse_network_br0', 'adresse_broadcast_br0']:
            path = paths[name]
            # walk the description tree following the dotted path
            subpath = path.split('.')[1:]
            opt = descr
            for p in subpath:
                opt = getattr(opt, p)
            var.append(SymLinkOption(name, opt))
        return OptionDescription('network', '', var)

    def gen_interfaces(self, paths):
        """Add per container interface linked to inter-containers bridge

        Theses interfaces must come before other containers ones as
        default gateway.

        """
        lxc_net = OrderedDict()
        if self.containers_enabled:
            interfaces = OrderedDict()
            containers = self._get_containers()

            for name, container in containers.items():
                if name in ['all', 'root']:
                    continue
                # bridge interface template for each real container
                lxc_net[name] = {'name': 'containers',
                                 'container': name,
                                 'linkto': 'br0',
                                 'method': 'bridge',
                                 'ip': 'container_ip_{0}'.format(name),
                                 'mask': 'adresse_netmask_br0',
                                 'bcast': 'adresse_broadcast_br0',
                                 'gateway': 'adresse_ip_br0'}

            # Insert default interfaces before
            # (Python 2: dict.values() returns a list, so '+' concatenates)
            self.generic['interfaces'] = lxc_net.values() \
                + self.generic['interfaces']

        return self.gen_generic('interfaces', paths, copy_requires='ip')

    def gen_service_accesss(self, paths):
        """Generate service_access entries (note: historic triple-'s' name)."""
        return self.__gen_service_access_restriction('service_access', paths)

    def gen_service_restrictions(self, paths):
        """Generate service_restriction entries."""
        return self.__gen_service_access_restriction('service_restriction', paths)

    def __gen_service_access_restriction(self, service_type, paths):
        """Add services requires to service_access/service_restriction
        If a service is disabled, we remove, also, access to this service
        """
        generic_name = service_type + 's'
        list_name = service_type + 'list'
        if 'service' in self.requires:
            for gen in self.generic[generic_name]:
                service_name = gen['service']
                requires_name = gen.get(list_name)
                if requires_name is None:
                    # no explicit list: create an automatic one per service
                    requires_name = '___auto_{0}'.format(service_name)
                    gen[list_name] = requires_name
                    self.requires[service_type][requires_name] = {'optional': True, 'list': []}
                if service_name in self.requires['service']:
                    service_requires = self.requires['service'][service_name]['list']
                    # NOTE(review): this assignment re-sets the value it just
                    # tested to the same value (no-op); it may have been meant
                    # to clear the 'optional' flag on
                    # self.requires[service_type][requires_name] — confirm
                    if self.requires['service'][service_name]['optional'] is False:
                        self.requires['service'][service_name]['optional'] = False
                    self.requires[service_type][requires_name]['list'].extend(service_requires)
        return self.gen_generic(generic_name, paths, verify_exists_redefine=False)

    def _gen_file(self, fdata, container, containers):
        """Generate one file structure for one container

        :param fdata: file informations
        :type fdata: `dict`
        :param container: container of the file
        :type container: `dict`
        :return: file information for a container
        :rtype: `dict`

        """
        file_infos = fdata.copy()
        # take care of os.path.join and absolute part after first
        # argument.
        _file = fdata['name']
        if _file[0] == '/':
            _file = _file[1:]

        file_infos['container'] = container['name']
        file_infos['full_name'] = fdata['name']
        if self.containers_enabled and container['name'] != VIRTMASTER:
            # Prefix the full path with container rootfs
            if fdata['container'] == 'all':
                cont_grp = container['group']
            else:
                cont_grp = fdata['container']
            cont_name = self.get_real_container_name(containers, cont_grp)
            _file = join(VIRTROOT, cont_name, VIRTBASE, _file)
            file_infos['full_name'] = _file

        # template source defaults to the file's basename under templatedir
        source = file_infos.get('source', basename(_file))
        source = join(templatedir, source)
        file_infos['source'] = source
        return file_infos

    def gen_files(self, paths):
        """Generate the 'file' tiramisu configuration, expanding
        container='all' entries into one file per real container."""
        containers = self._get_containers()
        files = []
        for fdata in self.generic.get('files', []):
            if fdata['container'] == 'all':
                # Generate a file per container
                for container in containers.values():
                    if container['name'] in ['all', VIRTMASTER]:
                        continue
                    files.append(self._gen_file(fdata, container, containers))
            else:
                container = containers[fdata['container']]
                files.append(self._gen_file(fdata, container, containers))

        key_type = {'source': UnicodeOption, 'mode': UnicodeOption,
                    'full_name': UnicodeOption,
                    'owner': UnicodeOption, 'group': UnicodeOption,
                    'mkdir': BoolOption, 'rm': BoolOption,
                    'del_comment': UnicodeOption,
                    'level': UnicodeOption}
        return self._gen_tiramisu_config(paths, "file", files, key_type,
                                         requires_key='activate')

    def gen_disknods(self, paths):
        """Generate the 'disknod' configuration: inspect each device node on
        disk and record its type (block/char) and major/minor numbers."""
        containers = self._get_containers()
        disknods = []
        for fdata in self.generic.get('disknods', []):
            stats = os_stat(fdata['name'])
            if stat.S_ISBLK(stats.st_mode):
                dev_type = u'b'
                device = stats.st_rdev
            elif stat.S_ISCHR(stats.st_mode):
                dev_type = u'c'
                device = stats.st_rdev
            elif stat.S_ISDIR(stats.st_mode):
                # directories are exposed via the device of the filesystem
                dev_type = u'b'
                device = stats.st_dev
            else:
                dev_type = None
                device = None
            fdata['type'] = dev_type
            if device is not None:
                fdata['major'] = major(device)
                fdata['minor'] = minor(device)
            else:
                fdata['major'] = None
                fdata['minor'] = None
            fdata['mode'] = u'rwm'
            fdata['permission'] = 'allow'
            disknods.append(fdata)

        key_type = {'major': IntOption,
                    'minor': IntOption,
                    'name': UnicodeOption,
                    'permission': UnicodeOption,
                    'mode': UnicodeOption,
                    'type': UnicodeOption,
                    'level': UnicodeOption}
        return self._gen_tiramisu_config(paths, "disknod", disknods, key_type)

    def gen_packages(self, paths):
        # the last 'package' wins (when two package tags share the same
        # name inside the same container)
        return self.gen_generic('packages', paths, verify_exists_redefine=False)
|
||||
|
||||
|
||||
class CreoleLoader(CreoleVarLoader, CreoleContainer):
    """
    Loads the variables together with the containers.
    """
    pass
|
||||
|
||||
|
||||
def _gen_eol_file(namespace, root_path=None):
    """Return the values-file path (config.eol) for *namespace*.

    The 'creole' namespace maps to the main configuration file; any other
    namespace lives under *root_path* (default: the extra-config tree).
    """
    if namespace == 'creole':
        return unicode(configeol)
    base = eoleextraconfig if root_path is None else root_path
    return unicode(join(base, namespace, 'config.eol'))
|
||||
|
||||
|
||||
def _list_extras(extradico=eoleextradico):
|
||||
extranames = []
|
||||
if isdir(extradico):
|
||||
for directory in listdir(extradico):
|
||||
content = listdir(join(extradico, directory))
|
||||
if not len(content) == 0:
|
||||
extensions = [splitext(filename)[1] for filename in content]
|
||||
if ".xml" in extensions:
|
||||
extranames.append(directory)
|
||||
return extranames
|
||||
|
||||
|
||||
def set_mandatory_permissive(config, action):
    """Make every 'mandatory' option below the *action* family permissive.

    Does nothing when the description has no attribute named *action*.
    """
    root_descr = config.cfgimpl_get_description()
    action_descr = getattr(root_descr, action, None)
    if action_descr is None:
        return
    for family in action_descr.impl_getchildren():
        for option in family.impl_getchildren():
            if 'mandatory' in option.impl_getproperties():
                config.cfgimpl_get_settings().setpermissive(('mandatory',), option)
|
||||
|
||||
|
||||
def load_extras(config, load_values=True, mandatory_permissive=False, extradico=eoleextradico,
                force_eoleextraconfig=None):
    """Attach every extra dictionary namespace to *config* and optionally
    load (or bootstrap) its values file.

    :param config: tiramisu config to enrich
    :param bool load_values: load the per-extra config.eol files
    :param bool mandatory_permissive: relax mandatory options belonging to
        declared actions
    :param str extradico: root folder of the extra dictionaries
    :param force_eoleextraconfig: alternate root for the extras' value files
    """
    actions = set()
    if mandatory_permissive and hasattr(config, 'actions'):
        # collect the declared action names so their mandatory options can
        # be made permissive below
        for name, family in config.actions.iter_groups():
            for aname, action in family.iter_groups():
                actions.add(action.name)
    for extraname in _list_extras(extradico=extradico):
        # reserved namespaces cannot be used by extras
        if extraname in ['creole', 'containers', 'actions']:
            raise Exception(_('extra name {} not allowed').format(extraname))
        eol_file = _gen_eol_file(extraname, root_path=force_eoleextraconfig)
        config.impl_set_information(extraname, eol_file)
        if extraname in actions:
            set_mandatory_permissive(config, extraname)
        if not load_values:
            continue
        #if file not exists, create it (for auto_freeze value)
        if not isfile(eol_file):
            try:
                config_save_values(config, extraname, reload_config=False, check_mandatory=False)
            except PropertiesOptionError:
                # best effort: the file simply is not created
                pass
        if isfile(eol_file):
            config_load_values(config, extraname)
|
||||
|
||||
|
||||
def load_config_eol(config, configfile=None, try_upgrade=True, force_load_owner=None,
                    current_eol_version=None, force_instanciate=None):
    """Load the creole values file into *config*.

    When *configfile* is not given, the default creole config.eol path is
    used.  The file path is recorded as the 'creole' information, the raw
    values are loaded, then :func:`load_values` performs any needed
    version upgrade.
    """
    if not configfile:
        configfile = _gen_eol_file('creole')
    config.impl_set_information('creole', configfile)
    config_load_values(
        config,
        'creole',
        force_load_owner=force_load_owner,
        force_instanciate=force_instanciate,
    )
    load_values(
        config,
        configfile=configfile,
        try_upgrade=try_upgrade,
        force_load_owner=force_load_owner,
        current_eol_version=current_eol_version,
    )
|
||||
|
||||
def load_config_store(config, store, unset_default=False,
                      force_load_owner=None, current_eol_version=None,
                      force_instanciate=None, remove_unknown_vars=False,
                      try_upgrade=False):
    """used on Zéphir to upgrade values (2.4.X -> 2.4.X+1) on a configuration
    that has already been migrated (2.2/2.3 −> 2.4)
    """
    config_load_store(
        config,
        'creole',
        store,
        force_load_owner=force_load_owner,
        unset_default=unset_default,
        force_instanciate=force_instanciate,
    )
    load_values(
        config,
        try_upgrade=try_upgrade,
        force_load_owner=force_load_owner,
        current_eol_version=current_eol_version,
        remove_unknown_vars=remove_unknown_vars,
    )
|
||||
|
||||
def load_values(config, configfile=None, try_upgrade=True, force_load_owner=None,
                current_eol_version=None, remove_unknown_vars=False):
    """Post-process loaded values: attempt legacy (2.2/2.3) migration when
    the initial load failed, then upgrade step-by-step (2.4 -> 2.5 -> 2.6)
    up to *current_eol_version*, and finally drop unknown variables.

    :param config: tiramisu config whose values were just loaded
    :param configfile: path of the values file (used by the legacy upgrade)
    :param bool try_upgrade: attempt migrations/upgrades at all
    :param force_load_owner: owner forced on loaded variables
    :param current_eol_version: target EOLE version (default: installed one)
    :param bool remove_unknown_vars: clear variables unknown to the
        dictionaries once loading succeeded
    """
    load_error = config.impl_get_information('load_error', False)
    if load_error and try_upgrade:
        # Try to upgrade from a pre-2.4 (2.2/2.3) configuration file
        from .upgrade import upgrade
        try:
            store_dico, version = upgrade(config, configfile)
            config_load_store(config, 'creole', store_dico, unset_default=True, eol_version='1.0')
            config.impl_set_information('upgrade', version)
            remove_unknown_vars = True
            load_error = False
        except Exception as e:
            log.error(_('Error when trying to upgrade config file: {}').format(e))
            config.impl_set_information('load_error', True)
        #print "fichier de configuration invalide 2.2 ou 2.3: {0} : {1}".format(configfile, e)
    if current_eol_version == None:
        current_eol_version = get_version('EOLE_RELEASE')
    eol_version = str(config.impl_get_information('eol_version'))
    if try_upgrade and not load_error:
        # never load a file written by a NEWER release
        if StrictVersion(eol_version) > StrictVersion(current_eol_version):
            raise Exception(_('eol_version ({0}) is greater than current version ({1})').format(eol_version, current_eol_version))
        if StrictVersion(eol_version) < StrictVersion(current_eol_version):
            #can be used to edit lower versions on Zéphir
            from .upgrade24 import upgrade2
            try:
                # chained upgrades: each step brings the values one minor
                # release forward
                # 2.4.x (greater than 2.4.0)
                if StrictVersion(current_eol_version) >= StrictVersion('2.4.0') and StrictVersion(eol_version) < StrictVersion('2.5.0'):
                    upgrade2('2.4', eol_version, current_eol_version, config)
                # 2.5.x (greater than 2.5.0)
                if StrictVersion(current_eol_version) >= StrictVersion('2.5.0') and StrictVersion(eol_version) < StrictVersion('2.6.0'):
                    upgrade2('2.5', eol_version, current_eol_version, config)
                # 2.6.x (greater than 2.6.0)
                if StrictVersion(current_eol_version) >= StrictVersion('2.6.0') and StrictVersion(eol_version) < StrictVersion('2.7.0'):
                    upgrade2('2.6', eol_version, current_eol_version, config)
                if config.impl_get_information('upgrade', '') == '':
                    #set the version only if it is the first upgrade
                    config.impl_set_information('upgrade', eol_version)
            except Exception as e:
                log.error(_('Error when trying to upgrade config file: {}').format(normalize(str(e))))
                config.impl_set_information('upgrade', False)
                config.impl_set_information('load_error', True)

    if remove_unknown_vars:
        # clean unknown variables last (#9858)
        config.impl_set_information('unknown_options', {})
|
||||
|
||||
def creole_loader(load_values=True, rw=False, namespace='creole',
                  load_extra=False, reload_config=True, owner=None,
                  disable_mandatory=False, force_configeol=None,
                  try_upgrade=True, force_load_creole_owner=None,
                  force_dirs=None, warnings=None, force_instanciate=None):
    """
    Loads the Creole dictionaries and returns a tiramisu Config.

    :load_values: boolean. Load (or not) the config.eol file (default True)
    :rw: boolean. Working mode (read-only or read/write)
    :namespace: string. Working namespace (e.g. "creole", "bacula", ...)
    :load_extra: boolean. Load (or not) the extra dictionaries (if namespace='creole')
    :reload_config: boolean. Kept for backward compatibility only, no longer
                    meaningful; do not use
    :owner: string. Owner forced on modified variables
    :disable_mandatory: boolean.
    :force_configeol: string. Force the configuration file to use
    :try_upgrade: boolean.
    :force_dirs: string. Force the directory containing the dictionaries
    :force_load_creole_owner: Owner forced on loaded variables
    :warnings: show validation warnings
    """
    if force_configeol is not None:
        if not isfile(force_configeol):
            raise ConfigError(_(u"Configuration file unexistent : {0}").format(
                force_configeol))
        if load_extra:
            #if force_configeol, cannot calculated extra configfile name
            raise Exception(_(u'Unable to force_configeol with load_extra.'))
    if force_dirs is not None and (load_extra is True or namespace != 'creole'):
        raise Exception(_(u'If force_dirs is defined, namespace must be set to creole and load_extra must be set to False.'))
    if namespace != 'creole' and load_extra:
        raise ValueError(_(u'namespace is not creole, so load_extra is forbidden.'))
    #should not load value now because create a Config
    loader = CreoleLoader()
    if force_dirs is not None:
        dirs = force_dirs
    elif namespace == 'creole':
        dirs = eoledirs
    else:
        dirs = join(eoleextradico, namespace)
    #load config
    loader.read_dir(dirs, namespace)
    if load_extra:
        extranames = _list_extras()
        if isdir(eoleextradico):
            for directory in extranames:
                if directory in forbiddenextra:
                    raise ValueError(
                        _(u'Namespace {} for extra dictionary not allowed').format(directory))
                loader.read_dir(join(eoleextradico, directory), directory)
    config = loader.get_config()
    if warnings is None:
        # warnings is disabled in read-only mode and enabled in read-write mode by default
        warnings = rw
    if warnings is False:
        config.cfgimpl_get_settings().remove('warnings')
    if owner is not None:
        if owner not in dir(owners):
            owners.addowner(owner)
        config.cfgimpl_get_settings().setowner(getattr(owners, owner))
    #load values
    if force_configeol is not None:
        configfile = force_configeol
    else:
        configfile = _gen_eol_file(namespace)
    if load_values and isfile(configfile):
        disable_mandatory = False
        load_config_eol(config, configfile=configfile, try_upgrade=try_upgrade,
                        force_load_owner=force_load_creole_owner,
                        force_instanciate=force_instanciate)
    else:
        # remember where values would be stored for a later save
        config.impl_set_information(namespace, configfile)
    if load_extra:
        load_extras(config, load_values=load_values)
    if rw:
        config.read_write()
    elif rw is False:
        config.read_only()

    if disable_mandatory:
        config.cfgimpl_get_settings().remove('mandatory')
        config.cfgimpl_get_settings().remove('empty')
    return config
|
||||
|
||||
|
||||
def valid_store(store):
    """Validate the structure of a decoded config.eol store.

    *store* must be a dict whose keys are unicode strings; every entry
    except the '___version___' marker must be a dict with exactly the
    keys 'owner' and 'val'.

    Raises:
        Exception: on the first structural problem found.
    """
    if not isinstance(store, dict):
        raise Exception('store is not a dict: {0}'.format(store))
    for key, value in store.items():
        if not isinstance(key, unicode):
            raise Exception('store key is not an unicode for {0}'.format(key))
        # BUG FIX: comparing value.keys() to the list ['owner', 'val'] made
        # validation depend on dict key ordering; compare as sets instead
        # (still requires exactly those two keys, no more, no less).
        if key != '___version___' and (not isinstance(value, dict) or
                                       set(value.keys()) != set(('owner', 'val'))):
            raise Exception('store value is not a dict for {0}'.format(key))
|
||||
|
||||
|
||||
def load_store(config, eol_file=configeol):
    """Read and validate the JSON store of *eol_file*.

    Returns an empty dict (and flags 'load_error' on *config*) when the
    file is missing, unreadable as JSON, or structurally invalid.
    """
    if not isfile(eol_file):
        store = {}
    else:
        fh = open(eol_file, 'r')
        # shared lock: concurrent writers use an exclusive lock
        fcntl.lockf(fh, fcntl.LOCK_SH)
        try:
            # BUG FIX: the original called cjson.decode() although the
            # 'import cjson' line is commented out (only 'json' is
            # imported), so this always raised NameError.  json.loads
            # already returns unicode strings (the all_unicode=True
            # behaviour) and raises ValueError on invalid input.
            try:
                store = json.loads(fh.read())
            except ValueError:
                config.impl_set_information('load_error', True)
                store = {}
        finally:
            # always release the lock and the descriptor
            fh.close()
    try:
        valid_store(store)
    except Exception as err:
        config.impl_set_information('load_error', True)
        store = {}
    return store
|
||||
|
||||
|
||||
def config_load_store(config, namespace, store, force_instanciate=None,
                      unset_default=False, force_load_owner=None, eol_version='2.4.0'):
    """Inject the values of a parsed ``.eol`` *store* into *namespace* of *config*.

    :param store: mapping of variable name -> {'val': ..., 'owner': ...}
    :param force_instanciate: overrides the instanciation state used for
        the auto_freeze handling
    :param unset_default: skip values equal to the current default
    :param force_load_owner: owner applied to every loaded value instead
        of the one recorded in the store
    :param eol_version: fallback version when the store carries none
    """
    subconfig = getattr(config, namespace)
    cache_paths = config.cfgimpl_get_description()._cache_paths[1]
    # variables present in the store but unknown to the option description
    unknown_options = {}

    def reorder_store(path1, path2):
        """
        sorter function.

        sort description : if varname1 is a master and varname 2
        is a slave, returns [varname1, varname2]
        """
        # Python 2 comparator: order follows the description's path cache
        idx_1 = cache_paths.index(path1)
        idx_2 = cache_paths.index(path2)
        return cmp(idx_1, idx_2)

    def store_path_and_reorder(eol_version):
        """Convenience function to replace varnames with full paths
        and to sort an unordered ConfigObj's

        :returns: a sorted ordereddict.
        """
        store_path = {}
        if namespace == 'creole':
            # creole stores bare variable names: map each back to its full path
            paths = {}
            for path in subconfig.cfgimpl_get_description().impl_getpaths():
                vname = path.split('.')[-1]
                paths[vname] = namespace + '.' + path
            # variable unknown to Tiramisu
            for vname, value in store.items():
                if vname == '___version___':
                    eol_version = value
                elif vname not in paths:
                    unknown_options[vname] = value
                if vname not in paths or value == {}:
                    continue
                store_path[paths[vname]] = value
        else:
            # extra namespaces store full paths directly
            paths = []
            subpaths = subconfig.cfgimpl_get_description().impl_getpaths()
            for path in subpaths:
                paths.append(namespace + '.' + path)
            for vname, value in store.items():
                if vname == '___version___':
                    eol_version = value
                    continue
                elif vname not in paths:
                    continue
                store_path[vname] = value
        # sort so that master variables are loaded before their slaves
        store_order = OrderedDict()
        store_key = store_path.keys()
        store_key.sort(reorder_store)
        for path in store_key:
            store_order[path] = store_path[path]
        return eol_version, store_order

    #don't frozen auto_freeze before instance (or enregistrement_zephir for Zephir)
    if force_instanciate is not None:
        is_inst = force_instanciate
    else:
        is_inst = is_instanciate()
    eol_version, store = store_path_and_reorder(eol_version)
    # original store entries, keyed by bare variable name (kept for upgrades)
    orig_values = {}
    for path, values in store.items():
        value = values['val']
        option = config.unwrap_from_path(path)
        settings = config.cfgimpl_get_settings()
        tiramisu_values = config.cfgimpl_get_values()
        if force_load_owner is not None:
            owner = force_load_owner
        else:
            owner = values['owner']
        # slave values may carry one owner per index (dict form)
        if isinstance(owner, dict):
            for towner in owner.values():
                if towner not in dir(owners):
                    owners.addowner(towner)
        else:
            if owner not in dir(owners):
                owners.addowner(owner)
        try:
            # with unset_default, keep the default when the stored value equals it
            if unset_default and value == getattr(config, path):
                continue
            if isinstance(value, tuple):
                value = list(value)
                values['val'] = value
            orig_values[path.split('.')[-1]] = values
            if option.impl_is_master_slaves('slave'):
                if not isinstance(owner, dict):
                    new_owner = getattr(owners, owner)
                multi = config.getattr(path, force_permissive=True)
                # normalize a plain list into the {index: value} dict form
                if isinstance(value, list):
                    tval = {}
                    for idx, val in enumerate(value):
                        tval[idx] = val
                    value = tval
                for idx, val in value.items():
                    index = int(idx)
                    if len(multi) > index:
                        multi[index] = val
                        if isinstance(owner, dict):
                            new_owner = getattr(owners, owner[idx])
                        tiramisu_values.setowner(option, new_owner, index=index)
                    else:
                        log.error(_("master's len is lower than the slave variable ({})").format(path))
            else:
                if isinstance(owner, str):
                    owner = unicode(owner)
                if not isinstance(owner, unicode):
                    raise Exception(_('owner must be a string for {}').format(path))
                new_owner = getattr(owners, owner)
                try:
                    config.setattr(path, value, force_permissive=True)
                except ValueError as e:
                    # weekday/monthday cross-validate; load both with the
                    # validator temporarily disabled, then re-enable it
                    if path == 'schedule.schedule.weekday' and 'schedule.schedule.monthday' in store:
                        settings.remove('validator')
                        config.setattr(path, value, force_permissive=True)
                        config.setattr('schedule.schedule.monthday', store['schedule.schedule.monthday'], force_permissive=True)
                        settings.append('validator')
                    else:
                        raise e
                tiramisu_values.setowner(option, new_owner)
        except ValueError as e:
            # invalid value: keep the config loadable, record the problem
            msg = str(e).decode('utf8')
            #msg = unicode(e)
            log.error(_('unable to load variable {} with value {}: {}').format(path, value, msg))
            settings[option].append('load_error')
            config.impl_set_information('error_msg_{}'.format(path), msg)
            config.impl_set_information('orig_value_{}'.format(path), value)
        except LeadershipError:
            # do not raise an error #8380
            pass
        try:
            # the variable now has an explicit value: no need to force-store it
            config.impl_get_information('force_store_vars').remove(path)
        except (KeyError, ValueError) as err:
            pass

        # open up the family (and the sub-family for 4-level paths) plus the
        # option itself to every mode level
        path_split = path.split('.')
        family_option = config.unwrap_from_path(namespace + '.' + path_split[1])
        settings.setpermissive(tuple(modes_level), opt=family_option)
        if len(path_split) == 4:
            parent_option = config.unwrap_from_path(namespace + '.' + path_split[1] + '.' + path_split[2])
            settings.setpermissive(tuple(modes_level), opt=parent_option)
        settings.setpermissive(tuple(modes_level), opt=option)
        setting = config.cfgimpl_get_settings()
        # freeze auto_freeze variables once the server is instanciated
        if 'auto_freeze' in setting[option] and is_inst == 'oui' and \
                not tiramisu_values.is_default_owner(option):
            setting[option].append('frozen')
    if namespace == 'creole':
        config.impl_set_information('unknown_options', unknown_options)
    config.impl_set_information('eol_version', eol_version)
    config.impl_set_information('orig_values', orig_values)
|
||||
|
||||
def config_load_values(config, namespace, eol_file=None, force_instanciate=None,
                       force_load_owner=None):
    """Load the values stored in an ``.eol`` file into *namespace*.

    When *eol_file* is omitted, the path previously attached to the
    config under *namespace* is reused; otherwise the given path is
    recorded on the config for later saves.
    """
    # Nothing to do when the namespace does not exist on this config.
    if getattr(config, namespace, None) is None:
        return
    if eol_file is not None:
        config.impl_set_information(namespace, eol_file)
    else:
        try:
            eol_file = config.impl_get_information(namespace)
        except AttributeError:
            raise Exception(_(u'config must have eol_file attribute'))
    if not isfile(eol_file):
        raise IOError(_(u'Can not find file {0}').format(eol_file))
    config_load_store(config, namespace, load_store(config, eol_file),
                      force_instanciate=force_instanciate,
                      force_load_owner=force_load_owner)
|
||||
|
||||
def config_get_values(config, namespace, check_mandatory=True, ignore_autofreeze=False):
    """check_mandatory: allows to disable mandatory checking
    (i.e : when returning values for partial configuration in Zéphir)

    Returns the store dict {varname: {'val': ..., 'owner': ...}} of every
    modified (and force-stored) value of *namespace*.
    """
    # NOTE(review): ignore_autofreeze is never used in this body — confirm
    # whether callers still rely on the parameter.
    def _get_varname(path):
        # the creole namespace stores bare variable names, others full paths
        if namespace == 'creole':
            value_name = path.split('.')[-1]
        else:
            value_name = path
        return value_name

    subconfig = getattr(config, namespace)
    if check_mandatory:
        mandatory_errors = list(config.cfgimpl_get_values(
            ).mandatory_warnings(force_permissive=True))
        if mandatory_errors != []:
            text = []
            for error in mandatory_errors:
                # only report variables belonging to this namespace
                if not error.startswith(namespace + '.'):
                    continue
                error = error.split('.')
                text.append(_(u"Mandatory variable '{0}' from family '{1}'"
                              u" is not set !").format(unicode(error[-1]),
                                                       unicode(error[1].capitalize())).encode('utf-8'))
            if text != []:
                raise PropertiesOptionError("\n".join(text), ('mandatory',))
    store = {}
    opt_values = subconfig.cfgimpl_get_values().get_modified_values()
    force_store_values = config.impl_get_information('force_store_values', None)

    for path, own_val in opt_values.items():
        #for variable not related to current namespace
        if not path.startswith(namespace+'.'):
            continue
        # already modified: no need to force-store it a second time below
        if force_store_values and path in force_store_values:
            force_store_values.remove(path)
        store[_get_varname(path)] = {'val': own_val[1], 'owner': own_val[0]}
    if force_store_values:
        # remaining force-stored variables keep their current (default) value
        # under the special 'forced' owner
        for path in force_store_values:
            varname = _get_varname(path)
            if varname not in store:
                try:
                    store[varname] = {'val': config.getattr(path, force_permissive=True), 'owner': u'forced'}
                except PropertiesOptionError:
                    # unreachable option (disabled, ...): silently skipped
                    pass
    if namespace == 'creole':
        #update with values in store with no known options
        store.update(config.impl_get_information('unknown_options', {}))
    return store
|
||||
|
||||
|
||||
def add_eol_version(store, eol_version=None):
    """Record the EOLE version inside *store* under ``___version___``.

    The version passed as argument is kept when it is at least 2.4.1;
    when no version is given, the version of the running system is used.
    (A provided version older than 2.4.1 stores nothing — historical
    behaviour kept.)
    """
    if not eol_version:
        store['___version___'] = get_version('EOLE_RELEASE')
    elif StrictVersion(eol_version) >= StrictVersion('2.4.1'):
        store['___version___'] = eol_version
|
||||
|
||||
|
||||
def config_save_values(config, namespace, reload_config=True, eol_file=None, check_mandatory=True, eol_version=None):
    """Serialize the modified values of *namespace* into its ``.eol`` file.

    The target path is *eol_file* when given, otherwise the one attached
    to the config.  The write is done under an exclusive fcntl lock, the
    creole daemon is asked to reload afterwards, and True is returned.

    :raises Exception: when the config has no file attached or the write fails
    """
    # also validates that the namespace exists on this config (AttributeError)
    subconfig = getattr(config, namespace)
    if eol_file is not None:
        config.impl_set_information(namespace, eol_file)
    try:
        eol_file = config.impl_get_information(namespace)
    except AttributeError:
        raise Exception(_(u'config must have eol_file attribute'))
    store = config_get_values(config, namespace, check_mandatory)
    add_eol_version(store, eol_version)
    try:
        dirn = dirname(eol_file)
        if not isdir(dirn):
            makedirs(dirn)
        if not isfile(eol_file):
            fh = file(eol_file, 'w')
            fcntl.lockf(fh, fcntl.LOCK_EX)
        else:
            # open r+ so the exclusive lock is acquired *before* the
            # existing content is discarded
            fh = file(eol_file, 'r+')
            fcntl.lockf(fh, fcntl.LOCK_EX)
            fh.truncate()  # Here's where the magic happens #7073
        # close (and release the lock) even when the encode/write fails —
        # the previous code leaked the descriptor in that case
        try:
            fh.write(cjson.encode(store))
        finally:
            fh.close()
    except Exception as err:
        raise Exception(_(u"Error saving file: {0}").format(err))
    if client is not None and reload_config:
        try:
            client.reload_eol()
            #client.reload_config()
        except CreoleClientError:
            # best effort: the daemon may simply not be running
            pass
    return True
|
|
@ -0,0 +1,454 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Parseur LXML des fichiers XML de collecte des variables EOLE
|
||||
"""
|
||||
from lxml import etree
|
||||
from copy import copy
|
||||
from .error import ConfigError
|
||||
from .utils import string_to_bool #, get_text_node
|
||||
from .config import VIRTMASTER
|
||||
from .dtd_parser import CONVERT_VALUE
|
||||
from pyeole.odict import OrderedDict
|
||||
|
||||
from .i18n import _
|
||||
|
||||
def parse_xml_file(filename, dtd, parse_all=True, test_duplicate=False):
    """Parse a Creole XML dictionary file.

    @param filename: path of the source XML file
    @return: data structure used to build the Eole objects
    """
    try:
        events = etree.iterparse(filename, events=('end',), tag='creole')
        return _parse_root_node(events, dtd, parse_all, test_duplicate)
    except Exception as err:
        raise ConfigError(_(u"Error while parsing file {0}: {1}").format(filename, err))
|
||||
|
||||
def parse_string(xml_string, dtd, parse_all=True, test_duplicate=False):
    """Parse a Creole XML dictionary given as a string.

    @param xml_string: the XML dictionary as a character string
    @return: data structure used to build the Eole objects
    """
    try:
        walker = etree.iterwalk(etree.fromstring(xml_string),
                                events=('end',), tag='creole')
        return _parse_root_node(walker, dtd, parse_all, test_duplicate)
    except Exception as err:
        raise ConfigError(_(u"Error while parsing: {0}").format(err))
|
||||
|
||||
def _parse_root_node(document, dtd, parse_all, test_duplicate=False):
    """Walk the <creole> root node and build the parsed dictionary.

    @param document: iterator yielding the root XML node
    @return: dict with families, containers, constraints, groups, helps
        and separators
    """
    def _parse_container(node, options, container_name):
        # collect every DTD-declared sub-element of *node* into ret['<name>s']
        for name in options:
            key_name = '{0}s'.format(name)
            ret.setdefault(key_name, [])
            values = parse_generic(node.findall(name),
                                   container_name, dtd, name)
            if values != []:
                ret[key_name].extend(values)

    # the iterator yields a single 'end' event for the <creole> tag
    for unused, first_node in document:
        root_node = first_node

    # check for duplicated variables inside the same dictionary
    if test_duplicate:
        all_var_dict = []
        for var in root_node.findall('variables/family/variable'):
            name = var.attrib['name']
            if name in all_var_dict:
                raise ConfigError(_(u'Error, var {0} already exists in current dictionaries').format(name))
            all_var_dict.append(name)

    ret = {'families': parse_families(root_node)}
    families_action = parse_actions(root_node, dtd)
    if len(families_action) != 0:
        ret['families_action'] = families_action

    ret['containers'] = []
    ## <files> tag (data on the master host)
    file_node = root_node.findall('files')
    if file_node != []:
        if len(file_node) != 1:
            raise Exception(_(u"Error: extra <files> tags in dictionaries."))
        if parse_all:
            _parse_container(file_node[0], dtd['files']['options'], VIRTMASTER)
        ret['containers'].append({'name': VIRTMASTER, 'id': '1'})

    ## <containers> tag (data inside the containers)
    containers_node = root_node.findall('containers')
    if containers_node != []:
        if len(containers_node) != 1:
            raise Exception(_(u"Error: extra <containers> tags in dictionaries."))
        container = containers_node[0]
        for container_node in container.getiterator('container'):
            name = container_node.attrib['name']
            if name in [VIRTMASTER, 'all']:
                raise Exception(_(u"Name '{0}' is not allowed in tag <container>.").format(name))
            # NOTE(review): ret['containers'] holds dicts, so this membership
            # test against a plain name never matches — confirm intent.
            if name in ret['containers']:
                raise Exception(
                    _(u"There must be only one name '{0}' in a dictionary.").format(name))
            containerid = _get_optional(container_node, 'id')
            groupid = _get_optional(container_node, 'group')
            ret['containers'].append({'name': name, 'id': containerid,
                                      'group': groupid})
            if parse_all:
                _parse_container(container_node, dtd['container']['options'], name)
        if parse_all:
            # the <all> pseudo-container applies to every container
            all_node = container.findall('all')
            if all_node != []:
                if len(all_node) != 1:
                    raise Exception(_(u"Error: extra <all> tags in dictionaries."))
                ret['containers'].append({'name': 'all'})
                _parse_container(all_node[0], dtd['all']['options'], 'all')

    ## constraints handling
    #FIXME
    ret.update(parse_constraints(root_node))

    ## variable groups handling
    ret['groups'] = parse_groups(root_node)

    ## help handling
    ret['helps'] = parse_help(root_node)

    ## separators handling
    ret['separators'] = parse_separators(root_node)
    return ret
|
||||
|
||||
|
||||
def _get_boolean_attr(node, attr_name, default=False):
|
||||
"""
|
||||
Gestion spécifique pour les attributs booléens
|
||||
Ils sont à False par défaut
|
||||
"""
|
||||
val = node.get(attr_name)
|
||||
if default:
|
||||
return str(val).lower() != 'false'
|
||||
elif val is None:
|
||||
return None
|
||||
else:
|
||||
return str(val).lower() == 'true'
|
||||
|
||||
|
||||
def _get_optional(node, attr_name):
|
||||
"""
|
||||
Valeur d'un attribut optionnel
|
||||
"""
|
||||
return node.get(attr_name)
|
||||
|
||||
|
||||
def _parse_value(varnode, attr='value'):
|
||||
"""
|
||||
récupération des valeurs d'une variable
|
||||
"""
|
||||
res = []
|
||||
for val in varnode.findall(attr):
|
||||
# FIX for <value></value> !
|
||||
if val.text is not None:
|
||||
res.append(val.text)
|
||||
else:
|
||||
res.append('')
|
||||
return res
|
||||
|
||||
def parse_value(varnode, name):
    """Return the value(s) of a variable node.

    ``None`` when no non-empty <value> child is present, a single string
    for exactly one, and a list of strings for several.

    Bug fix: the previous implementation tested ``type(res) == str`` to
    promote the scalar to a list, which missed ``unicode`` first values
    (Python 2 lxml returns unicode for non-ASCII text) and then crashed
    on ``res.append``.  Texts are now collected uniformly.
    """
    texts = [child.text for child in varnode.findall('value')
             if child.text is not None]
    if not texts:
        return None
    if len(texts) == 1:
        return texts[0]
    return texts
|
||||
|
||||
def parse_generic(nodes, container, dtd, name, old_result=None):
    """DTD-driven parsing of a list of *name* nodes.

    Mandatory and optional attributes are read according to ``dtd[name]``,
    validated against their allowed values, and the function recurses into
    every allowed child tag, each child inheriting the attributes already
    collected on its parent.

    @param nodes: the XML nodes to parse
    @param container: name of the enclosing container (or None)
    @param old_result: attributes inherited from the parent node (recursion)
    @return: list of attribute dicts, one per leaf node
    """
    ret = []
    keys = dtd[name]
    for node in nodes:
        if old_result:
            # recursion: inherit the attributes collected on the parent node
            result = copy(old_result)
            result['node_name'] = name
        elif container is not None:
            result = {'container': container}
        else:
            result = {}
        if keys['type']:
            # PCDATA element: its text content becomes the 'name' entry, so
            # a 'name' attribute would be ambiguous
            if 'name' in keys['needs'] or 'name' in keys['optionals']:
                raise Exception('PCDATA + name')
            result['name'] = node.text
        for key, values in keys['needs'].items():
            # mandatory attribute: KeyError if missing
            value = node.attrib[key]
            value = CONVERT_VALUE.get(value, value)
            if values['values'] is not None and value not in values['values']:
                raise Exception(_(u"Value {0} not in {1}").format(value, values['values']))
            result[key] = value
        for key, values in keys['optionals'].items():
            value = node.attrib.get(key, values['default'])
            value = CONVERT_VALUE.get(value, value)
            if value != None:
                if values['values'] is not None and value not in values['values']:
                    raise Exception(_(u"Value {0} not in {1}").format(value, values['values']))
                result[key] = value
        if keys['options'] == []:
            # leaf element: emit the collected attributes
            ret.append(result)
        else:
            # non-leaf element: recurse into each allowed child tag
            for option in keys['options']:
                ret.extend(parse_generic(node.findall(option), container, dtd, option, result))
    return ret
|
||||
|
||||
|
||||
def parse_variables(var_node):
    """
    Variables processing.
    @param var_node: <variables> node
    @return: ordered mapping variable name -> attribute dict
    """
    result = OrderedDict()
    for var in var_node.getiterator('variable'):
        # Default variables are handled in creole.loader
        hidden = _get_boolean_attr(var, 'hidden')
        multi = _get_boolean_attr(var, 'multi')
        redefine = _get_boolean_attr(var, 'redefine')
        mandatory = _get_boolean_attr(var, 'mandatory')
        remove_check = _get_boolean_attr(var, 'remove_check')
        remove_condition = _get_boolean_attr(var, 'remove_condition')
        exists = _get_boolean_attr(var, 'exists', default=True)
        disabled = _get_boolean_attr(var, 'disabled', default=False)
        auto_freeze = _get_boolean_attr(var, 'auto_freeze')
        auto_save = _get_boolean_attr(var, 'auto_save')
        mode = _get_optional(var, 'mode')
        name = var.attrib['name']
        # None / scalar / list depending on the number of <value> children
        value = parse_value(var, var.attrib['name'])
        typ = _get_optional(var, 'type')
        if typ == None:
            # 'string' is the implicit variable type
            typ = 'string'
        desc = _get_optional(var, 'description')
        # Python 2: re-encode unicode descriptions to UTF-8 byte strings
        if type(desc) == unicode:
            desc = desc.encode('utf-8')
        result[name] = dict(value=value,
                            type=typ,
                            description=desc,
                            hidden=hidden,
                            multi=multi,
                            auto='',
                            redefine=redefine,
                            exists=exists,
                            auto_freeze=auto_freeze,
                            auto_save=auto_save,
                            mode=mode,
                            mandatory=mandatory,
                            disabled=disabled,
                            remove_check=remove_check,
                            remove_condition=remove_condition
                            )
    return result
|
||||
|
||||
def parse_families(var_node):
    """Parse every <family> under <variables>.

    @param var_node: root node holding <variables>
    @return: ordered mapping family name -> family description dict
    """
    families = OrderedDict()
    for family in var_node.findall('variables/family'):
        name = family.attrib['name']
        if name in families:
            raise Exception(_(u"Family {0} is set several times.").format(name))
        families[name] = {
            'hidden': _get_boolean_attr(family, 'hidden'),
            # FIXME: mode='' was accepted by the old DOM parser
            'mode': _get_optional(family, 'mode'),
            'vars': parse_variables(family),
            'icon': _get_optional(family, 'icon'),
        }
    return families
|
||||
|
||||
|
||||
def parse_actions(root_node, dtd):
    """Parse every <family_action> node of the dictionary.

    @param root_node: the <creole> root node
    @return: ordered mapping family name -> action description dict
    """
    result = OrderedDict()

    def _parse_action(node, options):
        # collect the DTD-declared sub-elements of the <action> node
        parse = {}
        for name in options:
            key_name = '{0}'.format(name)
            parse.setdefault(key_name, [])
            values = parse_generic(node.findall(name), None, dtd, name)
            if values != []:
                parse[key_name].extend(values)
        parse['type'] = node.get("type", "custom")
        parse['title'] = node.get('title')
        parse['description'] = node.get('description')
        image = node.get('image')
        if image:
            parse['image'] = image
        url = node.get('url', None)
        if url:
            parse['url'] = url
        return parse

    for family in root_node.findall('family_action'):
        family_name = family.attrib['name']
        if family_name in result:
            raise Exception(_(u"Action Family {0} is set several times.").format(family_name))
        description = _get_optional(family, 'description')
        color = _get_optional(family, 'color')
        image = _get_optional(family, 'image')
        ## <action> tag
        action_node = family.findall('action')
        # Bug fix: the previous test (action_node != [] and len(...) != 1)
        # let an *empty* list through and then crashed with IndexError on
        # action_node[0]; exactly one <action> is required.
        if len(action_node) != 1:
            raise Exception(_(u"Error: extra <action> tags in dictionaries."))
        action = _parse_action(action_node[0], dtd['action']['options'])
        result[family_name] = {'name': family_name,
                               'description': description,
                               'color': color,
                               'image': image,
                               'action': action
                               }
    return result
|
||||
|
||||
def parse_constraints(node):
    """Gather every constraint declared under <constraints>.

    @param node: the constraints' parent node
    """
    return {
        'checks': parse_funcs(node, 'check'),
        'fills': parse_funcs(node, 'fill'),
        'autos': parse_funcs(node, 'auto'),
        'conditions': parse_conditions(node),
    }
|
||||
|
||||
|
||||
def _parse_param(param_node):
    """Extract the attributes and text content of a function <param> node."""
    param = {'value': param_node.text}
    for attr in ('name', 'type', 'optional', 'hidden'):
        param[attr] = _get_optional(param_node, attr)
    return param
|
||||
|
||||
|
||||
def parse_funcs(node, func_type):
    """
    @param node: node of the functions
    @param func_type: TagName of the functions to find
    @return: {target: [(param_name, _parse_params('param'))]}
    """
    # verification functions
    funcs = {}
    for func in node.findall('constraints/%s' % func_type):
        # read the parameters
        params = []
        # the target(s) may be given as <target> child tags...
        targets = _parse_value(func, 'target')
        # ...or as a target= attribute
        if not targets:
            # wrapped in a list because <target> parsing returns a list
            targets = [_get_optional(func, 'target')]
        level = _get_optional(func, 'level')
        if not level:
            level = 'error'
        for target in targets:
            if target is not None:
                # NOTE(review): *params* is shared between every target of
                # this function and re-extended on each iteration, so with
                # several targets the parameter list is duplicated — confirm
                # whether that accumulation is intentional.
                for param in func.getiterator('param'):
                    params.append(_parse_param(param))
                funcs.setdefault(target, []).append((func.attrib['name'],
                                                     params, level))
    return funcs
|
||||
|
||||
|
||||
def parse_conditions(node):
    """
    @param node: node of the condition functions
    @return: {source_variable: [condition definition dicts]}
    """
    # verification functions
    funcs = {}
    for func in node.getiterator('condition'):
        # read the parameters
        targets = []
        family_targets = []
        list_targets = []
        # function parameters
        params = [_parse_param(param)
                  for param in func.getiterator('param')]
        # targets of the dependency, dispatched by their type attribute
        for target in func.getiterator('target'):
            ttype = target.get('type')
            optional = target.get('optional', False)
            if ttype == 'family':
                family_targets.append((target.text, optional))
            elif ttype in ['variable', None]:
                # 'variable' is the implicit target type
                targets.append((target.text, optional))
            else:
                if ttype.endswith('list'):
                    #suppress list in ttype
                    list_targets.append((ttype[:-4], target.text, optional))
                else:
                    raise Exception(_(u'Unknown type {0} for condition target.').format(ttype))
        funcdef = {'name': func.attrib['name'], 'family': family_targets,
                   'variable': targets, 'list': list_targets, 'param': params,
                   'fallback': _get_boolean_attr(func, 'fallback')}
        # a condition is always driven by a source variable
        source = _get_optional(func, 'source')
        if source == None:
            raise Exception(_(u'Impossible condition without source for {0}.').format(funcdef))
        funcs.setdefault(source, []).append(funcdef)
    return funcs
|
||||
|
||||
|
||||
def parse_groups(node):
    """Map each master variable to the list of its slave variables."""
    groups = {}
    for group in node.findall('constraints/group'):
        groups[group.attrib['master']] = _parse_value(group, 'slave')
    return groups
|
||||
|
||||
|
||||
def parse_help(node):
    """Collect inline help texts for variables and families.

    @return: {'variables': {name: text}, 'families': {name: text}}
    """
    var_help = {}
    for item in node.findall('help/variable'):
        name = item.attrib['name']
        # an empty tag has no text to strip: reject it explicitly
        if item.text is None:
            raise Exception(_(u"Invalid help for variable {0}.").format(name))
        var_help[name] = item.text.strip()
    fam_help = {}
    for item in node.findall('help/family'):
        name = item.attrib['name']
        if item.text is None:
            raise Exception(_(u"Invalid help for family {0}").format(name))
        fam_help[name] = item.text.strip()
    return {'variables': var_help, 'families': fam_help}
|
||||
|
||||
|
||||
def parse_separators(node):
    """Separator mapping, format {'variable': (label, never_hidden)}.

    'variable' is the name of the first variable after the separator.
    """
    separators = {}
    for sep in node.findall('variables/separators/separator'):
        label = sep.text.strip() if sep.text else ''
        separators[sep.attrib['name']] = (label, _get_boolean_attr(sep, 'never_hidden'))
    return separators
|
||||
|
|
@ -0,0 +1,148 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
###########################################################################
|
||||
#
|
||||
# Eole NG - 2010
|
||||
# Copyright Pole de Competence Eole (Ministere Education - Academie Dijon)
|
||||
# Licence CeCill http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# eole@ac-dijon.fr
|
||||
#
|
||||
###########################################################################
|
||||
"""
|
||||
Librairie pour la gestion des mises à jour
|
||||
"""
|
||||
from os import system
|
||||
from dateutil import parser
|
||||
from pyeole.schedule import ManageSchedule, list_once, add_schedule, \
|
||||
del_schedule, apply_schedules, DAY_TO_STRING
|
||||
from pyeole.process import system_out
|
||||
from .client import CreoleClient
|
||||
|
||||
from .i18n import _
|
||||
|
||||
# fichier d'information pour la mise à jour unique
|
||||
DIFF_FILENAME = '/var/lib/eole/reports/maj-diff.txt'
|
||||
|
||||
#########################################
|
||||
## Mise à jour hebdomadaire (maj_auto) ##
|
||||
#########################################
|
||||
|
||||
client = CreoleClient()
|
||||
|
||||
def maj_enabled():
    """Tell whether the weekly automatic update is enabled."""
    day = client.get('/schedule/majauto/day')
    return day == 'weekly'
|
||||
|
||||
def get_maj_day():
    """Return the update day and time, for instance:
    {'hour': 5, 'minute': 4, 'weekday': 'vendredi'}
    """
    schedule = client.get('/schedule/schedule')
    # monthday is irrelevant for a weekly schedule
    schedule.pop('monthday')
    schedule['weekday'] = DAY_TO_STRING[schedule['weekday']]
    return schedule
|
||||
|
||||
def enable_maj_auto():
    """Enable the weekly automatic update."""
    if maj_enabled():
        return
    scheduler = ManageSchedule()
    scheduler.add('majauto', 'weekly', 'post')
    scheduler.save()
    apply_schedules()
|
||||
|
||||
def disable_maj_auto():
    """Disable the weekly automatic update."""
    if not maj_enabled():
        return
    scheduler = ManageSchedule()
    scheduler.delete('majauto')
    scheduler.save()
    apply_schedules()
|
||||
|
||||
|
||||
#########################################
|
||||
## Mise à jour unique (schedule once) ##
|
||||
#########################################
|
||||
|
||||
def maj_once_enabled():
    """Tell whether a one-shot update is already scheduled."""
    return 'majauto' in list_once('post')
|
||||
|
||||
def enable_maj_once():
    """Schedule a one-shot update for the next 'post' run.

    Returns True when the update was actually scheduled; returns None
    when one was already pending (historical behaviour kept).
    """
    if maj_once_enabled():
        return None
    # drop any at(1)-based deferred update before scheduling this one
    cancel_maj_differee()
    add_schedule('once', 'post', 'majauto')
    write_diff(True, 'ce soir')
    return True
|
||||
|
||||
def disable_maj_once():
    """Cancel the pending one-shot update, if any."""
    if maj_once_enabled():
        del_schedule('once', 'post', 'majauto')
|
||||
|
||||
|
||||
#########################################
|
||||
## Mise à jour unique (maj_differee) ##
|
||||
#########################################
|
||||
|
||||
def write_diff(enable, heure=None):
    """Write the update-manager status file read by the EAD.

    :param enable: True when an update is scheduled
    :param heure: human readable time inserted in the message
    """
    # 'with' guarantees the descriptor is closed even when the write fails
    # (the previous code leaked it); open() also replaces the Python2-only
    # file() builtin.
    with open(DIFF_FILENAME, 'w') as fic:
        if enable:
            fic.write(_(u'An update is scheduled at {0}').format(heure))
        else:
            fic.write("")
|
||||
|
||||
def cancel_maj_differee():
    """Unschedule every deferred update (one-shot schedule and at jobs)."""
    disable_maj_once()
    # drop every pending at(1) job that would run Maj-Auto
    system("""for i in `grep -l "Maj-Auto" /var/spool/cron/atjobs/* 2>/dev/null`; do rm -f $i ; done;""")
    write_diff(False)
    return True
|
||||
|
||||
def prog_maj_differee(heure, options='-R'):
    """
    Schedule an update deferred by a few hours.
    It is launched through the at command for the root user.
    options: options passed to Maj-Auto

    Returns True on success, False when at(1) fails; with heure == 'once'
    delegates to enable_maj_once().
    """
    if heure == 'once':
        return enable_maj_once()
    # remove every other pending deferred update first
    cancel_maj_differee()
    stdin = "rm -f %s\nMaj-Auto %s\n" % (DIFF_FILENAME, options)
    env_path = {'PATH': '/usr/share/eole:/usr/share/eole/sbin:/usr/local/sbin:'
                '/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
                'LC_ALL': 'fr_FR.UTF-8'}
    ret = system_out(['/usr/bin/at', 'now', '+', str(heure), 'hours'], stdin=stdin, env=env_path)
    if ret[0] != 0:
        return False
    # NOTE(review): assumes at(1)'s confirmation date sits in fields 3:7 of
    # the second line of its stderr output — fragile, confirm with the
    # installed at version/locale.
    scheduled_maj = " ".join(ret[2].splitlines()[1].split()[3:7])
    scheduled_maj = parser.parse(scheduled_maj)
    # zero-padded components for the human-readable message
    scheduled_day = "{0:0=2d}".format(scheduled_maj.day)
    scheduled_month = "{0:0=2d}".format(scheduled_maj.month)
    scheduled_year = "{0:0=2d}".format(scheduled_maj.year)
    scheduled_hour = "{0:0=2d}".format(scheduled_maj.hour)
    scheduled_minute = "{0:0=2d}".format(scheduled_maj.minute)
    scheduled_maj = _(u'{0} the {1}').format(":".join((scheduled_hour, scheduled_minute)), \
                                             "/".join((scheduled_day, scheduled_month, scheduled_year)))
    write_diff(True , scheduled_maj)
    return True
|
|
@ -0,0 +1,704 @@
|
|||
"""
|
||||
Creole flattener. Takes a bunch of Creole XML dispatched in differents folders
|
||||
as an input and outputs a human readable flattened XML
|
||||
|
||||
Sample usage::
|
||||
|
||||
>>> from creole.objspace import CreoleObjSpace
|
||||
>>> eolobj = CreoleObjSpace('/usr/share/creole/creole.dtd')
|
||||
>>> eolobj.create_or_populate_from_xml('creole', ['/usr/share/eole/creole/dicos'])
|
||||
>>> eolobj.space_visitor()
|
||||
>>> eolobj.save('/tmp/creole_flatened_output.xml')
|
||||
|
||||
The CreoleObjSpace
|
||||
|
||||
- loads the XML into an internal CreoleObjSpace representation
|
||||
- visits/annotates the objects
|
||||
- dumps the object space as XML output into a single XML target
|
||||
|
||||
The visit/annotation stage is a complex step that corresponds to the Creole
|
||||
procedures.
|
||||
|
||||
For example: a variable is redefined and shall be moved to another family
|
||||
means that a variable1 = Variable() object in the object space who lives in the family1 parent
|
||||
has to be moved into family2. The visit procedure changes variable1's object space parent.
|
||||
"""
|
||||
from collections import OrderedDict
|
||||
from lxml.etree import Element, SubElement # pylint: disable=E0611
|
||||
import sys
|
||||
from json import dump
|
||||
|
||||
|
||||
from .i18n import _
|
||||
from .xmlreflector import XMLReflector, HIGH_COMPATIBILITY
|
||||
from .annotator import ERASED_ATTRIBUTES, ActionAnnotator, ContainerAnnotator, SpaceAnnotator
|
||||
from .utils import normalize_family
|
||||
from .error import CreoleOperationError, SpaceObjShallNotBeUpdated, CreoleDictConsistencyError
|
||||
|
||||
# CreoleObjSpace's elements like 'family' or 'slave', that shall be forced to the Redefinable type
FORCE_REDEFINABLES = ('family', 'slave', 'container', 'disknod', 'variables', 'family_action')
# CreoleObjSpace's elements that shall be forced to the UnRedefinable type
FORCE_UNREDEFINABLES = ('value', 'input', 'profile', 'ewtapp', 'tag', 'saltaction')
# variable attributes that a <variable redefine="True"> may NOT override
UNREDEFINABLE = ('multi', 'type')

# boolean variable attributes that are exported as <property> children
PROPERTIES = ('hidden', 'frozen', 'auto_freeze', 'auto_save', 'force_default_on_freeze',
              'force_store_value', 'disabled', 'mandatory')
# property name -> list of property names actually emitted in the output XML
CONVERT_PROPERTIES = {'auto_save': ['force_store_value'], 'auto_freeze': ['force_store_value', 'auto_freeze']}

# XML attribute renames applied on export (NOTE: name kept despite the typo,
# it is referenced elsewhere in this module)
RENAME_ATTIBUTES = {'description': 'doc'}

#TYPE_TARGET_CONDITION = ('variable', 'family')
|
||||
|
||||
# _____________________________________________________________________________
|
||||
# special types definitions for the Object Space's internal representation
|
||||
class RootCreoleObject(object):
    """Common base class for every type living in the CreoleObjSpace."""
|
||||
|
||||
|
||||
class CreoleObjSpace(object):
    """DOM XML reflexion free internal representation of a Creole Dictionary
    """
    # container type for <choice> option values
    choice = type('Choice', (RootCreoleObject,), OrderedDict())
    # Creole ObjectSpace's Master variable class type
    Master = type('Master', (RootCreoleObject,), OrderedDict())
    """
    This Atom type stands for singleton, that is
    an Object Space's atom object is present only once in the
    object space's tree
    """
    Atom = type('Atom', (RootCreoleObject,), OrderedDict())
    "A variable that can be redefined"
    Redefinable = type('Redefinable', (RootCreoleObject,), OrderedDict())
    "A variable that can't be redefined"
    UnRedefinable = type('UnRedefinable', (RootCreoleObject,), OrderedDict())
|
||||
|
||||
|
||||
    def __init__(self, dtdfilename): # pylint: disable=R0912
        """Build the object space and derive one Python type per DTD element.

        :param dtdfilename: path of the Creole DTD describing the XML schema
        """
        # monotonically increasing counter used to order 'fill'/'auto' objects
        self.index = 0
        class ObjSpace(object): # pylint: disable=R0903
            """
            Base object space
            """
        # root of the internal tree populated by xml_parse_document()
        self.space = ObjSpace()
        self.xmlreflector = XMLReflector()
        self.xmlreflector.parse_dtd(dtdfilename)
        # list of redefined variable paths (reset per parsed document)
        self.redefine_variables = None
        self.probe_variables = []

        # elt container's attrs list
        self.container_elt_attr_list = [] #
        # element names whose 'name' is carried by the node text, e.g.
        # ['variable', 'separator', 'family']
        self.forced_text_elts = set()
        # ['disknod', 'slave', 'target', 'service', 'package', 'ip', 'value', 'tcpwrapper',
        # 'interface', 'input', 'port']
        self.forced_text_elts_as_name = set(['choice'])
        # element name -> {attribute name: allowed enumerated values}
        self.forced_choice_option = {}
        self.paths = Path()
        self.list_conditions = {}

        # attribute names whose DTD enumeration is exactly {True, False}
        self.booleans_attributs = []

        # One pass over the DTD: each element becomes a dynamically created
        # class (Redefinable / UnRedefinable / Atom) set as an attribute of
        # self, carrying the DTD attribute defaults as class attributes.
        for elt in self.xmlreflector.dtd.iterelements():
            attrs = {}
            clstype = self.UnRedefinable
            atomic = True
            forced_text_elt = False
            if elt.type == 'mixed':
                # mixed content => the element carries meaningful text
                forced_text_elt = True
            if elt.name == 'container':
                self.container_elt_attr_list = [elt.content.left.name]
                self.parse_dtd_right_left_elt(elt.content)
            for attr in elt.iterattributes():
                # an element with attributes is not an Atom
                atomic = False
                if attr.default_value:
                    if attr.default_value == 'True':
                        default_value = True
                    elif attr.default_value == 'False':
                        default_value = False
                    else:
                        default_value = attr.default_value
                    attrs[attr.name] = default_value
                if not attr.name.endswith('_type'):
                    values = list(attr.itervalues())
                    if values != []:
                        # remember the enumerated choices allowed by the DTD
                        self.forced_choice_option.setdefault(elt.name, {})[attr.name] = values

                if attr.name == 'redefine':
                    # presence of a 'redefine' attribute => Redefinable element
                    clstype = self.Redefinable
                if attr.name == 'name' and forced_text_elt is True:
                    # has both text and a 'name' attribute: text is plain text
                    self.forced_text_elts.add(elt.name)
                    forced_text_elt = False

                if set(attr.itervalues()) == set(['True', 'False']):
                    self.booleans_attributs.append(attr.name)

            if forced_text_elt is True:
                # text-only element: its text acts as its name
                self.forced_text_elts_as_name.add(elt.name)

            if elt.name in FORCE_REDEFINABLES:
                clstype = self.Redefinable
            elif elt.name in FORCE_UNREDEFINABLES:
                clstype = self.UnRedefinable
            elif atomic:
                clstype = self.Atom

            # Creole ObjectSpace class types, it enables us to create objects like:
            # Service_restriction(), Ip(), Interface(), Host(), Fstab(), Package(), Disknod(),
            # File(), Variables(), Family(), Variable(), Separators(), Separator(), Value(),
            # Constraints()... and so on. Creole ObjectSpace is an object's reflexion of
            # the XML elements
            setattr(self, elt.name, type(elt.name.capitalize(), (clstype,), attrs))
|
||||
|
||||
def parse_dtd_right_left_elt(self, elt):
|
||||
if elt.right.type == 'or':
|
||||
self.container_elt_attr_list.append(elt.right.left.name)
|
||||
self.parse_dtd_right_left_elt(elt.right)
|
||||
else:
|
||||
self.container_elt_attr_list.append(elt.right.name)
|
||||
|
||||
def _convert_boolean(self, value): # pylint: disable=R0201
|
||||
"""Boolean coercion. The Creole XML may contain srings like `True` or `False`
|
||||
"""
|
||||
if isinstance(value, bool):
|
||||
return value
|
||||
if value == 'True':
|
||||
return True
|
||||
elif value == 'False':
|
||||
return False
|
||||
else:
|
||||
raise TypeError(_('{} is not True or False').format(value).encode('utf8')) # pragma: no cover
|
||||
|
||||
def _is_already_exists(self, name, space, child, namespace):
|
||||
if isinstance(space, self.family): # pylint: disable=E1101
|
||||
if namespace != 'creole':
|
||||
name = space.path + '.' + name
|
||||
return self.paths.path_is_defined(name)
|
||||
if child.tag in ['family', 'family_action']:
|
||||
norm_name = normalize_family(name)
|
||||
else:
|
||||
norm_name = name
|
||||
return norm_name in getattr(space, child.tag, {})
|
||||
|
||||
    def _translate_in_space(self, name, family, variable, namespace):
        """Return the existing space object for *name*, moving it to a new
        family when a redefinition relocates it.

        :param name: object name (or path fragment)
        :param family: candidate parent space object
        :param variable: the XML node being processed
        :param namespace: current namespace ('creole' or extra namespace)
        :return: the (possibly relocated) space object
        """
        if not isinstance(family, self.family): # pylint: disable=E1101
            # non-family parent: plain lookup in the parent's container
            if variable.tag in ['family', 'family_action']:
                norm_name = normalize_family(name)
            else:
                norm_name = name
            return getattr(family, variable.tag)[norm_name]
        if namespace == 'creole':
            path = name
        else:
            path = family.path + '.' + name
        old_family_name = self.paths.get_variable_family_name(path)
        if normalize_family(family.name) == old_family_name:
            # already lives in this family: nothing to move
            return getattr(family, variable.tag)[name]
        # redefined into another family: physically move the variable object
        old_family = self.space.variables['creole'].family[old_family_name] # pylint: disable=E1101
        variable_obj = old_family.variable[name]
        del old_family.variable[name]
        if 'variable' not in vars(family):
            family.variable = OrderedDict()
        family.variable[name] = variable_obj
        # re-register the path under its new family
        self.paths.append('variable', name, namespace, family.name, variable_obj)
        return variable_obj
|
||||
|
||||
def remove_check(self, name): # pylint: disable=C0111
|
||||
if hasattr(self.space, 'constraints') and hasattr(self.space.constraints, 'check'):
|
||||
remove_checks = []
|
||||
for idx, check in enumerate(self.space.constraints.check): # pylint: disable=E1101
|
||||
if hasattr(check, 'target') and check.target == name:
|
||||
remove_checks.append(idx)
|
||||
|
||||
remove_checks = list(set(remove_checks))
|
||||
remove_checks.sort(reverse=True)
|
||||
for idx in remove_checks:
|
||||
self.space.constraints.check.pop(idx) # pylint: disable=E1101
|
||||
def remove_condition(self, name): # pylint: disable=C0111
|
||||
for idx, condition in enumerate(self.space.constraints.condition): # pylint: disable=E1101
|
||||
remove_targets = []
|
||||
if hasattr(condition, 'target'):
|
||||
for target_idx, target in enumerate(condition.target):
|
||||
if target.name == name:
|
||||
remove_targets.append(target_idx)
|
||||
remove_targets = list(set(remove_targets))
|
||||
remove_targets.sort(reverse=True)
|
||||
for idx in remove_targets:
|
||||
del condition.target[idx]
|
||||
|
||||
    def create_or_update_space_object(self, subspace, space, child, namespace):
        """Creates or retrieves the space object that corresponds
        to the `child` XML object

        Two attributes of the `child` XML object are important:

        - with the `redefine` boolean flag attribute we know whether
          the corresponding space object shall be created or updated

          - `True` means that the corresponding space object shall be updated
          - `False` means that the corresponding space object shall be created

        - with the `exists` boolean flag attribute we know whether
          the corresponding space object shall be created
          (or nothing -- that is the space object isn't modified)

          - `True` means that the corresponding space object shall be created
          - `False` means that the corresponding space object is not updated

        In the special case `redefine` is True and `exists` is False,
        we create the corresponding space object if it doesn't exist
        and we update it if it exists.

        :return: the corresponding space object of the `child` XML object
        """
        if child.tag in self.forced_text_elts_as_name:
            # for these tags the object's name is carried by the node text
            name = child.text
        else:
            name = subspace['name']
        if self._is_already_exists(name, space, child, namespace):
            # FORCE_REDEFINABLES elements are implicitly redefinable
            if child.tag in FORCE_REDEFINABLES:
                redefine = self._convert_boolean(subspace.get('redefine', True))
            else:
                redefine = self._convert_boolean(subspace.get('redefine', False))
            exists = self._convert_boolean(subspace.get('exists', True))
            if redefine is True:
                return self._translate_in_space(name, space, child, namespace)
            elif exists is False:
                # 'exists="False"' on an existing object: leave it untouched
                raise SpaceObjShallNotBeUpdated()
            else:
                raise CreoleDictConsistencyError(_('Already present in another XML file, {} '
                                                   'cannot be re-created').format(name).encode('utf8'))
        else:
            redefine = self._convert_boolean(subspace.get('redefine', False))
            exists = self._convert_boolean(subspace.get('exists', False))
            if redefine is False or exists is True:
                # fresh object of the dynamically generated class for this tag
                return getattr(self, child.tag)()
            else:
                raise CreoleDictConsistencyError(_('Redefined object: '
                                                   '{} does not exist yet').format(name).encode('utf8'))
|
||||
|
||||
    def generate_creoleobj(self, child, space, namespace):
        """Instanciate or retrieve the space object matching the XML *child*.

        :raises SpaceObjShallNotBeUpdated: propagated from
            create_or_update_space_object when the object must be skipped
        """
        if issubclass(getattr(self, child.tag), self.Redefinable):
            creoleobj = self.create_or_update_space_object(child.attrib, space, child, namespace)
        else:
            # instanciates an object from the CreoleObjSpace's builtins types
            # example : child.tag = constraints -> a self.Constraints() object is created
            creoleobj = getattr(self, child.tag)()
            # this Atom instance has to be a singleton here
            # we do not re-create it, we reuse it
            if isinstance(creoleobj, self.Atom) and child.tag in vars(space):
                creoleobj = getattr(space, child.tag)
        self.create_tree_structure(space, child, creoleobj)
        return creoleobj
|
||||
|
||||
    def create_tree_structure(self, space, child, creoleobj): # pylint: disable=R0201
        """
        Builds the tree structure of the object space here
        we set containers attributes in order to be populated later on
        for example::

            space = Family()
            space.variable = OrderedDict()

        another example::

            space = Variable()
            space.value = list()

        :raises CreoleOperationError: if *creoleobj* is of an unknown kind
        """
        if child.tag not in vars(space):
            if isinstance(creoleobj, self.Redefinable):
                # redefinables are keyed by name
                setattr(space, child.tag, OrderedDict())
            elif isinstance(creoleobj, self.UnRedefinable):
                # unredefinables simply accumulate in a list
                setattr(space, child.tag, [])
            elif isinstance(creoleobj, self.Atom):
                # atoms are stored directly; no container needed
                pass
            else: # pragma: no cover
                raise CreoleOperationError(_("Creole object {} "
                                             "has a wrong type").format(type(creoleobj)))
|
||||
|
||||
def _add_to_tree_structure(self, creoleobj, space, child): # pylint: disable=R0201
|
||||
if isinstance(creoleobj, self.Redefinable):
|
||||
name = creoleobj.name
|
||||
if child.tag == 'family' or child.tag == 'family_action':
|
||||
name = normalize_family(name)
|
||||
getattr(space, child.tag)[name] = creoleobj
|
||||
elif isinstance(creoleobj, self.UnRedefinable):
|
||||
getattr(space, child.tag).append(creoleobj)
|
||||
else:
|
||||
setattr(space, child.tag, creoleobj)
|
||||
|
||||
def _set_text_to_obj(self, child, creoleobj):
|
||||
if child.text is None:
|
||||
text = None
|
||||
else:
|
||||
text = child.text.strip()
|
||||
if text:
|
||||
if child.tag in self.forced_text_elts_as_name:
|
||||
creoleobj.name = text
|
||||
else:
|
||||
creoleobj.text = text
|
||||
|
||||
    def _set_xml_attributes_to_obj(self, child, creoleobj):
        """Copy the XML node's attributes onto the space object, coercing
        booleans and enforcing redefine restrictions.

        :raises CreoleDictConsistencyError: when a redefine touches an
            UNREDEFINABLE attribute
        :raises CreoleOperationError: when a boolean attribute receives a
            value other than 'True'/'False'
        """
        redefine = self._convert_boolean(child.attrib.get('redefine', False))
        has_value = hasattr(creoleobj, 'value')
        if HIGH_COMPATIBILITY and has_value:
            # compat mode: an only child with empty text doesn't count as a value
            has_value = len(child) != 1 or child[0].text != None
        if (redefine is True and child.tag == 'variable' and has_value
                and len(child) != 0):
            # a redefined variable with new child nodes drops its old value
            del creoleobj.value
        for attr, val in child.attrib.items():
            if redefine and attr in UNREDEFINABLE:
                # UNREDEFINABLE concerns only 'variable' node so we can fix name
                # to child.attrib['name']
                name = child.attrib['name']
                raise CreoleDictConsistencyError(_("cannot redefine attribute {} for variable {}").format(attr, name).encode('utf8'))
            if isinstance(getattr(creoleobj, attr, None), bool):
                # attribute's class default is a bool: coerce the XML string
                if val == 'False':
                    val = False
                elif val == 'True':
                    val = True
                else: # pragma: no cover
                    raise CreoleOperationError(_('value for {} must be True or False, '
                                                 'not {}').format(attr, val).encode('utf8'))
            # never overwrite an already-set name
            if not (attr == 'name' and getattr(creoleobj, 'name', None) != None):
                setattr(creoleobj, attr, val)
|
||||
|
||||
    def _creoleobj_tree_visitor(self, child, creoleobj, namespace):
        """Creole object tree manipulations: apply remove_check /
        remove_condition directives, tag redefined calculation targets and
        record ordering/namespace metadata.
        """
        if child.tag == 'variable' and child.attrib.get('remove_check', False):
            self.remove_check(creoleobj.name)
        if child.tag == 'variable' and child.attrib.get('remove_condition', False):
            self.remove_condition(creoleobj.name)
        if child.tag in ['auto', 'fill', 'check']:
            variable_name = child.attrib['target']
            # XXX not working with variable not in creole and in master/slave
            if variable_name in self.redefine_variables:
                creoleobj.redefine = True
            else:
                creoleobj.redefine = False
            # keep the first-seen parse index (used later for reordering)
            if not hasattr(creoleobj, 'index'):
                creoleobj.index = self.index
        if child.tag in ['auto', 'fill', 'condition', 'check', 'action']:
            creoleobj.namespace = namespace
|
||||
|
||||
    def xml_parse_document(self, document, space, namespace, is_in_family=False):
        """Parses a Creole XML file
        populates the CreoleObjSpace

        :param document: lxml element whose children are walked recursively
        :param space: parent space object receiving the created objects
        :param namespace: dictionary namespace ('creole' or an extra one)
        :param is_in_family: True once a <family> ancestor has been crossed
        :raises CreoleDictConsistencyError: if a family name is duplicated
        """
        family_names = []
        for child in document:
            # this index enables us to reorder the 'fill' and 'auto' objects
            self.index += 1
            # doesn't proceed the XML commentaries
            if not isinstance(child.tag, str):
                continue
            if child.tag == 'family':
                is_in_family = True
                if child.attrib['name'] in family_names:
                    raise CreoleDictConsistencyError(_('Family {} is set several times').format(child.attrib['name']).encode('utf8'))
                family_names.append(child.attrib['name'])
            if child.tag == 'variables':
                # the <variables> root takes the namespace as its name
                child.attrib['name'] = namespace
            if HIGH_COMPATIBILITY and child.tag == 'value' and child.text == None:
                # compat mode: skip empty <value/> nodes entirely
                continue
            # creole objects creation
            try:
                creoleobj = self.generate_creoleobj(child, space, namespace)
            except SpaceObjShallNotBeUpdated:
                continue
            self._set_text_to_obj(child, creoleobj)
            self._set_xml_attributes_to_obj(child, creoleobj)
            self._creoleobj_tree_visitor(child, creoleobj, namespace)
            self._fill_creoleobj_path_attribute(space, child, namespace, document, creoleobj)
            self._add_to_tree_structure(creoleobj, space, child)
            if list(child) != []:
                # recurse into the node's children
                self.xml_parse_document(child, creoleobj, namespace, is_in_family)
|
||||
|
||||
    def _fill_creoleobj_path_attribute(self, space, child, namespace, document, creoleobj): # pylint: disable=R0913
        """Fill self.paths attributes

        Registers variables and families in the global path registry and
        records redefined variable names for later 'auto'/'fill' handling.
        """
        if not isinstance(space, self.help): # pylint: disable=E1101
            if child.tag == 'variable':
                # the enclosing <family> (or <variables>) node names the family
                family_name = normalize_family(document.attrib['name'])
                self.paths.append('variable', child.attrib['name'], namespace, family_name,
                                  creoleobj)
                if child.attrib.get('redefine', 'False') == 'True':
                    # remember redefinitions; extra namespaces use full paths
                    if namespace == 'creole':
                        self.redefine_variables.append(child.attrib['name'])
                    else:
                        self.redefine_variables.append(namespace + '.' + family_name + '.' +
                                                       child.attrib['name'])

            if child.tag == 'family':
                family_name = normalize_family(child.attrib['name'])
                if namespace != 'creole':
                    family_name = namespace + '.' + family_name
                self.paths.append('family', family_name, namespace, creoleobj=creoleobj)
                creoleobj.path = self.paths.get_family_path(family_name, namespace)
|
||||
|
||||
def create_or_populate_from_xml(self, namespace, xmlfolders, from_zephir=None):
|
||||
"""Parses a bunch of XML files
|
||||
populates the CreoleObjSpace
|
||||
"""
|
||||
documents = self.xmlreflector.load_xml_from_folders(xmlfolders, from_zephir)
|
||||
for xmlfile, document in documents:
|
||||
try:
|
||||
self.redefine_variables = []
|
||||
self.xml_parse_document(document, self.space, namespace)
|
||||
except Exception as err:
|
||||
#print(_('error in XML file {}').format(xmlfile))
|
||||
raise err
|
||||
|
||||
def populate_from_zephir(self, namespace, xmlfile):
|
||||
self.redefine_variables = []
|
||||
document = self.xmlreflector.parse_xmlfile(xmlfile, from_zephir=True, zephir2=True)
|
||||
self.xml_parse_document(document, self.space, namespace)
|
||||
|
||||
    def space_visitor(self, eosfunc_file): # pylint: disable=C0111
        """Run the annotation passes over the populated object space.

        :param eosfunc_file: eos functions file handed to SpaceAnnotator
        """
        ActionAnnotator(self.space, self.paths, self)
        ContainerAnnotator(self.space, self.paths, self)
        SpaceAnnotator(self.space, self.paths, self, eosfunc_file)
|
||||
|
||||
def save(self, filename, force_no_save=False):
|
||||
"""Save an XML output on disk
|
||||
|
||||
:param filename: the full XML filename
|
||||
"""
|
||||
xml = Element('creole')
|
||||
self._xml_export(xml, self.space)
|
||||
if not force_no_save:
|
||||
self.xmlreflector.save_xmlfile(filename, xml)
|
||||
return xml
|
||||
|
||||
    def save_probes(self, filename, force_no_save=False):
        """Dump the probe variable definitions as JSON on disk.

        :param filename: the full JSON filename
        :param force_no_save: when True, build the dict without writing it
        :return: mapping of target variable -> {'function', 'args', 'kwargs'}
        :raises Exception: when a <param> node has an unexpected shape
        """
        ret = {}
        for variable in self.probe_variables:
            args = []
            kwargs = {}
            if hasattr(variable, 'param'):
                for param in variable.param:
                    # a param carries either bare text (positional) or
                    # text + name (keyword); 'index' is bookkeeping only
                    list_param = list(vars(param).keys())
                    if 'index' in list_param:
                        list_param.remove('index')
                    if list_param == ['text']:
                        args.append(param.text)
                    elif list_param == ['text', 'name']:
                        kwargs[param.name] = param.text
                    else:
                        print(vars(param))
                        raise Exception('hu?')
            ret[variable.target] = {'function': variable.name,
                                    'args': args,
                                    'kwargs': kwargs}
        if not force_no_save:
            with open(filename, 'w') as fhj:
                dump(ret, fhj)
        return ret
|
||||
|
||||
def _get_attributes(self, space): # pylint: disable=R0201
|
||||
for attr in dir(space):
|
||||
if not attr.startswith('_'):
|
||||
yield attr
|
||||
|
||||
    def _sub_xml_export(self, name, node, node_name, space):
        """Export one space attribute into the lxml tree.

        Dispatches on the runtime type of *space*: dict/list containers are
        expanded child by child, Atom/Redefinable objects become nested
        <family>-like nodes, and scalar values become attributes, node text
        or <property> children depending on the name/tag combination.
        """
        if isinstance(space, dict):
            # dict containers are exported like ordered lists
            space = list(space.values())
        if isinstance(space, list):
            for subspace in space:
                if isinstance(subspace, self.Master):
                    _name = 'master'
                    # master node borrows its doc from its first variable
                    subspace.doc = subspace.variable[0].description
                    #subspace.doc = 'Master {}'.format(subspace.name)
                else:
                    _name = name
                    if name in ['containers', 'variables', 'actions']:
                        _name = 'family'
                if HIGH_COMPATIBILITY and not hasattr(subspace, 'doc'):
                    subspace.doc = ''
                if _name == 'value' and (not hasattr(subspace, 'name') or subspace.name is None):
                    # nameless <value> nodes are dropped
                    continue
                child_node = SubElement(node, _name)
                self._xml_export(child_node, subspace, _name)
        elif isinstance(space, self.Atom):
            if name == 'containers':
                child_node = SubElement(node, 'family')
                child_node.attrib['name'] = name
            else:
                child_node = SubElement(node, name)
            for subname in self._get_attributes(space):
                subspace = getattr(space, subname)
                self._sub_xml_export(subname, child_node, name, subspace)
        elif isinstance(space, self.Redefinable):
            child_node = SubElement(node, 'family')
            child_node.attrib['name'] = name
            for subname in self._get_attributes(space):
                subspace = getattr(space, subname)
                self._sub_xml_export(subname, child_node, name, subspace)
        else:
            # scalar attribute of the current node
            if name in PROPERTIES and node.tag == 'variable':
                # boolean properties expand to <property> children
                if space is True:
                    for prop in CONVERT_PROPERTIES.get(name, [name]):
                        if sys.version_info[0] < 3:
                            SubElement(node, 'property').text = unicode(prop)
                        else:
                            SubElement(node, 'property').text = prop

            elif name not in ERASED_ATTRIBUTES:
                if name == 'name' and node_name in self.forced_text_elts_as_name:
                    # the name is carried by the node text for these tags
                    if sys.version_info[0] < 3 and isinstance(space, unicode):
                        node.text = space
                    elif isinstance(space, str):
                        if sys.version_info[0] < 3:
                            node.text = space.decode('utf8')
                        else:
                            node.text = space
                    else:
                        node.text = str(space)
                elif name == 'text' and node_name in self.forced_text_elts:
                    node.text = space
                elif node.tag == 'family' and name == 'name':
                    # family name doubles as its doc when none is set
                    if 'doc' not in node.attrib.keys():
                        node.attrib['doc'] = space
                    node.attrib['name'] = normalize_family(space, check_name=False)
                elif node.tag == 'variable' and name == 'mode':
                    if space is not None:
                        SubElement(node, 'property').text = space
                else:
                    if name in RENAME_ATTIBUTES:
                        name = RENAME_ATTIBUTES[name]
                    if space is not None:
                        if sys.version_info[0] < 3:
                            node.attrib[name] = unicode(space)
                        else:
                            node.attrib[name] = str(space)
|
||||
|
||||
def _xml_export(self, node, space, node_name='creole'):
|
||||
for name in self._get_attributes(space):
|
||||
subspace = getattr(space, name)
|
||||
self._sub_xml_export(name, node, node_name, subspace)
|
||||
|
||||
|
||||
class Path(object):
    """Helper class to handle the `path` attribute of a CreoleObjSpace
    instance.

    sample: path="creole.general.condition"
    """
    def __init__(self):
        # full variable path -> {name, family, namespace, master, creoleobj}
        self.variables = {}
        # family path -> {name, namespace, creoleobj}
        self.families = {}

    def append(self, pathtype, name, namespace, family=None, creoleobj=None): # pylint: disable=C0111
        """Register a family or a variable in the path registry."""
        if pathtype == 'family':
            self.families[name] = dict(name=name, namespace=namespace, creoleobj=creoleobj)
        elif pathtype == 'variable':
            # variables outside the 'creole' namespace are keyed by full path
            if namespace == 'creole':
                varname = name
            else:
                if '.' in name:
                    varname = name
                else:
                    varname = '.'.join([namespace, family, name])
            self.variables[varname] = dict(name=name, family=family, namespace=namespace,
                                           master=None, creoleobj=creoleobj)
        else: # pragma: no cover
            raise Exception('unknown pathtype {}'.format(pathtype))

    def get_family_path(self, name, current_namespace): # pylint: disable=C0111
        """Return the dotted path of family *name*, enforcing namespace rules."""
        if current_namespace is None: # pragma: no cover
            raise CreoleOperationError('current_namespace must not be None')
        dico = self.families[normalize_family(name, check_name=False)]
        # a family of an extra namespace may only be used from that namespace
        if dico['namespace'] != 'creole' and current_namespace != dico['namespace']:
            raise CreoleDictConsistencyError(_('A family located in the {} namespace '
                                               'shall not be used in the {} namespace').format(
                                                   dico['namespace'], current_namespace).encode('utf8'))
        path = dico['name']
        if dico['namespace'] is not None and '.' not in dico['name']:
            path = '.'.join([dico['namespace'], path])
        return path

    def get_family_namespace(self, name): # pylint: disable=C0111
        """Return the namespace of family *name* (its name if namespace-less)."""
        dico = self.families[name]
        if dico['namespace'] is None:
            return dico['name']
        return dico['namespace']

    def get_family_obj(self, name): # pylint: disable=C0111
        """Return the space object registered for family *name*."""
        if name not in self.families:
            raise CreoleDictConsistencyError(_('unknown family {}').format(name).encode('utf8'))
        dico = self.families[name]
        return dico['creoleobj']

    def get_variable_name(self, name): # pylint: disable=C0111
        """Return the short name of variable *name*."""
        dico = self._get_variable(name)
        return dico['name']

    def get_variable_obj(self, name): # pylint: disable=C0111
        """Return the space object registered for variable *name*."""
        dico = self._get_variable(name)
        return dico['creoleobj']

    def get_variable_family_name(self, name): # pylint: disable=C0111
        """Return the family name of variable *name*."""
        dico = self._get_variable(name)
        return dico['family']

    def get_variable_family_path(self, name): # pylint: disable=C0111
        """Return the dotted path of the family (and master) of *name*."""
        dico = self._get_variable(name)
        list_path = [dico['namespace'], dico['family']]
        if dico['master'] is not None:
            list_path.append(dico['master'])
        return '.'.join(list_path)

    def get_variable_namespace(self, name): # pylint: disable=C0111
        """Return the namespace of variable *name*."""
        return self._get_variable(name)['namespace']

    def get_variable_path(self, name, current_namespace, allow_source=False): # pylint: disable=C0111
        """Return the full dotted path of variable *name*.

        :param allow_source: skip the cross-namespace consistency check
        """
        if current_namespace is None: # pragma: no cover
            raise CreoleOperationError('current_namespace must not be None')
        dico = self._get_variable(name)
        if not allow_source:
            if dico['namespace'] != 'creole' and current_namespace != dico['namespace']:
                raise CreoleDictConsistencyError(_('A variable located in the {} namespace '
                                                   'shall not be used in the {} namespace').format(
                                                       dico['namespace'], current_namespace).encode('utf8'))
        if '.' in dico['name']:
            # already a full path
            return dico['name']
        list_path = [dico['namespace'], dico['family']]
        if dico['master'] is not None:
            list_path.append(dico['master'])
        list_path.append(dico['name'])
        return '.'.join(list_path)

    def path_is_defined(self, name): # pylint: disable=C0111
        """Tell whether a variable path *name* is already registered."""
        return name in self.variables

    def set_master(self, name, master): # pylint: disable=C0111
        """Attach variable *name* to *master*, re-keying extra-namespace
        variables under their new full path."""
        dico = self._get_variable(name)
        namespace = dico['namespace']
        if dico['master'] != None:
            # NOTE(review): the .format() is applied before _() here, so the
            # already-formatted string is looked up for translation
            raise CreoleDictConsistencyError(_('Already defined master {} for variable'
                                               ' {}'.format(dico['master'], name)).encode('utf8'))
        dico['master'] = master
        if namespace != 'creole':
            new_path = self.get_variable_path(name, namespace)
            self.append('variable', new_path, namespace, family=dico['family'], creoleobj=dico['creoleobj'])
            self.variables[new_path]['master'] = master
            del self.variables[name]

    def _get_variable(self, name):
        """Return the registry entry for *name* or raise a consistency error."""
        if name not in self.variables:
            if name.startswith('creole.'):
                raise CreoleDictConsistencyError(
                    _("don't set full path variable in creole's namespace "
                      "(set '{}' not '{}')").format(name.split('.')[-1], name).encode('utf8'))
            raise CreoleDictConsistencyError(_('unknown option {}').format(name).encode('utf8'))
        return self.variables[name]

    def get_master(self, name): # pylint: disable=C0111
        """Return the master of variable *name* (None when unattached)."""
        dico = self._get_variable(name)
        return dico['master']
|
|
@ -0,0 +1,1006 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""Apply configuration of EOLE servers.
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
import argparse
|
||||
import time
|
||||
import shutil
|
||||
|
||||
from glob import glob
|
||||
|
||||
import spwd
|
||||
import getpass
|
||||
from itertools import count
|
||||
|
||||
from pyeole.log import getLogger
|
||||
from pyeole.log import init_logging
|
||||
from pyeole.log import set_formatter
|
||||
from pyeole.log import set_filters
|
||||
|
||||
from pyeole import scriptargs
|
||||
from pyeole.lock import acquire, release
|
||||
from pyeole import process
|
||||
from pyeole.schedule import display_schedules, apply_schedules
|
||||
from pyeole import ihm
|
||||
from pyeole.pkg import report, EolePkg, _configure_sources_mirror, _MIRROR_DIST
|
||||
from pyeole.pkg import PackageNotFoundError, RepositoryError, AptProxyError, AptCacherError
|
||||
from pyeole.service import manage_service, unmanaged_service, manage_services, \
|
||||
ServiceError
|
||||
from pyeole.encode import normalize
|
||||
from pyeole.diagnose.diagnose import MAJ_SUCCES_LOCK
|
||||
|
||||
from .error import FileNotFound, LockError, UnlockError
|
||||
from .error import UserExit, UserExitError
|
||||
from .error import VirtError
|
||||
from .client import CreoleClient, CreoleClientError, NotFoundError
|
||||
import fonctionseole, template, cert
|
||||
from .eosfunc import is_instanciate
|
||||
from .config import configeol, INSTANCE_LOCKFILE, UPGRADE_LOCKFILE, \
|
||||
container_instance_lockfile, gen_conteneurs_needed, VIRTROOT, charset
|
||||
from .containers import is_lxc_enabled, is_lxc_running, is_lxc_started, \
|
||||
generate_lxc_container, create_mount_point, lxc_need_restart
|
||||
from .error import NetworkConfigError
|
||||
|
||||
from pyeole.i18n import i18n
|
||||
_ = i18n('creole')
|
||||
|
||||
try:
|
||||
from zephir.lib_zephir import lock, unlock
|
||||
zephir_libs = True
|
||||
except Exception:
|
||||
zephir_libs = False
|
||||
|
||||
client = CreoleClient()

# Package manager singleton, lazily created by load_pkgmgr().
# NOTE: a ``global`` statement at module scope is a no-op, so the original
# ``global PKGMGR`` line was removed; functions that rebind PKGMGR still
# declare ``global PKGMGR`` themselves.
PKGMGR = None

error_msg_documentation = _(u"""For more informations, read section
'Mise en œuvre des modules EOLE' in module documentation or
common documentation.""")
|
||||
def load_pkgmgr():
    """Initialize the module-wide package manager singleton ``PKGMGR``.

    Idempotent: does nothing when ``PKGMGR`` is already set.  Builds an
    apt-backed :class:`EolePkg`, feeds it the container group cache, then
    probes each configured EOLE/Ubuntu/Envole mirror found in the apt
    source list.  Finally exposes the manager to ``fonctionseole``.
    """
    global PKGMGR
    if PKGMGR is None:
        # Ensure CACHE is populated (groups, container infos, lxc flag).
        cache()
        PKGMGR = EolePkg('apt', container_mode=CACHE['is_lxc_enabled'])
        PKGMGR.pkgmgr.groups = CACHE
        PKGMGR.pkgmgr._load_apt_cache()
        # Candidate mirror URLs built from the server configuration.
        eoles = []
        for eole in client.get_creole(u'serveur_maj'):
            eoles.append('http://{0}/eole/'.format(eole))
        ubuntus = []
        for ubuntu in client.get_creole(u'ubuntu_update_mirrors'):
            ubuntus.append('http://{0}/ubuntu/'.format(ubuntu))
        envoles = []
        try:
            for envole in client.get_creole(u'envole_update_mirrors'):
                envoles.append('http://{0}/envole/'.format(envole))
        except NotFoundError:
            # Envole mirrors are optional; the variable may not exist.
            pass
        # Test each mirror at most once: the candidate list is emptied
        # after the first matching apt source entry.
        for cache_ in PKGMGR.pkgmgr.cache._list.list:
            if cache_.uri in eoles:
                PKGMGR.pkgmgr._test_mirror(cache_.uri, _MIRROR_DIST['EOLE'])
                eoles = []
            if cache_.uri in ubuntus:
                PKGMGR.pkgmgr._test_mirror(cache_.uri, _MIRROR_DIST['Ubuntu'])
                ubuntus = []
            if cache_.uri in envoles:
                PKGMGR.pkgmgr._test_mirror(cache_.uri, _MIRROR_DIST['Envole'])
                envoles = []
        fonctionseole.PkgManager = PKGMGR
|
||||
|
||||
# Log file shared by all loggers configured in main().
_LOGFILENAME = '/var/log/reconfigure.log'
|
||||
|
||||
# Command line options
|
||||
class Option:
    """Hold command-line options with defaults.

    Instantiated once at module level; ``__init__`` parses an empty
    argument list so every option carries its default value until
    :meth:`update_from_cmdline` is called by :func:`main`.
    Unknown attribute reads are delegated to the parsed namespace.
    """
    def __init__(self):
        # Parser inherits --container and logging flags from pyeole.scriptargs.
        self.parser = argparse.ArgumentParser(
            description=_(u"Applying EOLE configuration."),
            parents=[scriptargs.container(),
                     scriptargs.logging(level='info')])
        self.parser.add_argument('-i', '--interactive', action='store_true',
                                 help=_(u"leave process in interactive mode"))
        self.parser.add_argument('-f', '--force', action='store_true',
                                 help=_(u"override Zéphir lock"))
        self.parser.add_argument('-a', '--auto', action='store_true',
                                 help=_(u"automatic reboot if necessary"))
        # Parse an empty argv to obtain the defaults.
        self.__opts = self.parser.parse_args([])

    def update_from_cmdline(self, force_args=None, force_options=None):
        """Parse command line.

        :param force_args: argv list to parse instead of ``sys.argv``
        :param force_options: dict of option values overriding parsed ones
        """
        self.__opts = self.parser.parse_args(force_args)
        # --verbose / --debug shortcuts mapped onto log_level.
        if self.__opts.verbose:
            self.__opts.log_level = 'info'
        if self.__opts.debug:
            self.__opts.log_level = 'debug'
        if force_options is not None:
            for key, value in force_options.items():
                setattr(self.__opts, key, value)
        # Mirror the namespace onto the instance for direct attribute access.
        self.__dict__.update(self.__opts.__dict__)

    def __getattr__(self, name):
        # Only called when normal lookup fails; fall back to the namespace.
        # NOTE(review): '__opts' here is pre-mangling ('_Option__opts' is the
        # actual attribute name), so this branch is effectively defensive.
        if name in ['__opts', 'update_from_cmdline']:
            return self.__dict__[name]
        else:
            return getattr(self.__opts, name)
|
||||
|
||||
# Module-wide option holder; refreshed from argv in main().
options = Option()

# Module-level logger, reassigned by init_logging() in main().
log = getLogger(__name__)

# Same lock name for instance and reconfigure: the two procedures are
# mutually exclusive.
LOCK_NAME = u'reconfigure'

# Hook scripts live under RUNPARTS_PATH/<stage>/ and are executed with
# run-parts; {compat} receives the current procedure name.
RUNPARTS_PATH = u'/usr/share/eole'
RUNPARTS_CMD = u'/bin/run-parts --exit-on-error -v {directory} --arg {compat} 2>&1'

# Current procedure name ('instance' or 'reconfigure'), see reset_compat_name().
COMPAT_NAME = u'reconfigure'
|
||||
|
||||
#def parse_cmdline():
|
||||
# """Parse command line
|
||||
# """
|
||||
# descr = u"Application de la configuration EOLE"
|
||||
# parser = argparse.ArgumentParser(description=descr,
|
||||
# parents=[scriptargs.container(),
|
||||
# scriptargs.logging(level='info')])
|
||||
# parser.add_argument('-i', '--interactive', action='store_true',
|
||||
# help=u"lancer le processus en mode interactif")
|
||||
# parser.add_argument('-f', '--force', action='store_true',
|
||||
# help=u"force l'action même s'il existe des verrous")
|
||||
# parser.add_argument('-a', '--auto', action='store_true',
|
||||
# help=u"redémarrage automatique si nécessaire")
|
||||
#
|
||||
# opts = parser.parse_args()
|
||||
# if opts.verbose:
|
||||
# opts.log_level = 'info'
|
||||
# if opts.debug:
|
||||
# opts.log_level = 'debug'
|
||||
# return opts
|
||||
|
||||
def copyDirectoryContent(src, dst):
    """Copy every regular entry of directory *src* into directory *dst*.

    Symbolic links are skipped (following them would overwrite existing
    certificates), and files already present in *dst* are left untouched.

    :param src: source directory path
    :param dst: destination directory path
    """
    for fic in os.listdir(src):
        # Skip links or we overwrite existing certificates.
        if os.path.islink(os.path.join(src, fic)):
            continue
        try:
            shutil.copy2(os.path.join(src, fic), dst)
        except shutil.Error:
            # Deliberate best-effort: ignore files that already exist.
            pass
|
||||
|
||||
def user_exit(*args, **kwargs):
    """Perform a "clean" user-requested abort.

    Installed as a signal handler by prepare(); warns that the system may
    be left half-configured, then aborts by raising UserExitError.
    """
    for warning in (_(u'! Abandoning configuration !'),
                    _(u'System may be in an incoherent state.\n\n')):
        log.warn(warning)
    raise UserExitError()
|
||||
|
||||
def unlock_actions(need_lock=True):
    """Release the system-level action lock (and the Zéphir one if present).

    :param need_lock: when False, a failure to release is silently ignored
    :raises UnlockError: release failed and *need_lock* is True
    """
    if zephir_libs:
        #FIXME: Zephir lock!
        unlock('actions')
    try:
        release(LOCK_NAME, level='system')
    except Exception as err:
        # FIXME: move lock exception to pyeole.lock #7400
        if need_lock:
            raise UnlockError(str(err))
|
||||
|
||||
def lock_actions():
    """Acquire the system-level action lock (and the Zéphir one if present).

    :raises LockError: the system lock could not be acquired
    """
    try:
        acquire(LOCK_NAME, level="system")
    except Exception as err:
        # FIXME: move lock exception to pyeole.lock #7400
        raise LockError(str(err))
    if zephir_libs:
        #FIXME: Zephir lock!
        lock('actions')
|
||||
|
||||
def reset_compat_name():
    """Reset the name of the running procedure from the environment.

    Interactive runs are an 'instance', non-interactive ones a
    'reconfigure'.
    """
    global COMPAT_NAME
    COMPAT_NAME = u'instance' if options.interactive else u'reconfigure'
|
||||
|
||||
def run_parts(directory):
    """Run every hook script found under RUNPARTS_PATH/<directory>.

    Missing directories are silently skipped.

    @param directory: name of a directory
    @type directory: C{str}
    """
    target = os.path.join(RUNPARTS_PATH, directory)
    if not os.path.isdir(target):
        return
    ihm.print_title(_(u'Running scripts {0}').format(directory))
    status = os.system(RUNPARTS_CMD.format(directory=target,
                                           compat=COMPAT_NAME))
    if status != 0:
        raise Exception(_(u'Error {0}').format(directory))
|
||||
|
||||
def restart_creoled():
    """Restart the creoled service and verify that the client can reach it.

    A trivial variable lookup is used as a health check; on failure the
    error is re-raised with a user-actionable message.
    """
    unmanaged_service(u'restart', u'creoled', u'service', display='console')
    try:
        # Health check: any successful lookup proves the daemon answers.
        client.get_creole(u'eole_version')
    except CreoleClientError:
        msg = _(u"Please check creoled's log (/var/log/rsyslog/local/creoled/creoled.info.log)\nand restart service with command 'service creoled start'")
        raise CreoleClientError(msg)
|
||||
|
||||
def prepare(need_lock=True):
|
||||
"""Sanity checks.
|
||||
|
||||
"""
|
||||
global RUNPARTS_CMD
|
||||
# Clean exit
|
||||
if need_lock:
|
||||
ihm.catch_signal(user_exit)
|
||||
lock_actions()
|
||||
|
||||
if options.container != None:
|
||||
RUNPARTS_CMD += u" --regex '^[09][09]-{0}$'".format(options.container)
|
||||
|
||||
ihm.print_title(_(u"Preparation for {0}").format(COMPAT_NAME))
|
||||
|
||||
if not os.path.isfile(configeol):
|
||||
print _(u"Server is not configured.")
|
||||
print
|
||||
print error_msg_documentation
|
||||
print
|
||||
raise FileNotFound(_(u'Missing file {0}.').format(configeol))
|
||||
|
||||
display_info = False
|
||||
|
||||
if not options.interactive and (is_instanciate() == 'non' or os.path.isfile(UPGRADE_LOCKFILE)):
|
||||
ihm.print_red(_(u"Server must be instantiated before any reconfiguration can occur."))
|
||||
display_info = True
|
||||
|
||||
if options.interactive and is_instanciate() == 'oui' and \
|
||||
not os.path.isfile(UPGRADE_LOCKFILE) and \
|
||||
not os.path.isfile(container_instance_lockfile):
|
||||
ihm.print_red(_(u"Server already instantiated."))
|
||||
print
|
||||
print _(u"To modify configuration parameter (e.g. IP address), use:")
|
||||
print _(u"'gen_config'")
|
||||
print _(u"then 'reconfigure' to apply changes.")
|
||||
display_info = True
|
||||
|
||||
if os.path.isfile(container_instance_lockfile) and not options.interactive:
|
||||
raise Exception(_('you have run gen_conteneurs, please use instance instead of reconfigure'))
|
||||
|
||||
if os.path.isfile(gen_conteneurs_needed):
|
||||
raise Exception(_('You have to run gen_conteneurs before instance'))
|
||||
|
||||
if display_info:
|
||||
print
|
||||
print error_msg_documentation
|
||||
print
|
||||
if not options.interactive:
|
||||
raise Exception(_(u"First instantiate server."))
|
||||
else:
|
||||
if ihm.prompt_boolean(_(u"Proceeding with instantiation ?"),
|
||||
interactive=options.interactive,
|
||||
default=False) is False:
|
||||
raise UserExit()
|
||||
else:
|
||||
fonctionseole.zephir("MSG", "Instance forcée par l'utilisateur",
|
||||
COMPAT_NAME.upper())
|
||||
|
||||
# redémarrage du service creoled
|
||||
restart_creoled()
|
||||
|
||||
if fonctionseole.init_proc(COMPAT_NAME.upper()) == False and not options.force:
|
||||
log.warn(_(u"This process is blocked, contact Zéphir administrator."))
|
||||
if ihm.prompt_boolean(_(u"Force execution?"),
|
||||
interactive=options.interactive,
|
||||
default=False) is False:
|
||||
if not options.interactive:
|
||||
log.warn(_(u"Use -f option if you want to force execution"))
|
||||
raise UserExitError()
|
||||
else:
|
||||
fonctionseole.zephir("MSG",
|
||||
"Instance forcée par l'utilisateur",
|
||||
COMPAT_NAME.upper())
|
||||
|
||||
|
||||
def valid_mandatory(need_lock):
|
||||
try:
|
||||
client.valid_mandatory()
|
||||
except Exception, err:
|
||||
log.warn(_('Configuration validation problem, please check server configuration.'))
|
||||
print
|
||||
print error_msg_documentation
|
||||
print
|
||||
unlock_actions(need_lock)
|
||||
raise ValueError(str(err))
|
||||
|
||||
def _start_containers():
    """Try to start all LXC containers and make sure they are running.

    Starts lxc-net and lxc through systemd, then polls up to ``max_try``
    times: stopped containers are started, frozen ones unfrozen, and
    running ones checked for SSH reachability.

    :raises VirtError: some containers are still not running at the end
    """
    cache()
    for group_name in CACHE['groups_container']:
        group = CACHE['group_infos'][group_name]
        create_mount_point(group)

    if os.access('/usr/share/eole/preservice/00-lxc-net', os.X_OK):
        log.debug("Override lxc-net systemd script")
        process.system_code(['/usr/share/eole/preservice/00-lxc-net'])

    unmanaged_service(u'start', u'lxc-net', u'systemd', display='console', ctx=CACHE['group_infos']['root'])
    try:
        unmanaged_service(u'status', u'lxc', u'systemd')
    except ServiceError:
        unmanaged_service(u'start', u'lxc', u'systemd', display='console', ctx=CACHE['group_infos']['root'])
    # If lxc is not started, do not wait for it (the systemd service
    # already waits); if started, wait for SSH access.

    max_try = 10
    # BUGFIX (shadowing): loop variable was named `count`, hiding the
    # module-level `itertools.count` import.
    for attempt in range(max_try):
        s_code, s_out, s_err = process.system_out(['lxc-ls', '--stopped'])
        stopped = s_out.split()
        f_code, f_out, f_err = process.system_out(['lxc-ls', '--frozen'])
        frozen = f_out.split()

        if stopped or frozen:
            not_running = stopped + frozen
        else:
            # Everything is started by LXC.
            # Are they reachable by SSH?
            not_running = []
            for group_name in CACHE['groups_container']:
                group_infos = CACHE['group_infos'][group_name]
                if not is_lxc_running(group_infos):
                    not_running.append(group_name)

            log.debug('Waiting 1 second for SSH access')
            time.sleep(1)

        if not not_running:
            break

        if stopped:
            for cont in stopped:
                log.debug('Manual start of stopped container “{0}”'.format(cont))
                process.system_out(['lxc-start', '--name', cont, '--daemon',
                                    '-o', '/var/log/lxc-{0}.log'.format(cont)])

        if frozen:
            for cont in frozen:
                log.debug('Manual unfreeze of frozen container “{0}”'.format(cont))
                process.system_out(['lxc-unfreeze', '--name', cont,
                                    '-o', '/var/log/lxc-{0}.log'.format(cont)])

    if not_running:
        waiting_for = ', '.join(not_running)
        msg = _(u'Unable to start LXC container : {0}',
                u'Unable to start LXC containers : {0}', len(not_running))
        raise VirtError(msg.format(waiting_for))
|
||||
|
||||
|
||||
def containers(minimal=False, log_=None):
    """Generate LXC containers and their base configuration files.

    :param minimal: render only the minimal template set (forced on an
        interactive first instance)
    :param log_: alternative logger (defaults to the module logger)
    :returns: True when container mode is disabled (nothing to do)
    """
    if log_ is None:
        log_ = log
    VAR_LXC='/var/lib/lxc'
    OPT_LXC='/opt/lxc'

    cache()
    if not CACHE['is_lxc_enabled']:
        log.debug(_(u'Container mode is disabled.'))
        return True
    if not options.interactive:
        # reconfigure: containers must already exist.
        for group in CACHE['groups_container']:
            if not os.path.isdir(os.path.join(VIRTROOT, group)):
                raise Exception(_(u'container {0} does not already exist, please use gen_conteneurs to create this container').format(group))
    else:
        # Make /var/lib/lxc a symlink to /opt/lxc (one-time move).
        if os.path.isdir(VAR_LXC) and not os.path.exists(OPT_LXC):
            ihm.print_title(_(u"Setting up {0}").format(OPT_LXC))
            unmanaged_service(u'stop', u'lxc', u'systemd', display='console')
            unmanaged_service(u'stop', u'lxc-net', u'systemd', display='console')
            shutil.move(VAR_LXC, OPT_LXC)
            os.symlink(OPT_LXC, VAR_LXC)
        # First instance should be in minimal mode.
        minimal = True

    ihm.print_title(_(u'Generating containers'))

    engine = template.CreoleTemplateEngine()
    rootctx = CACHE['group_infos']['root']
    if minimal:
        # Inject var _minimal_mode in creole's vars so templates can test it.
        engine.creole_variables_dict['_minimal_mode'] = True
    # Host-side (root context) configuration files.
    engine.instance_file(u'/etc/ssh/ssh_config', ctx=rootctx)
    engine.instance_file(u'/etc/lxc/default.conf', ctx=rootctx)
    engine.instance_file(u'/etc/dnsmasq.d/lxc', ctx=rootctx)
    engine.instance_file(u'/etc/default/lxc-net', ctx=rootctx)
    engine.instance_file(u'/etc/apt/apt.conf.d/02eoleproxy', ctx=rootctx)
    if CACHE['module_instancie'] == 'oui':
        engine.instance_file(u'/etc/resolv.conf', ctx=rootctx)

    load_pkgmgr()
    PKGMGR.pkgmgr._prepare_cache()
    for group in CACHE['groups_container']:
        generate_lxc_container(group)
        groupctx = CACHE['group_infos'][group]
        if minimal:
            # Container definition files (relative to the container's rootfs).
            engine.instance_file(u'../fstab', container=group, ctx=groupctx)
            engine.instance_file(u'../config', container=group, ctx=groupctx)
            engine.instance_file(u'../devices.hook', container=group, ctx=groupctx)
        engine.instance_file(u'/etc/network/interfaces', container=group, ctx=groupctx)
        engine.instance_file(u'/etc/apt/apt.conf.d/02eoleproxy', container=group, ctx=groupctx)
        engine.instance_file(u'/etc/ssh/sshd_config', container=group, ctx=groupctx)
        if CACHE['module_instancie'] == 'oui':
            # Replace a possible symlinked resolv.conf with a real file.
            container_path = os.path.join(groupctx['path'], 'etc/resolv.conf')
            if os.path.islink(container_path):
                os.remove(container_path)
            engine.instance_file(u'/etc/resolv.conf', container=group, ctx=groupctx)
    PKGMGR.pkgmgr._umount_cdrom()

    ihm.print_title(_(u'Starting containers'))
    _start_containers()
|
||||
|
||||
def remove_packages():
    """Remove packages listed in /usr/share/eole/remove.d/*.conf files,
    plus obsolete kernels reported by fonctionseole.

    Removal failures are logged only: the packages will be retried on the
    next 'reconfigure'.
    """
    torm_conf = glob(u'/usr/share/eole/remove.d/*.conf')
    pkg_list = []
    for config in torm_conf:
        try:
            # `with` guarantees the handle is closed even on a read error.
            with open(config, 'r') as f_h:
                for line in f_h.readlines():
                    pkg_list.append(line.strip('\n'))
        except IOError as err:
            log.error(_(u'Can not read file {0}: {1}').format(config, err))

    try:
        load_pkgmgr()
    except (RepositoryError, AptProxyError, AptCacherError):
        # Best-effort: a broken mirror must not abort the procedure.
        pass

    kernels = fonctionseole.get_kernel_to_remove()

    if kernels:
        ihm.print_line(_(u"Removing old linux kernels and associate headers."))
        pkg_list.extend(kernels)

    if pkg_list:
        try:
            PKGMGR.remove(packages=pkg_list)
        except (PackageNotFoundError, SystemError) as err:
            msg = _(u'Unable to remove some packages: {0}')
            log.warn(msg.format(err))
            log.warn(_(u"These packages will be removed next 'reconfigure'"))
|
||||
|
||||
|
||||
# Process-wide cache of container/group information; populated once by cache().
CACHE = {}
def cache():
    """Populate the module-wide ``CACHE`` dict on first call.

    Keys: 'groups' (all group names), 'groups_container' (groups other
    than 'root'/'all'), 'group_infos' (per-group info dicts),
    'is_lxc_enabled', 'module_instancie'.  Subsequent calls are no-ops.
    """
    global CACHE
    if not 'groups' in CACHE:
        CACHE['groups'] = client.get_groups()
        CACHE['groups_container'] = []
        for group in CACHE['groups']:
            # 'root' and 'all' are pseudo-groups, not actual containers.
            if group not in ['root', 'all']:
                CACHE['groups_container'].append(group)
        CACHE['group_infos'] = {}
        for group_name in CACHE['groups']:
            group_infos = client.get_group_infos(group_name)
            CACHE['group_infos'][group_name] = group_infos
        CACHE['is_lxc_enabled'] = is_lxc_enabled()
        CACHE['module_instancie'] = client.get_creole('module_instancie')
|
||||
|
||||
|
||||
|
||||
def install_packages(silent=False):
    """Install the declared packages of every container group.

    :param silent: forwarded to PKGMGR.install
    """
    load_pkgmgr()

    cache()
    header = _(u'Checking Packages for container')
    for name, infos in CACHE['group_infos'].items():
        wanted = [pkg[u'name'] for pkg in infos[u'packages']]
        if not wanted:
            continue
        ihm.print_line('{0} {1}: {2}'.format(header, name, ' '.join(wanted)))
        PKGMGR.install(packages=wanted,
                       silent=silent,
                       container=infos[u'name'])
|
||||
|
||||
|
||||
def packages():
    """Manage packages: removals first, then installations."""
    ihm.print_title(_(u'Managing packages'))
    steps = ((_(u' Removing packages'), _(u'Removing packages'), remove_packages),
             (_(u' Installing packages'), _(u'Installing packages'), install_packages))
    for info_msg, line_msg, action in steps:
        log.info(info_msg)
        ihm.print_line(line_msg)
        action()
|
||||
|
||||
|
||||
def templates():
    """Run the template engine over all configuration files.

    Errors are logged (with traceback in debug mode) and re-raised.
    """
    ihm.print_title(_(u'Generating configuration files'))
    log.info(_(u'Generating configuration files'))
    cache()
    try:
        tmpl = template.CreoleTemplateEngine()
        tmpl.instance_files(container=options.container, containers_ctx=CACHE['group_infos'].values())
    except Exception as err:
        if options.debug:
            log.debug(err, exc_info=True)
        else:
            log.error(err)
        # Bare raise preserves the original traceback (raise err did not).
        raise
|
||||
|
||||
|
||||
def services(action, display_title=True, try_restart_lxc=True):
    """Apply *action* ('stop', 'start' or 'configure') to managed services.

    :param display_title: print a section title first
    :param try_restart_lxc: on 'start', restart lxc/lxc-net when needed
    :raises ValueError: unknown *action*
    """
    cache()
    exclude = None
    if action == u'stop':
        if display_title:
            ihm.print_title(_(u"Stopping services"))
        # Never stop networking from here.
        exclude = (('root', 'networking'),)
    elif action == u'start':
        if display_title:
            ihm.print_title(_(u"Starting services"))
        # Do not start the certbot service: it is a oneshot unit and
        # pyeole.service does not cope with it (#22092).
        exclude = (('root', 'networking'), ('root', 'certbot'))
        ctx = CACHE['group_infos']['root']
        manage_services(action, u'networking', display='console', containers_ctx=[ctx])
        if try_restart_lxc and CACHE['is_lxc_enabled']:
            if lxc_need_restart():
                unmanaged_service(u'stop', u'lxc', u'systemd', display='console', ctx=ctx)
                unmanaged_service(u'stop', u'lxc-net', u'systemd', display='console', ctx=ctx)
                _start_containers()
    elif action == u'configure':
        if display_title:
            ihm.print_title(_(u"Configuring services"))
    else:
        raise ValueError(_(u"Unknown service action: {0}").format(action))
    if options.container is not None:
        # BUGFIX: was `options.containers`, an attribute that does not
        # exist (AttributeError whenever --container was used).
        containers_ctx = [CACHE['group_infos'][options.container]]
    else:
        containers_ctx = CACHE['group_infos'].values()
    manage_services(action, container=options.container, display='console', exclude=exclude, containers_ctx=containers_ctx)
|
||||
|
||||
|
||||
def _gen_user_list():
|
||||
"""Generate list of users for password modification
|
||||
|
||||
Start with basic one and ask for supplementary users.
|
||||
"""
|
||||
yield 'root'
|
||||
|
||||
node = client.get_creole(u'activer_onenode', 'non')
|
||||
master = client.get_creole(u'activer_onesinglenode', 'non')
|
||||
if node == 'oui' and master == 'non':
|
||||
yield 'oneadmin'
|
||||
|
||||
for number in count(1):
|
||||
if number == 1:
|
||||
yield 'eole'
|
||||
else:
|
||||
yield 'eole{0}'.format(number)
|
||||
|
||||
|
||||
|
||||
def users():
    """Manage system administrator accounts and their passwords.

    Interactive only (no-op otherwise).  For each candidate account from
    _gen_user_list(): create it if missing (except 'root', which must
    exist), then force a password change when the account still carries a
    default/locked password or on first instance.
    """
    from passlib.context import CryptContext
    ihm.print_title(_(u'Managing system user accounts'))
    # Hash schemes accepted when verifying whether the current password
    # is one of the shipped defaults.
    schemes = [u'sha512_crypt', u'sha256_crypt', u'sha1_crypt', u'md5_crypt']
    cryptctx = CryptContext(schemes=schemes)
    # Factory-default passwords per account.
    default_pass = {u'root': u'$eole&123456$',
                    u'eole': u'$fpmf&123456$',
                    u'oneadmin': u'$eole&123456$'}

    if not options.interactive:
        log.debug(_(u'No system user account management in non-interactive mode.'))
        return

    for user in _gen_user_list():
        try:
            user_infos = spwd.getspnam(user)
        except KeyError:
            # Account does not exist yet.
            if user == u'root':
                msg = _(u"'root' user unknown. This is abnormal.")
                raise Exception(msg)

            # No new administrator with NFS (#16321).
            if user != 'eole' and client.get_creole(u'adresse_serveur_nfs', None) is not None:
                log.warn(_(u'No new EOLE account with /home on NFS'))
                break

            prompt = _('Create new administrator user account {0}?')
            if user != 'eole' and ihm.prompt_boolean(prompt.format(user)) is False:
                break

            msg = _(u"Creating unexistent user {0}")
            log.info(msg.format(user))

            cmd = ['adduser', '--quiet', '--shell', '/usr/bin/manage-eole',
                   '--gecos', '{0} user'.format(user.upper()),
                   '--disabled-password', user]
            code = process.system_code(cmd)
            if code != 0:
                msg = _(u"Unable to create user {0}")
                raise Exception(msg.format(user))

            cmd = ['usermod', '--append', '--groups', 'adm,mail', user]
            code, out, err = process.system_out(cmd)
            if code != 0:
                msg = _(u"Unable to add '{0}' to group 'adm'.")
                raise Exception(msg.format(user))

            # Refresh shadow information for the freshly created account.
            user_infos = spwd.getspnam(user)

        # Custom account with a real password: leave it alone.
        if user not in default_pass and user_infos.sp_pwd not in ['!', '*']:
            msg = _(u"No modification of password of administrator user account {0}.")
            log.warn(msg.format(user))
            continue

        # Change password:
        # - on first instance
        # - if user is not an EOLE default user
        # - if user password match default ones
        if (not os.path.isfile(INSTANCE_LOCKFILE)
                or (user not in default_pass or user_infos.sp_pwd in ['!', '*']
                    or cryptctx.verify(default_pass[user], user_infos.sp_pwd))):

            msg = _(u"# Modificating password for user account {0} #")
            msg = msg.format(user)
            log.warn(u'#' * len(msg))
            log.warn(msg)
            log.warn(u'#' * len(msg))
            max_try = 5
            prompt = u'{0}{1}: '
            first_prompt = _(u"New password")
            second_prompt = _(u"Confirming new password")
            # Attempt counter suffix; empty on the first round, then
            # formatted as ' (n/max)' for subsequent prompts.
            loop_counter = u''
            for attempt in range(1, max_try+2):
                if attempt == max_try+1:
                    msg = _(u"Password input errors for {0}. Abandon.")
                    raise Exception(msg.format(user))

                loop_counter = loop_counter.format(attempt, max_try)
                passwd = getpass.getpass(prompt.format(first_prompt,
                                                       loop_counter))
                confirm_pass = getpass.getpass(prompt.format(second_prompt,
                                                             loop_counter))
                if passwd == confirm_pass:
                    if user in default_pass and default_pass[user] == passwd:
                        log.error(_(u"Can not use default password."))
                    else:
                        # Now we have the password: apply it via chpasswd.
                        stdin = '{0}:{1}'.format(user, passwd)
                        code, stdout, stderr = process.system_out(['chpasswd'],
                                                                  stdin=stdin)
                        if code == 0:
                            msg = _(u'User {0} password updated.')
                            log.info(msg.format(user))
                            # Success
                            break
                        # chpasswd failed: show its output up to 'Try again.'
                        msg = _(u"Error changing password for {0}.")
                        try_again_pos = stdout.find('Try again.')
                        chpassmsg = stdout[0:try_again_pos]
                        log.error(msg.format(user))
                        print chpassmsg
                else:
                    log.error(_(u"Passwords mismatch."))

                # Display counter on the next prompts.
                loop_counter = u' ({0}/{1})'
|
||||
|
||||
|
||||
def certificates():
    """Generate SSL certificates and propagate them to containers.

    :raises Exception: certificate generation failed
    """
    ihm.print_title(_(u'Managing certificates'))
    try:
        # Regenerate SSL certificate hashes; ported from 2.3 (#8488).
        cert.rehash_if_needed()
        cert.gen_certs()
    except Exception as err:
        if options.debug:
            log.debug(err, exc_info=True)
        else:
            log.error(err)
        raise Exception(_(u"Error while generating certificates: {0}").format(err))
    cache()
    if CACHE['is_lxc_enabled']:
        src = os.path.join(cert.ssl_dir, "certs")
        for group_name in CACHE['groups_container']:
            group = CACHE['group_infos'][group_name]
            ihm.print_line(_("Copying certificates in {0}").format(group['name']))
            # Mirror the host certs directory inside the container rootfs.
            dst = os.path.join('/', group['path'].lstrip('/').encode(charset), src.lstrip('/'))
            copyDirectoryContent(src, dst)
            process.system_out(['/usr/bin/c_rehash'], container=group_name)
|
||||
|
||||
|
||||
def param_kernel():
    """Apply kernel parameters from /etc/sysctl.conf via sysctl -p."""
    ihm.print_title(_(u'Applying kernel parameters'))
    os.system('/sbin/sysctl -p >/dev/null')
|
||||
|
||||
def kill_dhclient():
    """Kill any running dhclient when eth0 uses a static IP configuration.

    Prevents a stale DHCP client from overriding the static address.
    """
    if client.get_creole(u'eth0_method') == u'statique':
        os.system('killall dhclient dhclient3 2>/dev/null')
|
||||
|
||||
def finalize(need_lock=True):
    """Clean up after a successful run.

    Reports completion to Zéphir, drops the upgrade/gen_conteneurs lock
    files, creates the instance lock file, and keeps the last two
    versions of config.eol as backups.

    :param need_lock: release the action lock at the end
    """
    ihm.print_title(_(u'Finalizing configuration'))
    # Registration/diagnostic report (best-effort).
    try:
        process.system_out("/usr/share/creole/diag.py")
    except Exception:
        pass
    fonctionseole.zephir("FIN", "Configuration terminée", COMPAT_NAME.upper())
    if not os.path.isfile(INSTANCE_LOCKFILE):
        # Instance completed successfully (#7051).
        # BUGFIX: use open() instead of the Python-2-only file() builtin.
        open(INSTANCE_LOCKFILE, 'w').close()

    if os.path.isfile(UPGRADE_LOCKFILE):
        os.unlink(UPGRADE_LOCKFILE)

    if os.path.isfile(container_instance_lockfile):
        os.unlink(container_instance_lockfile)

    # Keep the last two versions of the configuration (#8455).
    # BUGFIX: translate the format string, then format it —
    # _(u'...'.format(x)) looked up the already-formatted string in the
    # catalog, so the translation could never match.
    old = '{0}.bak'.format(configeol)
    old1 = '{0}.bak.1'.format(configeol)
    if not os.path.isfile(old):
        log.debug(_(u'Backup {0} in {1}').format(configeol, old))
        shutil.copy(configeol, old)
    elif process.system_out(['diff', '-q', configeol, old])[0] == 0:
        log.debug(_(u"{0} was not modified").format(configeol))
    else:
        log.debug(_(u'Backup {0} in {1}').format(old, old1))
        shutil.copy(old, old1)
        log.debug(_(u'Backup {0} in {1}').format(configeol, old))
        shutil.copy(configeol, old)
    if need_lock:
        unlock_actions()
|
||||
|
||||
def update_server():
    """Offer (interactively) to run a network update of the server.

    When the user accepts, refreshes apt indexes; if any container has
    upgradable packages, dist-upgrades and then re-runs main() in
    non-interactive mode before restoring interactive state.
    """
    if os.path.isfile(MAJ_SUCCES_LOCK):
        os.remove(MAJ_SUCCES_LOCK)
    if options.interactive:
        log.info(_(u'Managing update'))

        ihm.print_title(_(u'Updating server'))
        if ihm.prompt_boolean(_(u"""An update is recommended.
Do you want to proceed with network update now ?"""),
                              default=True, level='warn',
                              default_uninteractive=False) is True:
            report(2)
            try:
                load_pkgmgr()
                _configure_sources_mirror(PKGMGR.pkgmgr)
                PKGMGR.update(silent=True)
                upgrades = PKGMGR.get_upgradable_list(silent=True)
                # BUGFIX: the loop used to rebind `upgrades` while
                # iterating upgrades.items(); use a distinct name.
                require_dist_upgrade = False
                for container, container_upgrades in upgrades.items():
                    if container_upgrades:
                        require_dist_upgrade = True
                        break
                if require_dist_upgrade:
                    # At least one container requires an upgrade.
                    PKGMGR.dist_upgrade()
                    # Update lock => OK, deleted at next reconfigure.
                    report(0)
                    # Re-run reconfigure non-interactively.
                    main(force_options={'interactive': False})
                    # Back to instance mode.
                    options.interactive = True
                    reset_compat_name()
                else:
                    log.warn(_(u"No updates available."))
                    report(3)
            except Exception as err:
                report(1, normalize(err))
                # Bare raise preserves the original traceback.
                raise
|
||||
|
||||
|
||||
def schedule():
    """Apply and display the scheduled (cron-like) EOLE tasks."""
    ihm.print_title(_(u'Task scheduling'))
    apply_schedules()
    display_schedules()
    # First run of instance:
    #if not os.path.isfile(schedule.SCHEDULE_FILE):
    #    schedule.add_post_schedule('majauto', 'weekly')
    #schedule.prog_schedule()
|
||||
|
||||
def is_valid_ip_eth0():
    """Return False when eth0 carries the 169.254.0.1 fallback address.

    That address means DHCP failed and no usable IP was obtained.
    """
    return client.get_creole(u'adresse_ip_eth0') != "169.254.0.1"
|
||||
|
||||
|
||||
def reboot_server():
    """Reboot the server if a newer kernel requires it.

    Interactive runs ask for confirmation; non-interactive runs reboot
    only with --auto, otherwise merely notify Zéphir.
    """
    if fonctionseole.controle_kernel():
        if options.interactive:
            print
            if ihm.prompt_boolean(_(u"""Reboot is necessary.
Do you want to reboot now?"""),
                                  default=True, level='warn') is True:
                fonctionseole.zephir("MSG",
                                     "Demande de redémarrage acceptée par l'utilisateur",
                                     COMPAT_NAME.upper())
                process.system_code(['reboot'])
            else:
                fonctionseole.zephir("MSG",
                                     "Demande de redémarrage refusée par l'utilisateur",
                                     COMPAT_NAME.upper())
        else:
            print
            ihm.print_orange(_(u'Reboot necessary'))
            time.sleep(1)
            print
            if options.auto:
                # --auto: reboot without asking.
                fonctionseole.zephir("MSG", "Redémarrage automatique",
                                     COMPAT_NAME.upper())
                process.system_code(['reboot'])
            else:
                # Leave the reboot to the administrator's planning.
                fonctionseole.zephir("MSG", "Redémarrage du serveur à planifier",
                                     COMPAT_NAME.upper())
|
||||
|
||||
|
||||
def main(force_options=None, force_args=None, need_lock=True):
    """Entry point: apply the EOLE configuration end to end.

    :param force_options: dict of option overrides (see Option)
    :param force_args: argv list to parse instead of sys.argv
    :param need_lock: acquire/release the system action lock
    """
    global log
    options.update_from_cmdline(force_args=force_args,
                                force_options=force_options)

    try:
        # Module-level logger.
        log = init_logging(name=u'reconfigure', level=options.log_level,
                           console=['stderr', 'stddebug'],
                           filename=_LOGFILENAME)

        # Remove module name prefix from warn/error messages emitted
        # from here.
        set_formatter(log, 'stderr', 'brief')

        # Define handlers for additional loggers.
        # Those loggers are not for direct use.
        # Log pyeole.service.
        pyeole_service_log = init_logging(name=u'pyeole.service',
                                          level=options.log_level,
                                          filename=_LOGFILENAME,
                                          console=['stderr'])
        # Log pyeole.pkg.
        pyeole_pkg_log = init_logging(name=u'pyeole.pkg',
                                      level=options.log_level,
                                      filename=_LOGFILENAME)
        passlib_log = init_logging(name=u'passlib.registry',
                                   level='error',
                                   filename=_LOGFILENAME)

        # Disable warnings from pyeole.service on the console.
        set_filters(pyeole_service_log, 'stderr',
                    ['error', 'critical'])

        if options.verbose or options.debug:
            # Enable creole logs.
            creole_log = init_logging(name=u'creole', level=options.log_level,
                                      filename=_LOGFILENAME)
            # Define a root logger when verbose or debug is activated.
            root_log = init_logging(level=options.log_level)
        else:
            # Enable creole logs (console output on stderr only).
            creole_log = init_logging(name=u'creole', level=options.log_level,
                                      filename=_LOGFILENAME,
                                      console=['stderr'])

        creolemajauto_log = init_logging(name=u'creole.majauto', level=options.log_level,
                                         filename=_LOGFILENAME, console=['stderr', 'stdout'])

        ihm.print_title(_(u'Beginning of configuration'))
        # instance or reconfigure?
        reset_compat_name()
        fonctionseole.zephir("INIT", "Début de configuration",
                             COMPAT_NAME.upper())
        # Ordered pipeline; each step may raise to abort the run.
        prepare(need_lock)
        valid_mandatory(need_lock)
        cache()
        containers()
        packages()
        run_parts(u'preservice')
        services(action=u'stop')
        run_parts(u'pretemplate')
        templates()
        if not is_valid_ip_eth0():
            # eth0 got the link-local fallback: retry once after
            # restarting networking and re-templating.
            log.info(_(u"eth0 network interface does not have a valid IP address."))
            log.info(_(u"Restarting networking service"))
            manage_service(u'restart', u'networking', display='console')
            templates()
            if not is_valid_ip_eth0():
                log.info(_(u"eth0 network interface does not have a valid IP address."))
                msg = _(u"Unable to obtain IP address.")
                raise NetworkConfigError(msg)

        services(action=u'configure')
        # posttemplate/00-annuaire needs the certificates.
        certificates()
        run_parts(u'posttemplate')
        # Close all connections before applying kernel params (#17408).
        client.close()
        param_kernel()
        kill_dhclient()
        services(action=u'start')
        users()
        run_parts(u'postservice')
        schedule()
        finalize(need_lock)
        ihm.print_title(_(u'Reconfiguration OK'))
        update_server()
        # IMPORTANT: do nothing after these lines —
        # the server may be rebooted by reboot_server().
        reboot_server()

    except (UserExit, UserExitError), err:
        # User-requested abort: release locks and notify Zéphir.
        unlock_actions(need_lock)
        fonctionseole.zephir("FIN", "Abandon par l'utilisateur",
                             COMPAT_NAME.upper())
        raise err

    except Exception, err:
        if options.debug:
            log.debug(err, exc_info=True)
        else:
            log.error(err)
        fonctionseole.zephir('ERR', str(err),
                             COMPAT_NAME.upper(),
                             console=False)
        if need_lock:
            # Mark the lock as released after failure.
            release(LOCK_NAME, valid=False, level='system')
        raise err
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,651 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
##########################################################################
|
||||
# creole.server - distribute creole variables through REST API
|
||||
# Copyright © 2012,2013 Pôle de compétences EOLE <eole@ac-dijon.fr>
|
||||
#
|
||||
# License CeCILL:
|
||||
# * in french: http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# * in english http://www.cecill.info/licences/Licence_CeCILL_V2-en.html
|
||||
##########################################################################
|
||||
|
||||
"""Distribute Creole configuration through REST API
|
||||
|
||||
Setup a daemon based on `cherrypy` listening by default on
|
||||
127.0.0.1:8000 for queries on Creole configuration.
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import threading
|
||||
|
||||
from creole import eosfunc
|
||||
|
||||
from traceback import format_exc
|
||||
|
||||
from os.path import basename, dirname, isdir, samefile, splitext
|
||||
|
||||
from pyeole.log import init_logging, getLogger
|
||||
from pyeole import scriptargs
|
||||
|
||||
from .config import configeoldir, eoledirs, eoleextradico, \
|
||||
eoleextraconfig
|
||||
from .loader import creole_loader, load_config_eol, load_extras
|
||||
|
||||
from .i18n import _
|
||||
|
||||
from tiramisu.config import Config, SubConfig, undefined
|
||||
from tiramisu.error import PropertiesOptionError
|
||||
|
||||
from pyeole.cherrypy_plugins import InotifyMonitor
|
||||
|
||||
import cherrypy
|
||||
import socket
|
||||
|
||||
from pyinotify import ProcessEvent
|
||||
from pyinotify import IN_DELETE
|
||||
from pyinotify import IN_CREATE
|
||||
from pyinotify import IN_MODIFY
|
||||
from pyinotify import IN_MOVED_TO
|
||||
from pyinotify import IN_MOVED_FROM
|
||||
|
||||
from systemd import daemon
|
||||
|
||||
import logging
|
||||
|
||||
# Global logger
|
||||
log = getLogger(__name__)
|
||||
|
||||
lock = threading.Lock()
|
||||
|
||||
num_error = [(PropertiesOptionError, 1), (KeyError, 2),
|
||||
(AttributeError, 4), (Exception, 3)]
|
||||
|
||||
# For pyinotify handler and filtering
|
||||
_INOTIFY_EOL_DIRS = [configeoldir, eoleextraconfig]
|
||||
|
||||
_INOTIFY_MASK = IN_DELETE | IN_CREATE | IN_MODIFY | IN_MOVED_TO | IN_MOVED_FROM
|
||||
|
||||
|
||||
def _inotify_filter(event):
    """Check if the path must be excluded from being watched.

    The decision is returned as a mapping ``{"EOL": bool}``:
    ``True`` means "ignore this event", ``False`` means "this event
    concerns an EOLE directory or ``.eol`` file and must trigger a
    reload" (see :meth:`CreoleInotifyHandler.process_default`).

    :param event: event to look for
    :type event: :class:`pyinotify.Event`
    :return: mapping with key ``"EOL"`` telling if the :data:`event`
             must be excluded
    :rtype: `dict`

    """

    # Default: filter out (ignore) everything not recognized below.
    _INOTIFY_EOL = True

    if isdir(event.pathname):
        # Always ok for EOLE directories
        for directory in _INOTIFY_EOL_DIRS:
            if not os.access(directory, os.F_OK):
                continue
            if samefile(event.pathname, directory):
                _INOTIFY_EOL = False

        if not _INOTIFY_EOL:
            # Event on one of the watched EOLE directories themselves.
            return {"EOL": _INOTIFY_EOL}

    extension = splitext(event.name)[1]

    # Except for deletions, the file must still exist and be non-empty,
    # otherwise the event is ignored (partial writes, vanished files).
    if event.mask != IN_DELETE and not os.access(event.pathname, os.F_OK):
        log.debug(_(u'File not accessible: {0}').format(event.pathname))
        return {"EOL": True}

    if event.mask != IN_DELETE and os.stat(event.pathname).st_size == 0:
        log.debug(_(u'File with null size: {0}').format(event.pathname))
        return {"EOL": True}

    # Check only for files in EOLE directories
    # NOTE(review): this loop tests event.path (the containing
    # directory) while the directory branch above uses event.pathname
    # (the full path) — presumably intentional since here we classify a
    # file by its parent directory; confirm against pyinotify semantics.
    for directory in _INOTIFY_EOL_DIRS:
        if not os.access(directory, os.F_OK):
            continue
        if samefile(event.path, directory) or str(event.path).startswith(directory):
            # Inside an EOLE directory only .eol files are relevant.
            _INOTIFY_EOL = extension != '.eol'
            break

    return {"EOL": _INOTIFY_EOL}
|
||||
|
||||
|
||||
class CreoleInotifyHandler(ProcessEvent):
    """Process inotify events.

    Trigger a reload of the :class:`CreoleServer` configuration
    whenever a relevant ``.eol`` file or EOLE directory changes
    (relevance is decided by :func:`_inotify_filter`).

    """

    # Instance of CreoleServer, injected by my_init().
    _server = None

    def my_init(self, server):
        """Subclass constructor.

        This is the constructor, it is automatically called from
        :meth:`ProcessEvent.__init__()`,

        Extra arguments passed to ``__init__()`` would be delegated
        automatically to ``my_init()``.

        :param server: server whose configuration must be reloaded
        :type server: :class:`CreoleServer`
        """
        self._server = server

    def process_default(self, event):
        """Reload :class:`CreoleServer` on all managed inotify events

        :param event: the filesystem event to handle
        :type event: :class:`pyinotify.Event`
        """
        inotify_data = _inotify_filter(event)
        if not inotify_data["EOL"]:
            log.warn(_(u'Reload config.eol due to {0} on {1}').format(event.maskname,
                                                                      event.pathname))
            try:
                self._server.reload_eol()
            except Exception:
                # Reloading is best-effort from the watcher thread, but
                # the previous bare "except: pass" also swallowed
                # SystemExit/KeyboardInterrupt and hid every failure.
                # Narrow the clause and keep a trace of the error.
                log.debug(u'reload_eol() failed', exc_info=True)
        else:
            log.debug(_(u'Filtered inotify event for {0}').format(event.pathname))
|
||||
|
||||
|
||||
class CreoleServer(object):
|
||||
"""Cherrypy application answering REST requests
|
||||
"""
|
||||
|
||||
def __init__(self, running=True):
|
||||
"""Initialize the server
|
||||
|
||||
Load the tiramisu configuration.
|
||||
|
||||
:param `bool` running: Is the web server running during server
|
||||
initialization.
|
||||
|
||||
"""
|
||||
|
||||
log.debug(_(u"Loading tiramisu configuration"))
|
||||
self.config = None
|
||||
self.reload_config(running)
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
def reload_config(self, running=True):
|
||||
lock.acquire()
|
||||
|
||||
if running:
|
||||
# Tell systemd that we are reloading the configuration
|
||||
daemon.notify('RELOADING=1')
|
||||
|
||||
try:
|
||||
log.debug(u"Set umask to 0022")
|
||||
os.umask(0022)
|
||||
reload(eosfunc)
|
||||
eosfunc.load_funcs(force_reload=True)
|
||||
self.config = creole_loader(load_extra=True, reload_config=False,
|
||||
disable_mandatory=True, owner='creoled',
|
||||
try_upgrade=False)
|
||||
if log.isEnabledFor(logging.DEBUG) and self.config.impl_get_information('load_error', False):
|
||||
msg = _('Load creole configuration with errors')
|
||||
log.debug(msg)
|
||||
ret = self.response()
|
||||
|
||||
except Exception, err:
|
||||
# Avoid using format as exception message could be undecoded
|
||||
msg = _('Unable to load creole configuration: ')
|
||||
msg += unicode(str(err), 'utf-8')
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
log.debug(msg, exc_info=True)
|
||||
else:
|
||||
log.error(msg)
|
||||
|
||||
#self.config = None
|
||||
ret = self.response(status=3)
|
||||
|
||||
if running:
|
||||
# Tell systemd that we are now ready again
|
||||
daemon.notify('READY=1')
|
||||
|
||||
lock.release()
|
||||
|
||||
return ret
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
def reload_eol(self):
|
||||
if not self.config:
|
||||
return self.reload_config()
|
||||
|
||||
lock.acquire()
|
||||
|
||||
# Tell systemd that we are reloading the configuration
|
||||
daemon.notify(u'RELOADING=1')
|
||||
|
||||
config = Config(self.config.cfgimpl_get_description())
|
||||
try:
|
||||
load_config_eol(config)
|
||||
except Exception, err:
|
||||
# Avoid using format as exception message could be undecoded
|
||||
msg = _('Unable to load creole configuration from config.eol: ')
|
||||
msg += unicode(str(err), 'utf-8')
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
log.debug(msg, exc_info=True)
|
||||
else:
|
||||
log.error(msg)
|
||||
|
||||
#self.config = None
|
||||
ret = self.response(status=3)
|
||||
try:
|
||||
load_extras(config)
|
||||
except:
|
||||
msg = _('Unable to load creole configuration from extra: ')
|
||||
msg += unicode(str(err), 'utf-8')
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
log.debug(msg, exc_info=True)
|
||||
else:
|
||||
log.error(msg)
|
||||
|
||||
#self.config = None
|
||||
ret = self.response(status=3)
|
||||
else:
|
||||
config.read_only()
|
||||
self.config = config
|
||||
ret = self.response()
|
||||
|
||||
|
||||
# Tell systemd that we are now ready again
|
||||
daemon.notify(u'READY=1')
|
||||
|
||||
lock.release()
|
||||
|
||||
return ret
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
def valid_mandatory(self):
|
||||
if self.config is None:
|
||||
return self._no_config()
|
||||
try:
|
||||
msg = _(u'All variables are not set, please configure your system:')
|
||||
error = False
|
||||
mandatory_errors = set(self.config.cfgimpl_get_values().mandatory_warnings(force_permissive=True))
|
||||
if mandatory_errors != set():
|
||||
error = True
|
||||
msg += ' ' + _('variables are mandatories') + ' (' + ', '.join(mandatory_errors) + ')'
|
||||
force_vars = set()
|
||||
for force_store_var in self.config.impl_get_information('force_store_vars'):
|
||||
if force_store_var not in mandatory_errors:
|
||||
try:
|
||||
getattr(self.config, force_store_var)
|
||||
force_vars.add(force_store_var)
|
||||
except:
|
||||
pass
|
||||
if force_vars != set():
|
||||
error = True
|
||||
msg += ' ' + _('variables must be in config file') + ' (' + ', '.join(force_vars) + ')'
|
||||
|
||||
if error:
|
||||
log.debug(mandatory_errors)
|
||||
return self.response(msg, 3)
|
||||
except Exception, err:
|
||||
log.debug(err, exc_info=True)
|
||||
return self.response(str(err), 3)
|
||||
return self.response()
|
||||
|
||||
@staticmethod
|
||||
def response(response='OK', status=0):
|
||||
"""Generate a normalized response
|
||||
|
||||
:param response: message of the response
|
||||
:type response: `object`
|
||||
:param status: status code for the response, ``0`` for OK
|
||||
:type status: `int`
|
||||
:return: response of the form: ``{"status": `int`, "response": `message`}``
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
return {u'status': status, u'response': response}
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
def get(self, *args, **kwargs):
|
||||
"""Return the content of a tiramisu path
|
||||
|
||||
:param args: path elements of the query
|
||||
:type args: `list`
|
||||
:return: Value of a single variable or sub tree
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
def _remove_properties_error(val):
|
||||
new_val = []
|
||||
for v in val:
|
||||
if isinstance(v, PropertiesOptionError):
|
||||
new_val.append({'err': str(v)})
|
||||
else:
|
||||
new_val.append(v)
|
||||
return new_val
|
||||
|
||||
if self.config is None:
|
||||
return self._no_config()
|
||||
try:
|
||||
config = self.config
|
||||
if len(args) != 0:
|
||||
subconfig = getattr(config, '.'.join(args))
|
||||
else:
|
||||
subconfig = config
|
||||
if isinstance(subconfig, SubConfig):
|
||||
if u'variable' in kwargs:
|
||||
name = kwargs[u'variable']
|
||||
path = subconfig.find_first(byname=name,
|
||||
type_=u'path',
|
||||
check_properties=False)
|
||||
try:
|
||||
val = getattr(config, path)
|
||||
except PropertiesOptionError as err:
|
||||
if err.proptype == ['mandatory']:
|
||||
raise Exception(_(u'Mandatory variable {0} '
|
||||
u'is not set.').format(name))
|
||||
raise err
|
||||
if isinstance(val, list):
|
||||
val = _remove_properties_error(val)
|
||||
return self.response(val)
|
||||
else:
|
||||
withoption = kwargs.get(u'withoption')
|
||||
withvalue = kwargs.get(u'withvalue')
|
||||
if withvalue is None:
|
||||
withvalue = undefined
|
||||
dico = subconfig.make_dict(withoption=withoption, withvalue=withvalue)
|
||||
for key, val in dico.items():
|
||||
if isinstance(val, list):
|
||||
dico[key] = _remove_properties_error(val)
|
||||
return self.response(dico)
|
||||
else:
|
||||
#if config is a value, not a SubConfig
|
||||
if isinstance(subconfig, list):
|
||||
subconfig = _remove_properties_error(subconfig)
|
||||
return self.response(subconfig)
|
||||
except Exception, err:
|
||||
log.debug(err, exc_info=True)
|
||||
for error_match in num_error:
|
||||
if isinstance(err, error_match[0]):
|
||||
break
|
||||
return self.response(str(err), error_match[1])
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
def list(self, *args):
|
||||
"""List subtree pointed by :data:`args`
|
||||
|
||||
List the nodes and variables under a path.
|
||||
|
||||
If the path point to a single variable, then return its value.
|
||||
|
||||
:param args: path elements of the query
|
||||
:type args: `list`
|
||||
|
||||
:return: Nodes and/or variables under a path, or value of a
|
||||
variable
|
||||
:rtype: `list`
|
||||
|
||||
"""
|
||||
if self.config is None:
|
||||
return self._no_config()
|
||||
try:
|
||||
config = self.config
|
||||
if len(args) == 0:
|
||||
# root of configuration
|
||||
obj = config
|
||||
else:
|
||||
# Path to a sub configuration
|
||||
base = '.'.join(args)
|
||||
obj = getattr(config, base)
|
||||
if isinstance(obj, SubConfig):
|
||||
# Path is a node
|
||||
groups = [u'%s/' % g[0] for g in obj.iter_groups()]
|
||||
items = [u'%s' % i[0] for i in obj]
|
||||
return self.response(groups + items)
|
||||
else:
|
||||
# Path is a leaf
|
||||
value = self.get(*args)[u'response']
|
||||
return self.response([value])
|
||||
except Exception, err:
|
||||
log.debug(err, exc_info=True)
|
||||
for error_match in num_error:
|
||||
if isinstance(err, error_match[0]):
|
||||
break
|
||||
return self.response(str(err), error_match[1])
|
||||
|
||||
def _no_config(self):
|
||||
"""Return an error message when no configuration is loaded
|
||||
|
||||
:return: a failure response
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
return self.response(_(u'No configuration'), status=3)
|
||||
|
||||
class CreoleDaemon(object):
|
||||
"""Run the CreoleServer
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize the cherrypy daemon
|
||||
"""
|
||||
|
||||
# Built-in configuration
|
||||
self.argparse = self._load_argparse()
|
||||
# Read command line arguments
|
||||
self.option = self.argparse.parse_args()
|
||||
if self.option.verbose:
|
||||
self.option.log_level = u'info'
|
||||
if self.option.debug:
|
||||
self.option.log_level = u'debug'
|
||||
self._configure_log()
|
||||
|
||||
def _load_argparse(self):
|
||||
"""Parse command line arguments
|
||||
|
||||
:return: command line parser
|
||||
:rtype: `argparse.ArgumentParser`
|
||||
|
||||
"""
|
||||
parser = argparse.ArgumentParser(description=u'Run creole daemon',
|
||||
parents=[scriptargs.logging('warning')],
|
||||
conflict_handler='resolve')
|
||||
parser.add_argument("-b", "--base-dir", default='/tmp',
|
||||
help=_(u"Base directory in which the server"
|
||||
" is launched (default: /tmp)"))
|
||||
parser.add_argument("-c", "--conf-file",
|
||||
default='/etc/eole/creoled.conf',
|
||||
help=_(u"Configuration file of the server"
|
||||
" (default: /etc/eole/creoled.conf"))
|
||||
parser.add_argument("-d", "--daemon", action='store_true',
|
||||
help=_(u"Run the server as a daemon (default: false)"))
|
||||
parser.add_argument("-l", "--listen", action='store',
|
||||
default='127.0.0.1:8000',
|
||||
help=_(u"Listen on the specified IP:PORT"
|
||||
" (default: 127.0.0.1:8000)"))
|
||||
parser.add_argument("-m", "--mount-base", default='/',
|
||||
help=_(u"Base under which the application is mounted"
|
||||
" (default: /)"))
|
||||
parser.add_argument("-p", "--pidfile",
|
||||
default='/tmp/{0}.pid'.format(
|
||||
basename(sys.argv[0])),
|
||||
help=_(u"Base under which the application is mounted"
|
||||
" (default: /)"))
|
||||
parser.add_argument("-u", "--user", default='nobody',
|
||||
help=_(u"User of the running process"
|
||||
" (default: nobody)"))
|
||||
parser.add_argument("-g", "--group", default='nogroup',
|
||||
help=_(u"Group of the running process"
|
||||
" (default: nogroup)"))
|
||||
parser.add_argument("--umask", default='0640',
|
||||
help=_(u"Umask of the running process"
|
||||
" (default: 0644)"))
|
||||
return parser
|
||||
|
||||
def _get_conf(self, name):
|
||||
"""Map command line arguments to cherrypy configuration
|
||||
|
||||
:param name: internal name of argparse option store
|
||||
:returns: piece of cherrypy configuration
|
||||
:rtype: `dict`
|
||||
"""
|
||||
try:
|
||||
option_map = { 'listen' :
|
||||
{ 'server.socket_host' :
|
||||
self.option.listen.split(':')[0],
|
||||
'server.socket_port' :
|
||||
int(self.option.listen.split(':')[1])},
|
||||
}
|
||||
return option_map[name]
|
||||
except KeyError:
|
||||
return {}
|
||||
|
||||
def load_conf(self):
|
||||
"""Load daemon configuration
|
||||
|
||||
Take care to load the configuration in proper order and avoid
|
||||
overriding configuration file parameter by default command
|
||||
line arguments.
|
||||
|
||||
Order is:
|
||||
|
||||
- default values from command line option parser
|
||||
|
||||
- option from a configuration file
|
||||
|
||||
- command line arguments
|
||||
|
||||
"""
|
||||
# Load all default value
|
||||
config = {'engine.autoreload.on': False}
|
||||
for opt in vars(self.option):
|
||||
config.update(self._get_conf(opt))
|
||||
|
||||
cherrypy.config.update( { 'global' : config} )
|
||||
|
||||
# Load configuration file
|
||||
if os.access(self.option.conf_file, os.F_OK):
|
||||
cherrypy.config.update(self.option.conf_file)
|
||||
|
||||
# Override config file option present on command line
|
||||
config = {}
|
||||
for opt in sys.argv[1:]:
|
||||
config.update(self._get_conf(opt))
|
||||
cherrypy.config.update( {'global' : config } )
|
||||
|
||||
def _configure_log(self):
|
||||
"""Configure the module logger
|
||||
|
||||
Avoid logging apache style time since the logger does it.
|
||||
|
||||
"""
|
||||
global log
|
||||
log_filename = None
|
||||
if self.option.daemon:
|
||||
log_filename = u'/var/log/creoled.log'
|
||||
|
||||
log = init_logging(name=u'creoled', as_root=True,
|
||||
level=self.option.log_level,
|
||||
console=not self.option.daemon,
|
||||
syslog=None,
|
||||
filename=log_filename)
|
||||
|
||||
# Cherrypy do not handle logs
|
||||
cherrypy.log.error_file = None
|
||||
cherrypy.log.access_file = None
|
||||
# Do not output on screen
|
||||
cherrypy.log.screen = False
|
||||
# Hack to avoid time in log message
|
||||
cherrypy.log.time = lambda : ''
|
||||
|
||||
def run(self):
|
||||
"""Start the cherrypy server.
|
||||
"""
|
||||
engine = cherrypy.engine
|
||||
|
||||
# Load server but we are not running now
|
||||
# Do not let him tell systemd otherwise
|
||||
server = CreoleServer(running=False)
|
||||
|
||||
inotify_handler = CreoleInotifyHandler(server=server)
|
||||
|
||||
if hasattr(engine, "signal_handler"):
|
||||
engine.signal_handler.subscribe()
|
||||
# Error exit on SIGINT (Ctl-c) #6177
|
||||
engine.signal_handler.set_handler(2, self._kill)
|
||||
|
||||
if hasattr(engine, "console_control_handler"):
|
||||
engine.console_control_handler.subscribe()
|
||||
|
||||
cherrypy.tree.mount(server, self.option.mount_base,
|
||||
config={'global' : {} })
|
||||
|
||||
# Merge configuration from build-in, configuration file and command line
|
||||
self.load_conf()
|
||||
|
||||
if server.config is None:
|
||||
msg = _(u"No configuration found: do not check for container mode.")
|
||||
log.warn(msg)
|
||||
elif server.config.creole.general.mode_conteneur_actif == 'oui':
|
||||
container_ip = server.config.creole.containers.adresse_ip_br0
|
||||
container_port = cherrypy.config.get('server.socket_port')
|
||||
# Start a server for containers if ip can be bounded
|
||||
try:
|
||||
container_socket = socket.socket(socket.AF_INET,
|
||||
socket.SOCK_STREAM)
|
||||
container_socket.setsockopt(socket.SOL_SOCKET,
|
||||
socket.SO_REUSEADDR,
|
||||
1)
|
||||
container_socket.bind((container_ip, container_port))
|
||||
container_socket.close()
|
||||
except socket.error, err:
|
||||
log.error(_(u"Unable to listen for containers: {0}").format(err))
|
||||
else:
|
||||
container_server = cherrypy._cpserver.Server()
|
||||
container_server.socket_host = container_ip
|
||||
container_server.socket_port = container_port
|
||||
container_server.subscribe()
|
||||
|
||||
monitor = InotifyMonitor(engine, inotify_handler)
|
||||
monitor.subscribe()
|
||||
|
||||
monitor.watch.add_watch(_INOTIFY_EOL_DIRS, _INOTIFY_MASK, auto_add=True, rec=True)
|
||||
|
||||
if self.option.pidfile:
|
||||
cherrypy.process.plugins.PIDFile(engine,
|
||||
self.option.pidfile).subscribe()
|
||||
|
||||
if self.option.daemon:
|
||||
cherrypy.process.plugins.Daemonizer(engine).subscribe()
|
||||
|
||||
# Drop priviledges
|
||||
cherrypy.process.plugins.DropPrivileges(engine,
|
||||
uid = self.option.user,
|
||||
gid = self.option.group,
|
||||
umask = self.option.umask)
|
||||
|
||||
# Let's start the CherryPy engine so that
|
||||
# everything works
|
||||
engine.start()
|
||||
|
||||
# Tell systemd that we are ready
|
||||
daemon.notify(u'READY=1')
|
||||
|
||||
# Run the engine main loop
|
||||
engine.block()
|
||||
|
||||
@staticmethod
|
||||
def _kill():
|
||||
"""Exit the server with non zero exit code
|
||||
"""
|
||||
sys.exit(1)
|
||||
|
||||
if __name__ == '__main__':
    # Do NOT name this variable "daemon": that rebound the module-level
    # "from systemd import daemon" import, so CreoleDaemon.run() then
    # crashed on daemon.notify() because the name pointed to the
    # CreoleDaemon instance instead of the systemd module.
    creole_daemon = CreoleDaemon()
    creole_daemon.run()
|
|
@ -0,0 +1,45 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from pyeole.service import manage_services
|
||||
from pyeole.decorator import deprecated
|
||||
|
||||
from .i18n import _
|
||||
|
||||
|
||||
@deprecated(_(u'Use new API “manage_services()”'))
def instance_services(container=None):
    """Configure (instantiate) all services.

    Deprecated wrapper around ``manage_services(u'configure')``.

    @param container: name of the container
    @type container: C{str}
    """
    manage_services(u'configure', container=container)
|
||||
|
||||
|
||||
@deprecated(_(u'Use new API “manage_services()”'))
def stop_services(container=None):
    """Stop all services

    Deprecated wrapper around ``manage_services(u'stop')``.

    The networking service is never stopped.

    @param container: name of the container
    @type container: C{str}
    """
    manage_services(u'stop', container=container)
|
||||
|
||||
|
||||
@deprecated(_(u'Use new API “manage_services()”'))
def start_services(container=None):
    """Start all services

    Deprecated wrapper around ``manage_services(u'start')``.

    The networking service is a special case.

    @param container: name of the container
    @type container: C{str}
    """
    manage_services(u'start', container=container)
|
||||
|
||||
|
||||
@deprecated(_(u'Use new API “manage_services()”'))
def restart_services(container=None):
    """Restart all services.

    Deprecated wrapper around ``manage_services(u'restart')``.

    @param container: name of the container
    @type container: C{str}
    """
    manage_services(u'restart', container=container)
|
|
@ -0,0 +1,607 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Gestion du mini-langage de template
|
||||
On travaille sur les fichiers cibles
|
||||
"""
|
||||
|
||||
import sys
|
||||
import shutil
|
||||
import logging
|
||||
|
||||
import traceback
|
||||
import os
|
||||
from os import listdir, unlink
|
||||
from os.path import basename, join
|
||||
|
||||
from tempfile import mktemp
|
||||
|
||||
from Cheetah import Parser
|
||||
# Cheetah determines a template's encoding with a regexp
# (encodingDirectiveRE in Parser.py) looking for a line such as
# "#encoding: utf-8".  This class mimics the tiny subset of the 're'
# module API Cheetah uses and always reports utf-8, whatever the
# template contains.  See #6224.
class FakeEncoding():
    """Fake match/pattern object that always yields the utf-8 encoding."""

    _ENCODING = ('utf-8',)

    def search(self, *_ignored):
        # Pretend every string matches; the object doubles as the
        # "match" result.
        return self

    def groups(self):
        # Match groups of the fake search: always utf-8.
        return self._ENCODING
|
||||
# Monkey-patch Cheetah's encoding detection so that every template is
# read as utf-8, whatever an "#encoding:" directive says (#6224).
Parser.encodingDirectiveRE = FakeEncoding()
|
||||
|
||||
from Cheetah.Template import Template as ChtTemplate
|
||||
from Cheetah.NameMapper import NotFound as CheetahNotFound
|
||||
|
||||
import config as cfg
|
||||
|
||||
from .client import CreoleClient, CreoleClientError
|
||||
from .error import FileNotFound, TemplateError, TemplateDisabled
|
||||
import eosfunc
|
||||
|
||||
from .i18n import _
|
||||
|
||||
import pyeole
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
log.addHandler(logging.NullHandler())
|
||||
|
||||
class IsDefined(object):
    """Existence filter for Creole variables.

    Callable used from templates to test whether a variable (plain or
    ``master.slave``) exists in the context, instead of raising when it
    does not.
    """
    def __init__(self, context):
        self.context = context

    def __call__(self, varname):
        # Plain variable: direct membership test on the context.
        if '.' not in varname:
            return varname in self.context
        # Dotted name: must be exactly "master.slave".
        parts = varname.split('.')
        if len(parts) != 2:
            raise KeyError(_(u"Group variables must be of type master.slave"))
        master, slave = parts
        if master not in self.context:
            return False
        return slave in self.context[master].slave.keys()
|
||||
|
||||
|
||||
class CreoleGet(object):
    """Read-only accessor over a flat variable dictionary.

    Supports call syntax, bracket syntax and the ``in`` operator so
    templates can use whichever is most convenient.
    """
    def __init__(self, context):
        self.context = context

    def __getitem__(self, varname):
        """Bracket and dotted notation lookup."""
        return self.context[varname]

    def __call__(self, varname):
        # Call syntax delegates to the bracket lookup above.
        return self[varname]

    def __contains__(self, varname):
        """Check variable existence in context."""
        return varname in self.context
|
||||
|
||||
|
||||
@classmethod
def cl_compile(kls, *args, **kwargs):
    """Compile a Cheetah template with the Creole directive tokens.

    Forces ``compilerSettings`` so Creole templates use ``%`` for
    directives and ``%%`` for variables.  PSP and comment tokens are
    mapped to a ten-character ``µ`` run, i.e. effectively disabled
    (such a sequence should never appear in a real template).
    """
    kwargs['compilerSettings'] = {'directiveStartToken' : u'%',
                                  'cheetahVarStartToken' : u'%%',
                                  'EOLSlurpToken' : u'%',
                                  'PSPStartToken' : u'µ' * 10,
                                  'PSPEndToken' : u'µ' * 10,
                                  'commentStartToken' : u'µ' * 10,
                                  'commentEndToken' : u'µ' * 10,
                                  'multiLineCommentStartToken' : u'µ' * 10,
                                  'multiLineCommentEndToken' : u'µ' * 10}
    return kls.old_compile(*args, **kwargs)
# Install cl_compile as Template.compile, keeping the original
# implementation reachable as old_compile.
ChtTemplate.old_compile = ChtTemplate.compile
ChtTemplate.compile = cl_compile
|
||||
|
||||
|
||||
class CheetahTemplate(ChtTemplate):
    """Customize and ease the construction of a Cheetah template.
    """
    def __init__(self, filename, context, current_container):
        """Initialize Creole CheetahTemplate

        @param filename: name of the file to process
        @type filename: C{str}
        @param context: flat dictionary of creole variables as 'name':'value',
        @type context: C{dict}
        @param current_container: flat dictionary describing the current container
        @type current_container: C{dict}
        """
        # Expose every public helper of eosfunc in the template
        # namespace.
        eos = {}
        for func in dir(eosfunc):
            if not func.startswith('_'):
                eos[func] = getattr(eosfunc, func)
        # Add the variables describing the containers
        # FIXME: fetch the information from the client!
        ChtTemplate.__init__(self, file=filename,
                             searchList=[context, eos, {u'is_defined' : IsDefined(context),
                                                        u'creole_client' : CreoleClient(),
                                                        u'current_container':CreoleGet(current_container),
                                                        }])
|
||||
|
||||
|
||||
class CreoleMaster(object):
    """Wrap a master variable together with its slave variables."""

    def __init__(self, value, slave=None, index=None):
        """Make the variable iterable so that one can write::

            for ip in iplist:
                print ip.network
                print ip.netmask
                print ip

        ``index`` is used for CreoleLint.

        @param value: value of the master variable
        @param slave: mapping of slave variable name to values
        @type slave: C{dict}
        @param index: position of this element in the parent list
        """
        self._value = value
        if slave is not None:
            self.slave = slave
        else:
            self.slave = {}
        self._index = index

    def __getattr__(self, name):
        """Get slave variable or attribute of master value.

        If the attribute is a name of a slave variable, return its value.
        Otherwise, returns the requested attribute of master value.
        """
        if name in self.slave:
            value = self.slave[name]
            # Slaves stored as exceptions (see add_slave) are raised
            # lazily, on first access.
            if isinstance(value, Exception):
                raise value
            return value
        else:
            return getattr(self._value, name)

    def __getitem__(self, index):
        """Get a master.slave at requested index.
        """
        ret = {}
        for key, values in self.slave.items():
            ret[key] = values[index]
        return CreoleMaster(self._value[index], ret, index)

    def __iter__(self):
        """Iterate over master.slave.

        Return synchronised value of master.slave.
        """
        for i in range(len(self._value)):
            ret = {}
            for key, values in self.slave.items():
                ret[key] = values[i]
            yield CreoleMaster(self._value[i], ret, i)

    def __len__(self):
        """Delegate to master value
        """
        return len(self._value)

    def __repr__(self):
        """Show CreoleMaster as dictionary.

        The master value is stored under 'value' key.
        The slaves are stored under 'slave' key.
        """
        return repr({u'value': self._value, u'slave': self.slave})

    # Comparison operators all delegate to the master value so a
    # CreoleMaster compares like the value it wraps.
    def __eq__(self, value):
        return value == self._value

    def __ne__(self, value):
        return value != self._value

    def __lt__(self, value):
        return self._value < value

    def __le__(self, value):
        return self._value <= value

    def __gt__(self, value):
        return self._value > value

    def __ge__(self, value):
        return self._value >= value

    def __str__(self):
        """Delegate to master value
        """
        return str(self._value)

    def __add__(self, val):
        return self._value.__add__(val)

    def __radd__(self, val):
        return val + self._value

    def __contains__(self, item):
        return item in self._value

    def add_slave(self, name, value):
        """Add a slave variable

        Minimal check on type and value of the slave in regards to the
        master one.

        @param name: name of the slave variable
        @type name: C{str}
        @param value: value of the slave variable
        """
        if isinstance(self._value, list):
            # A list master requires a slave list of the same length.
            if not isinstance(value, list):
                raise TypeError
            elif len(value) != len(self._value):
                raise ValueError(_(u'length mismatch'))
            new_value = []
            for val in value:
                if isinstance(val, dict):
                    # {'err': message} items come from the server's
                    # _remove_properties_error(); store them as
                    # exceptions raised on access (see __getattr__).
                    new_value.append(ValueError(val['err']))
                else:
                    new_value.append(val)
            value = new_value
        elif isinstance(value, list):
            raise TypeError
        self.slave[name] = value
|
||||
|
||||
class CreoleTemplateEngine(object):
    """Engine to process Creole cheetah template

    Loads the EOLE variables once at construction time, then
    instantiates template files: copy to the work directory, apply
    patches, strip comments, fill with Cheetah and install with the
    requested owner/group/mode.
    """
    def __init__(self, force_values=None):
        # force_values permits injecting values instead of querying
        # CreoleClient (used by CreoleLint)
        self.client = CreoleClient()
        self.creole_variables_dict = {}
        self.force_values = force_values
        self.load_eole_variables()

    def load_eole_variables(self):
        """Build the Cheetah namespace from the EOLE variables."""
        # substitution dictionary for the EOLE variables
        self.creole_variables_dict = {}
        if self.force_values is not None:
            values = self.force_values
        else:
            values = self.client.get_creole()
        for varname, value in values.items():
            if varname in self.creole_variables_dict:
                # Creation of a slave create the master
                continue
            if varname.find('.') != -1:
                # group (master/slave) support: 'master.slave' paths
                mastername, slavename = varname.split('.')
                if not mastername in self.creole_variables_dict or not \
                        isinstance(self.creole_variables_dict [mastername],
                                   CreoleMaster):
                    # Create the master variable
                    if mastername in values:
                        self.creole_variables_dict[mastername] = CreoleMaster(values[mastername])
                    else:
                        # only for CreoleLint
                        self.creole_variables_dict[mastername] = CreoleMaster(value)
                # test only for CreoleLint
                if mastername != slavename:
                    self.creole_variables_dict[mastername].add_slave(slavename, value)
            else:
                self.creole_variables_dict[varname] = value

    def patch_template(self, filevar, force_no_active=False):
        """Apply patch to a template

        Looks for '<template>.patch' first in the variant patch
        directory, then in the local one, and applies each match with
        the external 'patch' command.

        :param filevar: template file informations
        :type filevar: `dict`
        :param force_no_active: also handle disabled templates
        :type force_no_active: `bool`
        """
        var_dir = os.path.join(cfg.patch_dir,'variante')
        patch_cmd = ['patch', '-d', cfg.templatedir, '-N', '-p1']
        patch_no_debug = ['-s', '-r', '-', '--backup-if-mismatch']

        tmpl_filename = os.path.split(filevar[u'source'])[1]
        # variant patches, then local patches
        for directory in [var_dir, cfg.patch_dir]:
            patch_file = os.path.join(directory, '{0}.patch'.format(tmpl_filename))
            if os.access(patch_file, os.F_OK):
                msg = _(u"Patching template '{0}' with '{1}'")
                log.info(msg.format(filevar[u'source'], patch_file))
                ret, out, err = pyeole.process.system_out(patch_cmd + patch_no_debug + ['-i', patch_file])
                if ret != 0:
                    msg = _(u"Error applying patch: '{0}'\nTo reproduce and fix this error {1}")
                    log.error(msg.format(patch_file, ' '.join(patch_cmd + ['-i', patch_file])))
                    # #8307: copy the pristine template back and do not abort
                    self._copy_to_template_dir(filevar, force_no_active)
                    #raise TemplateError(msg.format(patch_file, err))

    def strip_template_comment(self, filevar):
        """Strip comment from template

        This apply if filevar has a del_comment attribut
        """
        # delete comment lines (and empty lines) when requested
        # through the 'del_comment' attribute
        strip_cmd = ['sed', '-i']
        if u'del_comment' in filevar and filevar[u'del_comment'] != '':
            log.info(_(u"Cleaning file '{0}'").format( filevar[u'source'] ))
            ret, out, err = pyeole.process.system_out(strip_cmd
                            + ['/^\s*{0}/d ; /^$/d'.format(filevar[u'del_comment']),
                               filevar[u'source'] ])
            if ret != 0:
                msg = _(u"Error removing comments '{0}': {1}")
                raise TemplateError(msg.format(filevar[u'del_comment'], err))

    def _check_filevar(self, filevar, force_no_active=False):
        """Verify that filevar is processable

        :param filevar: template file informations
        :type filevar: `dict`
        :param force_no_active: accept disabled templates when `True`
        :type force_no_active: `bool`

        :raise CreoleClientError: if :data:`filevar` is disabled
        inexistant or unknown.

        """
        if not force_no_active and (u'activate' not in filevar or not filevar[u'activate']):

            raise CreoleClientError(_(u"Template file not enabled:"
                                      u" {0}").format(basename(filevar[u'source'])))
        # NOTE(review): this message formats filevar['source'] although
        # the branch means 'source' is missing or None — confirm intent
        if u'source' not in filevar or filevar[u'source'] is None:
            raise CreoleClientError(_(u"Template file not set:"
                                      u" {0}").format(basename(filevar['source'])))

        if u'name' not in filevar or filevar[u'name'] is None:
            raise CreoleClientError(_(u"Template target not set:"
                                      u" {0}").format(basename(filevar[u'source'])))

    def _copy_to_template_dir(self, filevar, force_no_active=False):
        """Copy template to processing temporary directory.

        :param filevar: template file informations
        :type filevar: `dict`
        :param force_no_active: copy disabled template if `True`
        :type filevar: `bool`
        :raise FileNotFound: if source template does not exist

        """
        self._check_filevar(filevar, force_no_active)
        tmpl_source_name = os.path.split(filevar[u'source'])[1]
        tmpl_source_file = os.path.join(cfg.distrib_dir, tmpl_source_name)
        if not os.path.isfile(tmpl_source_file):
            msg = _(u"Template {0} unexistent").format(tmpl_source_file)
            raise FileNotFound(msg)
        else:
            log.info(_(u"Copy template: '{0}' -> '{1}'").format(tmpl_source_file, cfg.templatedir))
            shutil.copy(tmpl_source_file, cfg.templatedir)

    def prepare_template(self, filevar, force_no_active=False):
        """Prepare template source file

        Copy to the work directory, apply patches and strip comments.
        """
        self._copy_to_template_dir(filevar, force_no_active)
        self.patch_template(filevar, force_no_active)
        self.strip_template_comment(filevar)

    def verify(self, filevar):
        """
        Check that the source file exists and that the target
        directory is usable (creating it when filevar[u'mkdir'] is
        set).

        :raise FileNotFound: missing source, or missing target
                             directory without 'mkdir'
        """
        if not os.path.isfile(filevar[u'source']):
            raise FileNotFound(_(u"File {0} does not exist.").format(filevar[u'source']))
        destfilename = filevar[u'full_name']
        dir_target = os.path.dirname(destfilename)
        if dir_target != '' and not os.path.isdir(dir_target):
            if not filevar[u'mkdir']:
                raise FileNotFound(_(u"Folder {0} does not exist but is required by {1}").format(dir_target, destfilename))
            os.makedirs(dir_target)
        # FIXME: causes more problems than it solves (cf. #3048)
        #if not isfile(target):
        #    system('cp %s %s' % (source, target))

    def process(self, filevar, container):
        """Process a cheetah template

        Process a cheetah template and copy the file to destination.
        @param filevar: dictionary describing the file to process
        @type filevar: C{dict}
        @param container: dictionary describing the container
        @type container: C{dict}
        """
        UTF = "#encoding: utf-8"

        self._check_filevar(filevar)

        # full path of the destination file
        destfilename = filevar[u'full_name']

        log.info(_(u"Cheetah processing: '{0}' -> '{1}'").format(filevar[u'source'],
                                                                 destfilename))

        # work on a temporary copy so the original template is
        # never modified
        # NOTE(review): mktemp() is race-prone; mkstemp() would be safer
        tmpfile = mktemp()
        shutil.copy(filevar[u'source'], tmpfile)

        # header prepended for UTF-8 support
        # FIXME: other encodings?
        #os.system("sed -i '1i{0}' {1}".format(UTF, tmpfile)) (removed since #6224)

        try:
            cheetah_template = CheetahTemplate(tmpfile, self.creole_variables_dict, container)
            os.unlink(tmpfile)
            # removal of the added UTF-8 header (removed since #6224)
            data = str(cheetah_template) # .replace("{0}\n".format(UTF), '', 1)
        except CheetahNotFound, err:
            # extract the variable name from Cheetah's message
            varname = err.args[0][13:-1]
            msg = _(u"Error: unknown variable used in template {0} : {1}").format(filevar[u'name'], varname)
            raise TemplateError, msg
        except UnicodeDecodeError, err:
            msg = _(u"Encoding issue detected in template {0}").format(filevar[u'name'])
            raise TemplateError, msg
        except Exception, err:
            msg = _(u"Error while instantiating template {0}: {1}").format(filevar[u'name'], err)
            raise TemplateError, msg

        # write the target file
        if destfilename == '':
            # CreoleCat may need to write on stdout (#10065)
            sys.stdout.write(data)
        else:
            try:
                file_h = file(destfilename, 'w')
                file_h.write(data)
                file_h.close()
            except IOError, e:
                msg = _(u"Unable to write in file '{0}': '{1}'").format(destfilename, e)
                raise FileNotFound, msg

    def change_properties(self, filevar, container=None, force_full_name=False):
        """Apply owner, group and mode to the generated file.

        Defaults to root:root and mode 0644 when unset in filevar;
        commands are run through pyeole inside the given container.
        """
        chowncmd = [u'chown']
        chownarg = ''
        chmodcmd = [u'chmod']
        chmodarg = ''

        if not force_full_name:
            destfilename = filevar[u'name']
        else:
            destfilename = filevar[u'full_name']

        if u'owner' in filevar and filevar[u'owner']:
            chownarg = filevar[u'owner']
        else:
            chownarg = u'root'

        if u'group' in filevar and filevar[u'group']:
            chownarg += ":" + filevar[u'group']
        else:
            chownarg += u':root'

        if u'mode' in filevar and filevar[u'mode']:
            chmodarg = filevar[u'mode']
        else:
            chmodarg = u'0644'

        chowncmd.extend( [chownarg, destfilename] )
        chmodcmd.extend( [chmodarg, destfilename] )

        log.info(_(u'Changing properties: {0}').format(' '.join(chowncmd)) )
        ret, out, err = pyeole.process.creole_system_out( chowncmd, container=container, context=False )
        if ret != 0:
            # failures are logged, not fatal
            log.error(_(u'Error changing properties {0}: {1}').format(ret, err) )

        log.info(_(u'Changing properties: {0}').format(' '.join(chmodcmd)) )
        ret, out, err = pyeole.process.creole_system_out( chmodcmd, container=container, context=False )
        if ret != 0:
            log.error(_(u'Error changing properties {0}: {1}').format(ret, err) )

    def remove_destfile(self, filevar):
        """
        Delete the destination file if it exists.
        """
        destfilename = filevar[u'full_name']
        if os.path.isfile(destfilename):
            os.unlink(destfilename)
        else:
            log.debug(_(u"File '{0}' unexistent.").format(destfilename))


    def _instance_file(self, filevar, container=None):
        """Run templatisation on one file of one container

        @param filevar: Dictionary describing the file
        @type filevar: C{dict}
        @param container: Dictionary describing a container
        @type container: C{dict}
        @raise TemplateDisabled: when the template is not activated
        """
        if not filevar.get(u'activate', False):
            try:
                # copy and patch disabled templates too (#11029)
                self.prepare_template(filevar, force_no_active=True)
            except FileNotFound:
                pass

            if u'rm' in filevar and filevar[u'rm']:
                log.info(_(u"Removing file '{0}'"
                           u" from container '{1}'").format(filevar[u'name'],
                                                            container[u'name']))
                self.remove_destfile(filevar)

            # The caller handles if it's an error
            raise TemplateDisabled(_(u"Instantiation of file '{0}' disabled").format(filevar[u'name']))

        log.info(_(u"Instantiating file '{0}'"
                   u" from '{1}'").format(filevar[u'name'], filevar[u'source']))
        self.prepare_template(filevar)
        self.verify(filevar)
        self.process(filevar, container)
        if filevar['name'].startswith('..') and container not in [None, 'root']:
            self.change_properties(filevar, None, True)
        else:
            self.change_properties(filevar, container)


    def instance_file(self, filename=None, container='root', ctx=None):
        """Run templatisation on one file

        @param filename: name of a file
        @type filename: C{str}
        @param container: name of a container
        @type container: C{str}
        @param ctx: optional pre-fetched container informations
        """
        if container == 'all':
            # recurse over every group except the pseudo ones
            if ctx is None:
                groups = self.client.get_groups()
            else:
                groups = ctx.keys()
            for group in groups:
                if group in ['all', 'root']:
                    continue
                if ctx is None:
                    lctx = None
                else:
                    lctx = ctx[group]
                self.instance_file(filename=filename, container=group, ctx=lctx)
        else:
            if ctx is None:
                ctx = self.client.get_container_infos(container)

            filevars = [f for f in ctx[u'files'] if f[u'name'] == filename]
            for f in filevars:
                self._instance_file(f, ctx)

    def instance_files(self, filenames=None, container=None, containers_ctx=None):
        """Run templatisation on all files of all containers

        @param filenames: names of files
        @type filename: C{list}
        @param container: name of a container
        @type container: C{str}
        @param containers_ctx: optional pre-fetched container contexts
        """
        if containers_ctx is None:
            containers_ctx = []
            if container is not None:
                containers_ctx = [self.client.get_container_infos(container)]
            else:
                for group_name in self.client.get_groups():
                    containers_ctx.append(self.client.get_group_infos(group_name))
        if filenames is None:
            all_files = set(listdir(cfg.distrib_dir))
            prev_files = set(listdir(cfg.templatedir))
            all_declared_files = set()
            for ctx in containers_ctx:
                for fdict in ctx[u'files']:
                    all_declared_files.add(basename(fdict['source']))
            undeclared_files = all_files - all_declared_files
            toremove_files = prev_files - all_files
            # delete old templates (#6600)
            for fname in toremove_files:
                rm_file = join(cfg.templatedir, fname)
                log.debug(_(u"Removing file '{0}'").format(rm_file))
                unlink(rm_file)
            # copy template not referenced in a dictionary (#6303)
            for fname in undeclared_files:
                fobj = {'source': join(cfg.templatedir, fname), 'name': ''}
                self.prepare_template(fobj, True)

        for ctx in containers_ctx:
            for fdict in ctx[u'files']:
                if not filenames or fdict[u'name'] in filenames:
                    try:
                        self._instance_file(fdict, container=ctx)
                    except TemplateDisabled, err:
                        # Information on disabled template only useful
                        # in debug
                        log.debug(err, exc_info=True)
|
|
@ -0,0 +1,799 @@
|
|||
#!/usr/bin/env python
|
||||
#-*- coding: utf-8 -*-
|
||||
"""
|
||||
|
||||
utilitaire d'importation de configuration config.eol 2.2 ou config.eol 2.3
|
||||
vers config.eol 2.4
|
||||
|
||||
usage :
|
||||
|
||||
%prog <config_file_name>
|
||||
|
||||
"""
|
||||
from ConfigParser import ConfigParser
|
||||
|
||||
from tiramisu.option import SymLinkOption, ChoiceOption
|
||||
from .eosfunc import is_empty
|
||||
from .var_loader import convert_value
|
||||
import re
|
||||
from itertools import product
|
||||
|
||||
from .i18n import _
|
||||
|
||||
# ____ logger utility ____
|
||||
# log_filename = u'/var/log/creole.log'
|
||||
# try:
|
||||
# from pyeole.log import init_logging
|
||||
# except:
|
||||
# # compatibilité pour Zéphir 2.3
|
||||
# from pyeole.log import make_logger
|
||||
# log = make_logger(u'creole3.upgrade',
|
||||
# logfile=log_filename,
|
||||
# loglevel='INFO')
|
||||
# else:
|
||||
# log = init_logging(name=u'creoleUpgrade',
|
||||
# level='info',
|
||||
# console=False,
|
||||
# syslog=None,
|
||||
# filename=log_filename)
|
||||
|
||||
import logging
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Keys stored under each variable section of a 2.2/2.3 config.eol file.
KEYS = ['val', 'valprec', 'valdefault']
|
||||
|
||||
|
||||
def migration_23_to_tiramisu(opt, val):
    """Convert a 2.3-style value list to its tiramisu representation.

    @param opt: tiramisu option the value belongs to
    @param val: value as read from the 2.3 config.eol (a list)
    @return: converted value (scalar or list), or None when empty

    On conversion failure the raw value is kept so that tiramisu
    flags it as erroneous (hence visible in the interface).
    """
    if opt.impl_is_multi():
        if val == ['']:
            return []
        converted = []
        for item in val:
            if item == '':
                converted.append(None)
                continue
            try:
                converted.append(convert_value(opt, item))
            except ValueError:
                # keep the invalid raw value: tiramisu will mark it
                # as an error (thus shown in the interface)
                converted.append(item)
        return converted
    # simple (non-multi) option
    if (val == [] or val == ['']) and not isinstance(opt, ChoiceOption):
        return None
    if val == []:
        return None
    try:
        return convert_value(opt, val[0])
    except ValueError:
        # keep the invalid raw value: tiramisu will mark it
        # as an error (thus shown in the interface)
        return val[0]
|
||||
|
||||
class Dico(ConfigParser):
    """ConfigParser specialised for 2.2/2.3 ``config.eol`` files.

    Each variable is stored as a section holding the keys ``val``,
    ``valprec`` and ``valdefault`` (see ``KEYS``); values are python
    literal strings such as ``"['foo']"``.
    """

    def get_val(self, var, default=''):
        """Return the value of variable *var*, or *default* if unknown."""
        if self.has_section(var):
            return self.get(var, 'val')
        return default

    def copy(self, old, new, keep=True):
        """Copy variable *old* to *new*; drop *old* when *keep* is False."""
        if self.has_section(old):
            if not self.has_section(new):
                self.add_section(new)
            for key in KEYS:
                value = self.get(old, key)
                self.set(new, key, value)
            if keep:
                log.info(_(u"Variable {0} has been copied in {1}").format(old, new))
            else:
                self.remove_section(old)
                log.info(_(u"Variable {0} has been renamed to {1}").format(old, new))

    def move(self, old, new):
        """Rename variable *old* to *new* (copy without keeping)."""
        self.copy(old, new, keep=False)

    def remove(self, old):
        """Delete variable *old* if present."""
        if self.has_section(old):
            self.remove_section(old)
            log.info(_(u"Variable {0} has been removed").format(old))

    def simple2multi(self, src, new):
        """Merge n simple variables listed in *src* into one multi *new*."""
        res = []
        for var in src:
            if self.has_section(var):
                try:
                    # values are python literal strings, e.g. "['foo']"
                    # NOTE(review): eval on file content — config.eol is
                    # assumed trusted; do not feed untrusted files here
                    value = eval(self.get(var, 'val'))[0]
                    if value != '':
                        res.append(value)
                except Exception:
                    # narrowed from a bare 'except:' so SystemExit and
                    # KeyboardInterrupt are no longer swallowed
                    log.error(_(u"Source variable {0} invalid").format(var))
        if res != []:
            self.fill_var(new, res)

    def fill_var(self, var, val, valprec=[], valdefault=[]):
        """Create or update variable *var* with value *val*.

        *val* is normalized to a list before being stored.  The shared
        mutable defaults are safe here: they are stored as-is and
        never mutated.
        """
        if type(val) != list:
            val = [val]
        if not self.has_section(var):
            self.add_section(var)
        log.info(_(u"Variable updated: {0} = {1}").format(var, val))
        self.set(var, 'val', str(val))
        self.set(var, 'valprec', valprec)
        self.set(var, 'valdefault', valdefault)

    def save(self, fichier):
        """Write the resulting configuration to file *fichier*.

        Uses a context manager instead of py2's bare ``file()`` so the
        descriptor is closed even when ``write`` raises.
        """
        with open(fichier, 'w') as fic:
            self.write(fic)
|
||||
|
||||
def upgrade(config, configfile):
    """Upgrade a .eol file to the 2.4 format.

    Handles both 2.2 -> 2.4 (going through 2.3) and 2.3 -> 2.4.

    :param config: tiramisu configuration built from the 2.4 dictionaries
    :param configfile: path of the config.eol file to migrate
    :returns: tuple (store dict, detected source version)
    """
    log.info(_(u"config.eol upgrade started"))
    dico = Dico()
    dico.read(configfile)
    version = get_version(dico)
    if version == '2.2':
        upgrade22to23(dico)
    upgrade23to24(dico)
    # FIXME do stuff on 2.4 variables
    # default values are loaded from the 2.4 XML dictionaries
    # (removed the unused local 'owner': ownership is handled by
    # export_to_store's default_owner)
    store_dico = export_to_store(dico, config)
    return store_dico, version
|
||||
|
||||
def export_to_store(dico, config):
    """
    Export a 2.3 dico to a store dict updated with the creole 2.4
    default values::

        {"libelle_etab": {"owner": "gen_config", "val": "monchapet"},
        {"owner": "gen_config", "val": ["0.0.0.0"]}

    Variables unknown to the 2.4 configuration are kept but tagged
    with ``'old_format': True``.

    :param dico: configparser dict
    :param config: tiramisu configuration (2.4 dictionaries)
    :returns: config parser dico
    """
    default_owner = u'upgrade'
    store = {}
    # tweak the settings so disabled options are reachable too
    config.cfgimpl_get_settings().remove('disabled')
    for section in dico.sections():
        # values are python literal strings written by Dico.fill_var()
        val = eval(dico.get_val(section))
        # reset per variable: previously this flag was set once and
        # leaked onto every subsequent (valid) section
        old_format = False
        try:
            path = config.find_first(byname=section, type_='path', check_properties=False)
            if not path.startswith('creole.') or path.startswith('creole.containers.'):
                continue

            opt = config.unwrap_from_path(path)
            if isinstance(opt, SymLinkOption):
                continue
            val = migration_23_to_tiramisu(opt, val)
        except AttributeError:
            # unknown in 2.4: keep the raw 2.3 value, tagged below
            log.error(_(u"Unknown variable: {0}").format(section))
            old_format = True
        if val is None or val == []:
            continue
        store[section] = {"owner": default_owner, "val": val}
        if old_format:
            store[section]['old_format'] = True
    return store
|
||||
|
||||
def upgrade22to23(dico):
    """
    Upgrade a .eol file from 2.2 to 2.3.

    Renames/copies variables family by family; the statement order
    matters (some later moves read variables written earlier).

    :param dico: configparser instance
    """
    log.info(_(u"Starting {0} to {1} upgrade").format('2.2', '2.3'))
    # General family
    dico.move('proxy', 'activer_proxy_client')
    dico.move('proxy_server', 'proxy_client_adresse')
    dico.move('proxy_port', 'proxy_client_port')
    dico.simple2multi(['serveur_maj', 'serveur_maj2'], 'serveur_maj')
    # Amon specific
    domaine = dico.get_val('nom_domaine_academique')
    if domaine != '':
        if '.' in domaine:
            ac, dom = eval(domaine)[0].rsplit('.', 1)
        else:
            # handles the sphynx corner case where the suffix was not
            # part of the academic domain ('.fr' by default)
            ac = eval(domaine)[0]
            dom = 'fr'
        dico.fill_var('nom_academie', ac)
        dico.fill_var('suffixe_domaine_academique', dom)
    # nothing to do on Zéphir 2.2
    if dico.has_section('ip_ssh_eth0'):
        # ip/netmask optional on Scribe-2.2
        if 'oui' in dico.get_val('ssh_eth0') and dico.get_val('ip_ssh_eth0') == '[]':
            dico.fill_var('ip_ssh_eth0', '0.0.0.0')
            dico.fill_var('netmask_ssh_eth0', '0.0.0.0')
        # no ssh_eth0 on Horus-2.2
        if not dico.has_section('ssh_eth0'):
            # FIXME ip_ssh_eth0 seems optional
            dico.fill_var('ssh_eth0', 'oui')
    # Interface-X families
    for num in range(0, 5):
        dico.copy('ssh_eth%s' % num, 'admin_eth%s' % num)
        dico.copy('ip_ssh_eth%s' % num, 'ip_admin_eth%s' % num)
        dico.copy('netmask_ssh_eth%s' % num, 'netmask_admin_eth%s' % num)
    dico.move('agregation', 'activer_agregation')

    # Services family
    dico.move('cups', 'activer_cups')
    dico.move('ftp_perso', 'activer_proftpd')
    dico.move('ead_web', 'activer_ead_web')
    dico.move('apache', 'activer_apache')
    dico.move('mysql', 'activer_mysql')
    dico.move('xinet_interbase', 'activer_interbase')
    if 'oui' in dico.get_val('sso'):
        dico.fill_var('activer_sso', 'local')
    else:
        dico.fill_var('activer_sso', 'non')

    # DHCP migration
    dhcp = dico.get_val('dhcp', None)
    if dhcp is not None:
        dico.move('dhcp', 'activer_dhcp')
        if dico.get_val('adresse_network_dhcp', None) is None:
            # migration from an Horus 2.2
            len_dhcp = len(eval(dico.get_val('ip_basse_dhcp', "[]")))
            # fetch the variables to migrate
            adresse_network_dhcp = eval(dico.get_val("adresse_network_eth0"))
            dico.fill_var("adresse_network_dhcp", adresse_network_dhcp * len_dhcp)
            adresse_netmask_dhcp = eval(dico.get_val("adresse_netmask_eth0"))
            dico.fill_var("adresse_netmask_dhcp", adresse_netmask_dhcp * len_dhcp)
            adresse_ip_gw_dhcp = eval(dico.get_val("adresse_ip_gw", "[]"))
            if adresse_ip_gw_dhcp != []:
                dico.fill_var("adresse_ip_gw_dhcp", adresse_ip_gw_dhcp * len_dhcp)
            nom_domaine_dhcp = eval(dico.get_val("nom_domaine_local", "[]"))
            if nom_domaine_dhcp != []:
                dico.fill_var("nom_domaine_dhcp", nom_domaine_dhcp * len_dhcp)
            adresse_ip_dns_dhcp = eval(dico.get_val("adresse_ip_dns", "[]"))
            if adresse_ip_dns_dhcp != []:
                dico.fill_var("adresse_ip_dns_dhcp", [adresse_ip_dns_dhcp[0]] * len_dhcp)

    # Messagerie (mail) family
    dico.move('passerelle_smtp_aca', 'passerelle_smtp')
    dico.move('spamassassin', 'activer_spamassassin')
    if 'oui' in dico.get_val('courier_imap'):
        if 'oui' in dico.get_val('courier_pop'):
            dico.fill_var('activer_courier', 'pop - imap')
        else:
            dico.fill_var('activer_courier', 'imap')
    elif 'oui' in dico.get_val('courier_pop'):
        dico.fill_var('activer_courier', 'pop')
    else:
        dico.fill_var('activer_courier', 'non')
    # Zéphir
    dico.move('serveur_smtp', 'passerelle_smtp')
    dico.move('compte_smtp', 'system_mail_from')
    if '465' in dico.get_val('port_smtp'):
        dico.fill_var('tls_smtp', 'port 465')

    # Client_ldap family
    dico.move('base_ldap', 'ldap_base_dn')
    serveur_ldap = dico.get_val('serveur_ldap', '[]')
    if serveur_ldap != '[]':
        dico.move('serveur_ldap', 'adresse_ip_ldap')
        if eval(serveur_ldap)[0] not in ['127.0.0.1', 'localhost']:
            dico.fill_var('activer_client_ldap', 'distant')

    # Eole-sso family
    dico.move('adresse_ip_sso', 'eolesso_adresse')
    dico.move('port_sso', 'eolesso_port')
    # eolesso_ldap (multi)
    dico.move('ldap_sso', 'eolesso_ldap')
    dico.move('port_ldap_sso', 'eolesso_port_ldap')
    dico.move('base_ldap_sso', 'eolesso_base_ldap')
    dico.move('sso_ldap_label', 'eolesso_ldap_label')
    dico.move('sso_ldap_reader', 'eolesso_ldap_reader')
    dico.move('sso_ldap_reader_passfile', 'eolesso_ldap_reader_passfile')
    # the remaining sso variables
    dico.move('adresse_sso_parent', 'eolesso_adresse_parent')
    dico.move('port_sso_parent', 'eolesso_port_parent')
    dico.move('sso_pam_securid', 'eolesso_pam_securid')
    dico.move('sso_cert', 'eolesso_cert')
    dico.move('sso_ca_location', 'eolesso_ca_location')
    dico.move('sso_session_timeout', 'eolesso_session_timeout')
    dico.move('sso_css', 'eolesso_css')

    # Web applications family
    dico.move('phpmyadmin', 'activer_phpmyadmin')
    dico.move('posh', 'activer_envole')
    dico.move('web_domain', 'web_url')
    dico.move('web_default', 'web_redirection')
    posh_path = dico.get_val('posh_path', '[]')
    if posh_path != '[]' and eval(posh_path)[0] != '':
        dico.fill_var('alias_envole', '/' + eval(posh_path)[0])

    # Bacula family
    if 'oui' in "%s%s%s" % (dico.get_val('active_bacula_dir'),
                            dico.get_val('active_bacula_fd'),
                            dico.get_val('active_bacula_sd')):
        dico.fill_var('activer_bacula', 'oui')
        dico.move('active_bacula_dir', 'activer_bacula_dir')
        dico.move('active_bacula_sd', 'activer_bacula_sd')
        # bacula_fd is not managed anymore
    else:
        dico.fill_var('activer_bacula', 'non')

    # Clamav family
    dico.move('enable_clamd', 'dansguardian_clam')

    # Certifs-ssl family
    dico.move('ssl_serveur_name', 'ssl_server_name')

    # Authentification family
    dico.move('active_nufw', 'activer_nufw')
    dico.move('freeradius', 'activer_freeradius')

    # Logs family
    if 'Oui' in dico.get_val('activate_tls'):
        dico.fill_var('rsyslog_tls', 'oui')

    # Reverse proxy family
    revprox = dico.get_val('revprox_domainname', '[]')
    if revprox != '[]' and eval(revprox)[0] != '':
        dico.fill_var('revprox_activate_http', 'oui')

    # Advanced network family
    route = dico.get_val('route_adresse', '[]')
    if route != '[]' and eval(route)[0] != '':
        dico.fill_var('activer_route', 'oui')

    # Vpn-pki family
    dico.simple2multi(['url_crl1', 'url_crl2'], 'url_crl')
|
||||
|
||||
|
||||
def upgrade23to24(dico):
|
||||
"""
|
||||
Mise à jour d'un fichier .eol
|
||||
de 2.3 vers 2.4
|
||||
|
||||
:param dico: configparser instance
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.3', '2.4'))
|
||||
cache_dir = dico.get_val('cache_dir', '[]')
|
||||
if cache_dir != '[]' and eval(cache_dir)[0] == '/var/spool/squid':
|
||||
dico.fill_var('cache_dir', '')
|
||||
|
||||
system_mail_to = dico.get_val('system_mail_to', '[]')
|
||||
if system_mail_to != '[]' and eval(system_mail_to)[0] == 'postmaster':
|
||||
dico.fill_var('system_mail_to', '')
|
||||
|
||||
varname = 'alias_gw_eth0'
|
||||
var = dico.get_val(varname, '[]')
|
||||
if var != '[]' and eval(var)[0] == 'aucun':
|
||||
dico.fill_var(varname, '')
|
||||
|
||||
for i in range(0, 5):
|
||||
dico.move('adresse_ip_vlan_eth{0}'.format(i), 'vlan_ip_eth{0}'.format(i))
|
||||
dico.move('adresse_netmask_vlan_eth{0}'.format(i), 'vlan_netmask_eth{0}'.format(i))
|
||||
dico.move('adresse_network_vlan_eth{0}'.format(i), 'vlan_network_eth{0}'.format(i))
|
||||
dico.move('adresse_broadcast_vlan_eth{0}'.format(i), 'vlan_broadcast_eth{0}'.format(i))
|
||||
dico.move('adresse_gw_vlan_eth{0}'.format(i), 'vlan_gw_eth{0}'.format(i))
|
||||
dico.move('id_vlan_eth{0}'.format(i), 'vlan_id_eth{0}'.format(i))
|
||||
|
||||
varname = 'vlan_gw_eth0'
|
||||
var = dico.get_val(varname, '[]')
|
||||
if var != '[]' and eval(var)[0] == 'aucun':
|
||||
dico.fill_var(varname, '')
|
||||
|
||||
dico.move('proxy_eth0_adresse', 'proxy_eth0_ip')
|
||||
dico.move('proxy_eth0_network', 'proxy_eth0_network')
|
||||
dico.move('nom_interface1', 'nom_zone_eth1')
|
||||
dico.move('era_proxy_bypass', 'era_proxy_bypass_eth1')
|
||||
dico.move('smb_adresse_ip_wins', 'smb_wins_server')
|
||||
|
||||
dico.remove('http_port')
|
||||
dico.remove('http_port_2')
|
||||
dico.remove('test_nutmaster')
|
||||
dico.remove('test_activer_routage_ipv6')
|
||||
dico.remove('test_activer_kerberos')
|
||||
dico.remove('test_activer_clam_proxy')
|
||||
dico.remove('test_activer_proxy_eth0')
|
||||
dico.remove('revprox_poshadmin')
|
||||
dico.remove('ip_client_logs_udp')
|
||||
dico.remove('adresse_ip_conteneur_dns')
|
||||
|
||||
dico.simple2multi(['test_distant_domaine1', 'test_distant_domaine2'], 'test_distant_domaine')
|
||||
dico.remove('test_distant_domaine1')
|
||||
dico.remove('test_distant_domaine2')
|
||||
dico.simple2multi(['ssl_subjectaltname_ip', 'ssl_subjectaltname_ns'], 'ssl_subjectaltname')
|
||||
dico.remove('ssl_subjectaltname_ip')
|
||||
dico.remove('ssl_subjectaltname_ns')
|
||||
|
||||
old_serveur_maj = eval(dico.get_val('serveur_maj', '[]'))
|
||||
if old_serveur_maj != []:
|
||||
serveur_maj = []
|
||||
for maj in old_serveur_maj:
|
||||
if maj == 'eoleng.ac-dijon.fr':
|
||||
maj = 'eole.ac-dijon.fr'
|
||||
if maj == 'test-eoleng.ac-dijon.fr':
|
||||
maj = 'test-eole.ac-dijon.fr'
|
||||
serveur_maj.append(maj)
|
||||
dico.fill_var('serveur_maj', serveur_maj)
|
||||
|
||||
ssl_country_name = eval(dico.get_val('ssl_country_name', '[""]'))[0].upper()
|
||||
dico.fill_var('ssl_country_name', ssl_country_name)
|
||||
|
||||
tmp_short_name = []
|
||||
tmp_long_name = []
|
||||
tmp_ip = []
|
||||
nom_domaine_local = eval(dico.get_val('nom_domaine_local', "['']"))[0]
|
||||
def _append_tmp_name(ip, long_name, short_name="NONE"):
|
||||
splitted_labels = long_name.split('.')
|
||||
if short_name == "NONE":
|
||||
short_name = splitted_labels[0]
|
||||
# ajout si non déjà défini dans Réseau avancé
|
||||
if long_name not in tmp_long_name:
|
||||
if short_name not in tmp_short_name:
|
||||
#le nom court n'existe pas dans la liste, donc l'ajoute
|
||||
tmp_short_name.append(short_name)
|
||||
else:
|
||||
if '.'.join(splitted_labels[1:]) == nom_domaine_local:
|
||||
# le nom court est déjà présent
|
||||
# privilégie le nom court pour le nom de domaine local
|
||||
tmp_short_name[tmp_short_name.index(short_name)] = None
|
||||
tmp_short_name.append(short_name)
|
||||
else:
|
||||
# ne pas doublonner le nom court
|
||||
tmp_short_name.append(None)
|
||||
if len(splitted_labels) > 1:
|
||||
tmp_long_name.append(long_name)
|
||||
else:
|
||||
# si nom court, transforme en nom long
|
||||
tmp_long_name.append(long_name + '.' + nom_domaine_local)
|
||||
tmp_ip.append(ip)
|
||||
|
||||
if eval(dico.get_val('activer_ajout_hosts', '["non"]'))[0] == 'oui':
|
||||
# récupération et passage en minuscules des
|
||||
# nom_court_hosts et nom_long_hosts existants #11473
|
||||
ips = eval(dico.get_val('adresse_ip_hosts', '[]').lower())
|
||||
long_names = eval(dico.get_val('nom_long_hosts', '[]').lower())
|
||||
for idx, short_name in enumerate(eval(dico.get_val('nom_court_hosts', '[]').lower())):
|
||||
_append_tmp_name(ips[idx], long_names[idx], short_name)
|
||||
|
||||
# Migration des variables hosts #2795
|
||||
# noms d'hôtes forcés en minuscules #9790
|
||||
nom_host_dns = eval(dico.get_val('nom_host_dns', '[]').lower())
|
||||
if not is_empty(nom_host_dns):
|
||||
ips = eval(dico.get_val('ip_host_dns'))
|
||||
# transforme les nom_host_dns en nom_court_hosts et nom_long_hosts
|
||||
# donc force activer_ajout_hosts à oui
|
||||
dico.fill_var('activer_ajout_hosts', 'oui')
|
||||
for idx, long_name in enumerate(nom_host_dns):
|
||||
_append_tmp_name(ips[idx], long_name)
|
||||
|
||||
if not is_empty(tmp_short_name):
|
||||
dico.fill_var('adresse_ip_hosts', tmp_ip)
|
||||
dico.fill_var('nom_court_hosts', tmp_short_name)
|
||||
dico.fill_var('nom_long_hosts', tmp_long_name)
|
||||
dico.remove('nom_host_dns')
|
||||
dico.remove('ip_host_dns')
|
||||
|
||||
# Ajout du point devant chaque zone #7008
|
||||
old_nom_zone_dns_cache = eval(dico.get_val('nom_zone_dns_cache', '[]'))
|
||||
if not is_empty(old_nom_zone_dns_cache):
|
||||
nom_zone_dns_cache = []
|
||||
for old in old_nom_zone_dns_cache:
|
||||
nom_zone_dns_cache.append('.' + old)
|
||||
dico.fill_var('nom_zone_dns_cache', nom_zone_dns_cache)
|
||||
|
||||
# Modification du chemin de la corbeille Samba #7463
|
||||
smb_trash_dir = eval(dico.get_val('smb_trash_dir', '["/"]'))[0]
|
||||
if not smb_trash_dir.startswith('/'):
|
||||
dico.fill_var('smb_trash_dir', 'perso/{0}'.format(smb_trash_dir))
|
||||
|
||||
# antivirus temps réel => remis à default #19833
|
||||
if dico.get_val('smb_vscan', "['non']") == "['oui']":
|
||||
dico.remove('smb_vscan')
|
||||
|
||||
# Famille Proxy parent #7823
|
||||
if not is_empty(eval(dico.get_val('nom_cache_pere', '[]'))):
|
||||
dico.fill_var('activer_cache_pere', 'oui')
|
||||
if not is_empty(eval(dico.get_val('nom_cache_pere_zone', '[]'))):
|
||||
dico.fill_var('activer_cache_pere_zone', 'oui')
|
||||
if not is_empty(eval(dico.get_val('proxy_sibling_ip', '[]'))):
|
||||
dico.fill_var('activer_proxy_sibling', 'oui')
|
||||
|
||||
# Autorisation proxy eth0 #8167
|
||||
if not is_empty(eval(dico.get_val('proxy_eth0_ip', '[]'))):
|
||||
dico.fill_var('activer_supp_proxy_eth0', 'oui')
|
||||
|
||||
# Famille Rvp #8164
|
||||
if not is_empty(eval(dico.get_val('adresse_network_zone_rvp', '[]'))):
|
||||
dico.fill_var('acces_proxy_zone_rvp', 'oui')
|
||||
|
||||
# half_closed_clients => remise à default #19813
|
||||
if dico.get_val('half_closed_clients', "['off']") == "['on']":
|
||||
dico.remove('half_closed_clients')
|
||||
|
||||
##
|
||||
## Modification de la configuration exim
|
||||
##
|
||||
# passerelle SMTP
|
||||
log.info(_(u"Migrating SMTP parameters"))
|
||||
passerelle_smtp = dico.get_val('passerelle_smtp', '[]')
|
||||
dico.move('passerelle_smtp', 'exim_relay_smtp')
|
||||
if is_empty(passerelle_smtp):
|
||||
# No SMTP gateway
|
||||
dico.fill_var('activer_exim_relay_smtp', u'non')
|
||||
|
||||
# Type de serveur SMTP
|
||||
exim_mail_type = eval(dico.get_val('exim_mail_type', '["satellite"]'))[0]
|
||||
log.info("Migration de exim_mail_type: '{0}'".format(exim_mail_type))
|
||||
dico.remove('exim_mail_type')
|
||||
if exim_mail_type == 'satellite':
|
||||
# Nothing to do
|
||||
pass
|
||||
elif exim_mail_type == 'local':
|
||||
# Local is smarthost without relay, should not happen
|
||||
dico.fill_var('exim_smarthost', u'oui')
|
||||
elif exim_mail_type == 'smarthost':
|
||||
dico.fill_var('exim_smarthost', u'oui')
|
||||
elif exim_mail_type == 'mailhub':
|
||||
dico.fill_var('exim_relay', u'oui')
|
||||
dico.fill_var('exim_relay_manual_routes', u'oui')
|
||||
elif exim_mail_type == 'internet':
|
||||
dico.fill_var('activer_exim_relay_smtp', u'non')
|
||||
dico.fill_var('exim_relay', u'oui')
|
||||
dico.fill_var('exim_relay_manual_routes', u'oui')
|
||||
else:
|
||||
log.warn(_(u'Mail configuration not recognised, not processed'))
|
||||
|
||||
# Réécriture
|
||||
mail_rewrite_domain = eval(dico.get_val('mail_rewrite_domain', '["non"]'))[0]
|
||||
dico.remove('mail_rewrite_domain')
|
||||
if mail_rewrite_domain == 'oui':
|
||||
dico.fill_var('exim_qualify_domain', 'nom de domaine local')
|
||||
|
||||
# Modèle Era utilisé (#9082)
|
||||
mapping = {'2zones-amonecole-nginx' : u'2zones-amonecole',
|
||||
'3zones-scribe-nginx' : u'3zones-dmz',
|
||||
'3zones-scribe' : u'3zones-dmz',
|
||||
'4zones-scribe-nginx' : u'4zones',
|
||||
'4zones-scribe-nufw' : u'4zones',
|
||||
'4zones-scribe' : u'4zones',
|
||||
'5zones-scribe-nginx' : u'5zones',
|
||||
'5zones-scribe' : u'5zones',
|
||||
}
|
||||
model = eval(dico.get_val('type_amon', '[""]'))[0]
|
||||
if model in mapping:
|
||||
dico.fill_var('type_amon', mapping[model])
|
||||
|
||||
# Migration des modules ecdl
|
||||
if dico.get_val('ecdl_regles_filtrage_supplementaires', 'Pas un eCDL') != 'Pas un eCDL':
|
||||
dico.move('ecdl_ldap_machine_suffix', 'ldap_machine_suffix')
|
||||
dico.move('ecdl_ldap_group_suffix', 'ldap_group_suffix')
|
||||
dico.move('ecdl_smb_share_model', 'smb_share_model')
|
||||
dico.move('ecdl_smb_vscan', 'smb_vscan')
|
||||
dico.move('ecdl_smb_ports', 'smb_ports')
|
||||
dico.move('ecdl_smb_server_string', 'smb_server_string')
|
||||
dico.move('ecdl_smb_trash', 'smb_trash')
|
||||
dico.move('ecdl_smb_trash_dir', 'smb_trash_dir')
|
||||
dico.move('ecdl_smb_trash_purge', 'smb_trash_purge')
|
||||
dico.move('ecdl_smb_quotawarn' , 'smb_quotawarn')
|
||||
dico.move('ecdl_smb_guest', 'smb_guest')
|
||||
dico.move('ecdl_smb_wins_support', 'smb_wins_support')
|
||||
dico.move('ecdl_smb_adresse_ip_wins', 'smb_wins_server')
|
||||
dico.move('ecdl_smb_dns_proxy', 'smb_dns_proxy')
|
||||
dico.move('ecdl_smb_oplocks', 'smb_oplocks')
|
||||
dico.move('ecdl_smb_dos_attributes', 'smb_dos_attributes')
|
||||
dico.move('ecdl_smb_unixextensions', 'smb_unixextensions')
|
||||
dico.move('ecdl_smb_partage_nom', 'smb_partage_nom')
|
||||
dico.move('ecdl_smb_partage_path', 'smb_partage_path')
|
||||
dico.move('ecdl_smb_partage_visibilite', 'smb_partage_visibilite')
|
||||
dico.move('ecdl_smb_partage_ecriture', 'smb_partage_ecriture')
|
||||
dico.move('ecdl_regles_filtrage_supplementaires', 'activer_regles_filtrage_port_source')
|
||||
dico.move('ecdl_smb_os_level', 'smb_os_level')
|
||||
dico.move('ecdl_smb_domain_master', 'smb_domain_master')
|
||||
dico.move('ecdl_ca_cert', 'ldap_ca_cert')
|
||||
dico.move('meddtl_suffixe_ldap_nss_base_passwd', 'ldap_nss_base_passwd_filter')
|
||||
dico.move('meddtl_suffixe_ldap_nss_base_group', 'ldap_nss_base_group_filter')
|
||||
dico.move('ecdl_ldap_timeout', 'ldap_timeout')
|
||||
dico.move('ecdl_smb_netbios_name', 'smb_netbios_name')
|
||||
dico.move('ecdl_smb_workgroup', 'smb_workgroup')
|
||||
dico.move('ecdl_smb_usershare_max_shares', 'smb_usershare_max_shares')
|
||||
dico.move('ecdl_smb_activer_partages', 'smb_activer_partages')
|
||||
dico.remove('ecdl_smb_log_level')
|
||||
# fin de migration des modules ecdl
|
||||
|
||||
# migration des modules esbl
|
||||
if dico.get_val('activer_lister_repertoires_apache', 'Pas un eSBL') != 'Pas un eSBL':
|
||||
dico.fill_var('smb_log_level', 0)
|
||||
smb_activer_ordre_resolution_nom = dico.get_val('smb_activer_ordre_resolution_nom', 'non')
|
||||
if smb_activer_ordre_resolution_nom == 'oui':
|
||||
smb_name_resolve_order = " ".join(eval(dico.get_val('smb_procede_recherche_nom')))
|
||||
dico.fill_var('smb_name_resolve_order', smb_name_resolve_order)
|
||||
smb_ad_nom_long_controleur = dico.get_val('smb_ad_nom_long_controleur', "['']")
|
||||
if smb_ad_nom_long_controleur != "['']":
|
||||
dico.fill_var('smb_ad_server', smb_ad_nom_long_controleur)
|
||||
smb_ad_realm = dico.get_val('smb_ad_realm', "['']")
|
||||
if smb_ad_realm != "['']":
|
||||
dico.fill_var('smb_realm', smb_ad_realm)
|
||||
dico.move('activer_lister_repertoires_apache', 'apache_lister_repertoires')
|
||||
|
||||
# répartition des variables pour les répertoires ftp
|
||||
ftps = {}
|
||||
for ftp_rep, ftp_anon in zip(eval(dico.get_val('acces_ftp', '[]')),
|
||||
eval(dico.get_val('acces_ftp_anonymous', '[]'))):
|
||||
ftps[ftp_anon] = ftps.get(ftp_anon, []) + [ftp_rep]
|
||||
# si len(ftps['oui']) > 1, pas de reprise automatique
|
||||
# sinon ftps['oui'] -> ftp_anonymous_directory
|
||||
# ftps['non'] -> ftp_access_directory
|
||||
|
||||
if 'oui' in ftps and len(ftps['oui']) == 1:
|
||||
dico.fill_var('ftp_anonymous_directory', ftps['oui'][0])
|
||||
dico.fill_var('activer_ftp_anonymous_access', 'oui')
|
||||
if 'non' in ftps:
|
||||
dico.fill_var('ftp_access_directory', ftps['non'])
|
||||
dico.fill_var('activer_ftp_access', 'oui')
|
||||
ftp_maxretrievefilesize = dico.get_val('ftp_maxretrievefilesize', '')
|
||||
if ftp_maxretrievefilesize != '':
|
||||
ftp_maxretrievefilesize = re.search(r'[0-9]+', ftp_maxretrievefilesize).group()
|
||||
dico.fill_var('ftp_maxretrievefilesize', ftp_maxretrievefilesize)
|
||||
ftp_maxstorefilesize = dico.get_val('ftp_maxstorefilesize', '')
|
||||
if ftp_maxstorefilesize != '':
|
||||
ftp_maxstorefilesize = re.search(r'[0-9]+', ftp_maxstorefilesize).group()
|
||||
dico.fill_var('ftp_maxstorefilesize', ftp_maxstorefilesize)
|
||||
|
||||
dico.move('activer_pare_feu', 'activer_firewall')
|
||||
# fin de migration des modules esbl
|
||||
|
||||
# migration des modules essl
|
||||
if dico.get_val('ecdl_serveurs_ip', "Pas un eSSL") != "Pas un eSSL":
|
||||
# variables ftp_max*
|
||||
ftp_maxretrievefilesize = dico.get_val('ftp_maxretrievefilesize', '')
|
||||
if ftp_maxretrievefilesize != '':
|
||||
ftp_maxretrievefilesize = re.search(r'[0-9]+', ftp_maxretrievefilesize).group()
|
||||
dico.fill_var('ftp_maxretrievefilesize', ftp_maxretrievefilesize)
|
||||
ftp_maxstorefilesize = dico.get_val('ftp_maxstorefilesize', '')
|
||||
if ftp_maxstorefilesize != '':
|
||||
ftp_maxstorefilesize = re.search(r'[0-9]+', ftp_maxstorefilesize).group()
|
||||
dico.fill_var('ftp_maxstorefilesize', ftp_maxstorefilesize)
|
||||
# variables renommées
|
||||
dico.move('sites_distants_morea_ip', 'sites_distants_ip')
|
||||
dico.move('sites_distants_morea_netmask', 'sites_distants_netmask')
|
||||
dico.move('nagios_morea_ip', 'nagios_dist_ip')
|
||||
dico.move('nagios_morea_netmask', 'nagios_dist_netmask')
|
||||
dico.move('morea_routeur_ip', 'wan_routeur_ip')
|
||||
dico.move('morea_interface', 'wan_interface')
|
||||
dico.move('surf_lan_ip', 'sites_dist_ip')
|
||||
dico.move('surf_lan_netmask', 'sites_dist_netmask')
|
||||
dico.move('morea_route_adresse', 'wan_route_adresse')
|
||||
dico.move('morea_route_netmask', 'wan_route_netmask')
|
||||
# conversions de valeurs
|
||||
variante_type_mapping = {'standard': 'production',
|
||||
'Applis Web': 'Applis_Web',
|
||||
'eSSL Morea': 'eSSL',
|
||||
'eSSL Internet': 'eSSL_Internet',
|
||||
'eSSL SPC': 'eSSL_SPC',
|
||||
'ppp': 'PPP',
|
||||
'': 'production'}
|
||||
variante_type = eval(dico.get_val('variante_type', "['']"))[0]
|
||||
dico.fill_var('variante_type', variante_type_mapping[variante_type])
|
||||
|
||||
# migration des variables dhcp
|
||||
exxl_dhcp = dico.has_section('dhcp_lease_max')
|
||||
if dico.get_val('activer_dhcp', "['non']") == "['oui']" and exxl_dhcp:
|
||||
# récupération des valeurs de la multi
|
||||
ip_basse = eval(dico.get_val('ip_basse_dhcp', '[""]'))
|
||||
ip_haute = eval(dico.get_val('ip_haute_dhcp', '[""]'))
|
||||
restriction = eval(dico.get_val('activer_dhcp_hotes_autorises', "['']"))
|
||||
lease_default = eval(dico.get_val('dhcp_lease_default', "['']"))
|
||||
lease_max = eval(dico.get_val('dhcp_lease_max', "['']"))
|
||||
# récupération des valeurs communes simples
|
||||
network = [eval(dico.get_val('adresse_network_dhcp', "['']"))[0]]
|
||||
netmask = [eval(dico.get_val('adresse_netmask_dhcp', "['']"))[0]]
|
||||
nom_domaine_dhcp = [eval(dico.get_val('nom_domaine_dhcp', "['']"))[0]]
|
||||
gateway_dhcp = [eval(dico.get_val('adresse_ip_gw_dhcp', "['']"))[0]]
|
||||
# récupération des valeurs communes multiples
|
||||
dns_dhcp = eval(dico.get_val('adresse_ip_dns_dhcp', "['']"))
|
||||
wins = eval(dico.get_val('adresse_ip_wins_dhcp', "['']"))
|
||||
wins_primaire = wins[0]
|
||||
if len(wins) > 1:
|
||||
wins_secondaire = wins[1]
|
||||
else:
|
||||
wins_secondaire = wins_primaire
|
||||
ntp_dhcp = eval(dico.get_val('adresse_ip_ntp_dhcp', "['']"))
|
||||
# création des nouvelles listes, produit cartésien
|
||||
ranges, dns_dhcp, ntp_dhcp = zip(*list(product(zip(ip_basse, ip_haute, restriction, lease_default, lease_max), dns_dhcp, ntp_dhcp)))
|
||||
dns_dhcp = list(dns_dhcp)
|
||||
ntp_dhcp = list(ntp_dhcp)
|
||||
ip_basse, ip_haute, restriction, lease_default, lease_max = [list(l) for l in zip(*ranges)]
|
||||
nb_ranges = len(ip_basse)
|
||||
nom_domaine_dhcp = nom_domaine_dhcp*nb_ranges
|
||||
gateway_dhcp = gateway_dhcp*nb_ranges
|
||||
wins_primaire = [wins_primaire]*nb_ranges
|
||||
wins_secondaire = [wins_secondaire]*nb_ranges
|
||||
network = network*nb_ranges
|
||||
netmask = netmask*nb_ranges
|
||||
# chargement des valeurs dans le dictionnaire
|
||||
dico.fill_var('adresse_network_dhcp', network)
|
||||
dico.fill_var('adresse_netmask_dhcp',netmask)
|
||||
dico.fill_var('ip_basse_dhcp', ip_basse)
|
||||
dico.fill_var('ip_haute_dhcp', ip_haute)
|
||||
dico.fill_var('nom_domaine_dhcp', nom_domaine_dhcp)
|
||||
dico.fill_var('adresse_ip_gw_dhcp', gateway_dhcp)
|
||||
dico.fill_var('adresse_ip_dns_dhcp', dns_dhcp)
|
||||
dico.fill_var('adresse_ip_wins_primaire_dhcp', wins_primaire)
|
||||
dico.fill_var('adresse_ip_wins_secondaire_dhcp', wins_secondaire)
|
||||
dico.fill_var('adresse_ip_ntp_dhcp', ntp_dhcp)
|
||||
dico.fill_var('interdire_hotes_inconnus', restriction)
|
||||
dico.fill_var('dhcp_lease_default', lease_default)
|
||||
dico.fill_var('dhcp_lease_max', lease_max)
|
||||
|
||||
#envole
|
||||
if dico.get_val('activer_envole', "['non']") == "['oui']" and dico.get_val('force_envole', "['non']") == "['oui']":
|
||||
alias_envole = eval(dico.get_val('alias_envole'))[0]
|
||||
if alias_envole != '/':
|
||||
dico.fill_var('web_redirection', alias_envole)
|
||||
dico.remove('alias_envole')
|
||||
|
||||
def get_version(dico):
    """Detect the format version of a config.eol file.

    The version is guessed from marker sections: a 'serveur_maj2'
    section without an 'activer_bash_completion' section identifies
    a 2.2 configuration; anything else is treated as 2.3.

    :param dico: ConfigParser-like object exposing ``has_section``
    :return: '2.2' or '2.3'
    """
    # ________ config.eol version ________
    looks_like_2_2 = (dico.has_section('serveur_maj2')
                      and not dico.has_section('activer_bash_completion'))
    return '2.2' if looks_like_2_2 else '2.3'
|
||||
|
||||
|
||||
def main(config_file):
    """Main entry point (placeholder — processing not implemented yet).

    :param config_file: path of the configuration file to handle
    :return: None
    """
    return None
|
||||
|
||||
if __name__ == '__main__':
    import sys
    # Exactly one command-line argument is expected: the path of the
    # configuration file to migrate.
    if len(sys.argv) != 2:
        # Python 2 print statement: show the module usage text, then exit
        # with a non-zero status to signal misuse.
        print __doc__
        sys.exit(1)
    main(sys.argv[1])
|
|
@ -0,0 +1,735 @@
|
|||
#!/usr/bin/env python
|
||||
#-*- coding: utf-8 -*-
|
||||
"""
|
||||
|
||||
Utilitaire de mise à jour des variables
|
||||
pour les versions >= 2.4.1
|
||||
|
||||
"""
|
||||
from .upgrade import log, migration_23_to_tiramisu
|
||||
from .var_loader import convert_value
|
||||
from pyeole.i18n import i18n
|
||||
from tiramisu.setting import owners
|
||||
from tiramisu.setting import undefined
|
||||
from distutils.version import StrictVersion
|
||||
from pyeole.encode import normalize
|
||||
_ = i18n('creole')
|
||||
|
||||
class Upgrade():
    """
    Helper methods used to upgrade configuration variables from one
    EOLE/Creole release to the next (tiramisu-based configurations).
    """
    def __init__(self, config):
        # Register a dedicated tiramisu owner so every value written by
        # the upgrade process can be distinguished from user input.
        owner = u'upgrade'
        if owner not in dir(owners):
            owners.addowner(owner)
        self.config = config
        self.owner = getattr(owners, owner)
        # Values loaded from the old configuration file whose variable no
        # longer exists in the current schema (recorded at load time).
        self.unknown_options = config.impl_get_information(u'unknown_options')

    def get_old_value(self, variable, old_variable, default=None):
        """
        Return the stored value of a variable that disappeared.

        :param variable: name of an existing variable whose option object
                         is used to convert values stored in the old
                         (Creole 2.3) format
        :param old_variable: name of the removed variable to look up
        :param default: returned when the value cannot be retrieved
        """
        try:
            old_obj = self.unknown_options[old_variable]
            if old_obj.get('old_format', False):
                try:
                    path = self.get_path(variable)
                except AttributeError:
                    # reference variable unknown: no option to convert with
                    return default
                opt = self.config.unwrap_from_path(path)
                val = migration_23_to_tiramisu(opt, old_obj['val'])
            else:
                val = old_obj['val']
            return val
        except KeyError:
            # old_variable was never recorded among the unknown options
            return default

    def get_value(self, variable, default=None):
        """
        Return the current value of a known variable.

        :param default: returned when the variable does not exist
        """
        try:
            path = self.get_path(variable)
        except AttributeError:
            return default
        return self.config.getattr(path,
                                   force_permissive=True)

    def get_unvalid_value(self, variable, default=None):
        # Return the original (invalid) value that was recorded when the
        # configuration loader failed to validate this variable.
        try:
            path = self.get_path(variable)
        except AttributeError:
            return default
        try:
            return self.config.impl_get_information('orig_value_{}'.format(path))
        except ValueError:
            return default

    def get_noncalculated_value_for_auto(self, variable):
        """
        Return the value stored in config.eol for a variable that is
        currently calculated ('auto'), forced to its default value, etc.
        """
        try:
            path = self.get_path(variable)
        except AttributeError:
            log.error(_('get_noncalculated_value_for_auto: unknown variable {}').format(variable))
            return None
        values = self.config.cfgimpl_get_values()
        if values._contains(path):
            # Collect every stored slot until the storage backend reports
            # 'undefined' (end of the stored values).
            idx = 0
            vals = []
            while True:
                val = values._p_.getvalue(path, values._p_.getsession(), idx)
                if val is undefined:
                    break
                vals.append(val)
                idx += 1
            if len(vals) > 0:
                return vals
            else:
                return None
        return None

    def var_exists(self, variable):
        # True when the variable is present in the current schema.
        try:
            self.get_path(variable)
            return True
        except AttributeError:
            return False

    def get_path(self, variable):
        """
        Return the full path of a variable.

        :raise AttributeError: when no option matches the name
        """
        return self.config.find_first(byname=variable, type_='path')

    def modify_owner(self, path, value=None, index=None):
        """
        Mark a variable as owned by the upgrade process.

        For a slave option, either ``index`` (a single slot) or ``value``
        (used only for its length: every slot is tagged) must be given.
        """
        option = self.config.unwrap_from_path(path)
        if option.impl_is_master_slaves('slave'):
            if index is not None:
                self.config.cfgimpl_get_values().setowner(option,
                                                          self.owner,
                                                          index=index)
            elif value is not None:
                for idx in xrange(len(value)):
                    self.config.cfgimpl_get_values().setowner(option,
                                                              self.owner,
                                                              index=idx)
            else:
                raise Exception('must have value or index for slave')

        else:
            self.config.cfgimpl_get_values().setowner(option,
                                                      self.owner)

    def is_default(self, variable, default=True):
        """
        Return True when the value was not customized by the user.

        :param default: returned when the variable does not exist
        """
        try:
            path = self.get_path(variable)
        except AttributeError:
            return default
        option = self.config.unwrap_from_path(path)
        return self.config.cfgimpl_get_values().is_default_owner(option)

    def set_value(self, variable, value):
        """
        Set the value of a variable.

        Errors are logged, never raised: on failure the option is tagged
        with the 'load_error' property.  A ValueError triggers a second
        attempt after converting the value from the Creole 2.3 format.
        """
        try:
            path = self.get_path(variable)
        except AttributeError:
            log.error(_(u"Try to set value to unknown option: {0} = {1}").format(variable, value))
        else:
            try:
                self.config._setattr(path, value,
                                     force_permissive=True)
                self.modify_owner(path, value)
                log.info(_(u"Variable updated: {0} = {1}").format(variable, value))
                # the value is now valid: drop any error recorded at load time
                self.config.impl_del_information('error_msg_{}'.format(path), raises=False)
                self.config.impl_del_information('orig_value_{}'.format(path), raises=False)
                option = self.config.unwrap_from_path(path)
                self.config.cfgimpl_get_settings()[option].remove('load_error')
            except ValueError:
                option = self.config.unwrap_from_path(path)
                try:
                    # the value could be in Creole 2.3 format #13957
                    if not option.impl_is_multi() and isinstance(value, list) and len(value) == 1:
                        value = value[0]
                    if value in ['', ['']]:
                        err_msg = _(u"empty value")
                        log.error(_(u"{0} for {1}").format(err_msg, variable))
                        return
                    self.config._setattr(path, convert_value(option, value),
                                         force_permissive=True)
                    self.modify_owner(path, value)
                    log.info(_(u"Variable updated: {0} = {1}").format(variable, value))
                except Exception, err:
                    log.error(_(u"{0} for {1}").format(err, variable))
                    self.config.cfgimpl_get_settings()[option].append('load_error')
            except Exception, err:
                option = self.config.unwrap_from_path(path)
                log.error(_("{0} for {1}").format(normalize(str(err)), variable))
                self.config.cfgimpl_get_settings()[option].append('load_error')

    def del_value(self, variable):
        # Reset a variable to its default value (delete the stored value).
        try:
            path = self.get_path(variable)
        except AttributeError:
            log.error(_('Try to delete an unknown option: {0}').format(variable))
        else:
            option = self.config.unwrap_from_path(path)
            self.config.cfgimpl_get_values().__delitem__(option)
            log.info(_(u"Variable {0} reinitialized").format(variable))

    def append_value(self, variable, value):
        """
        Append a value to a multi variable.
        """
        try:
            path = self.get_path(variable)
        except AttributeError:
            log.error(_('Try to append a value to an unknown option: {0} += {1}').format(variable, value))
        else:
            multi = self.config.getattr(path,
                                        force_permissive=True)
            multi.append(value)
            self.modify_owner(path, index=len(multi) - 1)

    def modify_last_value(self, variable, value):
        """
        Replace the last value of a multi variable.
        """
        try:
            path = self.get_path(variable)
        except AttributeError:
            log.error(_('Try to modify last value of an unknown option: {0}[-1] = {1}').format(variable, value))
        else:
            multi = self.config.getattr(path,
                                        force_permissive=True)
            multi[-1] = value
            self.modify_owner(path, index=len(multi) - 1)

    def move(self, old_variable, new_variable):
        """
        Move the data of a removed variable to a new variable.
        """
        if old_variable in self.unknown_options:
            value = self.unknown_options[old_variable][u'val']
            path = self.get_path(new_variable)
            option = self.config.unwrap_from_path(path)
            if value in ['', ['']]:
                err_msg = _(u"empty value")
                log.error(_(u"{0} for {1}").format(err_msg, old_variable))
                return
            if option.impl_is_multi() and isinstance(value, list):
                # multi target: append the values one by one
                for val in value:
                    self.append_value(new_variable, val)
            else:
                self.set_value(new_variable, value)
            del(self.unknown_options[old_variable])
            log.info(_(u"Variable {0} has been renamed to {1}").format(old_variable, new_variable))

    def copy(self, old_variable, new_variable, only_if_modified=True):
        """
        Copy the value of an existing variable to another one.

        If "old" is a multi and "new" is not => copy the first list value.
        If "old" is not a multi and "new" is => wrap the value in a list.

        :param only_if_modified: when True, only copy user-modified values
        """
        try:
            # both variables must exist for the migration to happen
            old_path = self.get_path(old_variable)
            new_path = self.get_path(new_variable)
        except AttributeError:
            pass
        else:
            old_option = self.config.unwrap_from_path(old_path)
            new_option = self.config.unwrap_from_path(new_path)
            # copy only when the new option is untouched and either the old
            # one was modified or only_if_modified is False
            if self.config.cfgimpl_get_values().is_default_owner(new_option) and \
                    (not only_if_modified or
                     not self.config.cfgimpl_get_values().is_default_owner(old_option)):
                old_value = self.config.getattr(old_path,
                                                force_permissive=True)
                if old_option.impl_is_multi() and not new_option.impl_is_multi():
                    if len(old_value) != 0:
                        old_value = old_value[0]
                    else:
                        old_value = None
                if not old_option.impl_is_multi() and new_option.impl_is_multi():
                    if old_value is None:
                        old_value = []
                    else:
                        old_value = [old_value]
                self.set_value(new_variable, old_value)
|
||||
|
||||
|
||||
|
||||
class Upgrade_2_4_1(Upgrade):
    """
    Upgrade a configuration from 2.4.0 to 2.4.1.
    """

    def run(self):
        """
        Apply every 2.4.0 -> 2.4.1 migration step, in order.
        """
        log.info(_(u"Starting {0} to {1} upgrade").format('2.4.0', '2.4.1'))

        # rename the "era_proxy_bypass" variables (one per interface)
        for i in range(1, 5):
            self.move('era_proxy_bypass_eth{0}'.format(i), 'proxy_bypass_network_eth{0}'.format(i))

        # merge the "proxy_bypass" and "wpad_exclude" variables
        if 'adresse_ip_wpad_exclude' in self.unknown_options:
            # the first argument is only used to recover the option
            # properties (choiceoption, multi, ...); pass the variable of
            # the first interface
            old_interfaces = self.get_old_value('proxy_bypass_network_eth1', 'interface_wpad_exclude')
            netmasks = self.get_old_value('proxy_bypass_netmask_eth1', 'adresse_netmask_wpad_exclude')
            for idx, value in enumerate(self.get_old_value('proxy_bypass_network_eth1', 'adresse_ip_wpad_exclude')):
                interface = old_interfaces[idx]
                if interface == 'Toutes':
                    # 'Toutes' (all interfaces): spread the entry over eth1..eth4
                    interfaces = range(1, 5)
                elif int(interface) in range(1, 5):
                    interfaces = [interface]
                else:
                    log.error(_(u"Invalid value : {0} in old variable {1}").format(interface, 'interface_wpad_exclude'))
                    continue
                for i in interfaces:
                    # append the network, then set the matching netmask on
                    # the slot that was just created
                    self.append_value('proxy_bypass_network_eth{0}'.format(i), value)
                    self.modify_last_value('proxy_bypass_netmask_eth{0}'.format(i), netmasks[idx])
            del(self.unknown_options['adresse_ip_wpad_exclude'])
            del(self.unknown_options['adresse_netmask_wpad_exclude'])
            del(self.unknown_options['interface_wpad_exclude'])

        # switch the "proxy_bypass_ethX" flags to 'oui' when bypass
        # networks were migrated for that interface
        for i in range(1, 5):
            if len(self.get_value('proxy_bypass_network_eth{0}'.format(i), [])) > 0:
                self.set_value('proxy_bypass_eth{0}'.format(i), u'oui')

        # transfer the nom_domaine_wpad_exclude variables
        if 'nom_domaine_wpad_exclude' in self.unknown_options:
            old_interfaces = self.get_old_value('proxy_bypass_domain_eth1', 'nom_interface_wpad_exclude')
            for idx, value in enumerate(self.get_old_value('proxy_bypass_domain_eth1', 'nom_domaine_wpad_exclude')):
                interface = old_interfaces[idx]
                if interface == 'Toutes':
                    interfaces = range(1, 5)
                elif int(interface) in range(1, 5):
                    interfaces = [interface]
                else:
                    log.error(_(u"Invalid value : {0} in old variable {1}").format(interface, 'nom_interface_wpad_exclude'))
                    continue
                for i in interfaces:
                    self.append_value('proxy_bypass_domain_eth{0}'.format(i), value)
            del(self.unknown_options['nom_domaine_wpad_exclude'])
            del(self.unknown_options['nom_interface_wpad_exclude'])

        # nom_serveur_scribe_dmz/ip_serveur_scribe_dmz => mandatory (#11713)
        if self.get_value('install_scribe_dmz') == u'oui':
            if self.get_value('nom_serveur_scribe_dmz') == None or self.get_value('ip_serveur_scribe_dmz') == None:
                self.set_value('install_scribe_dmz', u'non')
|
||||
|
||||
|
||||
class Upgrade_2_4_2(Upgrade):
    """
    Upgrade a configuration from 2.4.1 to 2.4.2.
    """

    def run(self):
        """
        Apply every 2.4.1 -> 2.4.2 migration step, in order.
        """
        log.info(_(u"Starting {0} to {1} upgrade").format('2.4.1', '2.4.2'))
        # eolesso variables migrated to the LDAP client ones #10821
        eolesso_to_ldap = (
            ('eolesso_port_ldap', 'ldap_port'),
            ('eolesso_ldap_reader', 'ldap_reader'),
            ('eolesso_ldap_reader_passfile', 'ldap_reader_passfile'),
            ('eolesso_ldap_match_attribute', 'ldap_match_attribute'),
            ('eolesso_ldap_filter_user', 'ldap_filter_user'),
            ('eolesso_ldap_filter_group', 'ldap_filter_group'),
            ('eolesso_ldap_dntree_user', 'ldap_dntree_user'),
            ('eolesso_ldap_dntree_group', 'ldap_dntree_group'),
            ('eolesso_ldap_fill_displayname', 'ldap_fill_displayname'),
            ('eolesso_ldap_fill_mail', 'ldap_fill_mail'),
            ('eolesso_ldap_fill_fonction', 'ldap_fill_fonction'),
            ('eolesso_ldap_fill_categorie', 'ldap_fill_categorie'),
            ('eolesso_ldap_fill_rne', 'ldap_fill_rne'),
            ('eolesso_ldap_fill_fredurne', 'ldap_fill_fredurne'),
            ('eolesso_ldap_fill_displaygroup', 'ldap_fill_displaygroup'),
        )
        for old_name, new_name in eolesso_to_ldap:
            self.copy(old_name, new_name)

        # courier variables migration #10987
        courier_val = self.get_old_value('activer_recuperation_courriel', 'activer_courier')
        if courier_val is not None:
            if courier_val == 'non':
                self.set_value('activer_recuperation_courriel', 'non')
            elif 'imap' not in courier_val:
                self.set_value('activer_courier_imap', 'non')
            if 'pop' in courier_val:
                self.set_value('activer_courier_pop', 'oui')
|
||||
|
||||
|
||||
class Upgrade_2_5_0(Upgrade):
    """
    Upgrade a configuration from 2.4.X to 2.5.0.
    """

    def run(self):
        """
        Apply every 2.4.X -> 2.5.0 migration step, in order.
        """
        log.info(_(u"Starting {0} to {1} upgrade").format('2.4.X', '2.5.0'))

        # nut variables migration #11608
        monitor = self.get_value('nut_monitor_user')
        if monitor != []:
            self.set_value('nut_monitor', 'oui')

        # postgresql variables migration for Zéphir #11974
        old_pg_shared_buffers = self.get_value('pg_shared_buffers')
        if old_pg_shared_buffers is not None:
            if int(old_pg_shared_buffers) == 3072:
                # 3072 is special-cased: reset to the current default
                # (presumably the former default value — TODO confirm)
                self.del_value('pg_shared_buffers')
            else:
                # keep the customized value, recorded with its kB unit
                self.set_value('pg_shared_buffers_unit', u'kB')
            self.del_value('pg_effective_cache_size')
|
||||
|
||||
|
||||
class Upgrade_2_5_1(Upgrade):
    """
    Upgrade a configuration from 2.5.0 to 2.5.1.
    """

    def run(self):
        """
        Apply every 2.5.0 -> 2.5.1 migration step, in order.
        """
        log.info(_(u"Starting {0} to {1} upgrade").format('2.5.0', '2.5.1'))

        # zone_forward variables migration (#11922)
        zone_forward = self.get_value('nom_zone_forward', [])
        if zone_forward != []:
            self.set_value('activer_zone_forward', 'oui')

        # from bacula to bareos (#12425)
        for var in ['activer_bareos_dir', 'activer_bareos_sd',
                    'bareos_dir_name', 'bareos_full_retention',
                    'bareos_full_retention_unit', 'bareos_diff_retention',
                    'bareos_diff_retention_unit', 'bareos_inc_retention',
                    'bareos_inc_retention_unit', 'bareos_max_run_time',
                    'bareos_compression', 'bareos_dir_password',
                    'bareos_fd_password', 'bareos_sd_local',
                    'bareos_sd_adresse', 'bareos_sd_password',
                    'bareos_sd_name', 'bareos_sd_remote_dir_name',
                    'bareos_sd_remote_ip', 'bareos_sd_remote_password']:
            # the old variable has the same name with 'bacula' in place
            # of 'bareos'
            self.move(var.replace('bareos', 'bacula'), var)

        if self.get_value('activer_bareos_dir') == u'oui':
            # a backup was already programmed with sqlite3; migrating the
            # catalog to mysql is not handled
            self.set_value('bareos_db_type', 'sqlite3')

        if self.get_value('ldap_ca_cert') == '/etc/certs/CA2008.pem':
            self.set_value('ldap_ca_cert', '/etc/certs/certificat.pem')
|
||||
|
||||
|
||||
class Upgrade_2_5_2(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.5.1 vers 2.5.2
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.5.1', '2.5.2'))
|
||||
|
||||
# haute dispo présente
|
||||
if self.var_exists('activer_haute_dispo'):
|
||||
# migration HD sphynx #14881
|
||||
if self.var_exists('activer_resource_arv'):
|
||||
activer_haute_dispo = self.get_value('activer_haute_dispo')
|
||||
if activer_haute_dispo == 'maitre':
|
||||
service_resource_name = self.get_noncalculated_value_for_auto('service_resource_name')
|
||||
service_resource_startdelay = self.get_noncalculated_value_for_auto('service_resource_startdelay')
|
||||
need_update = False
|
||||
startdelay_index = 1
|
||||
need_disabled_arv = False
|
||||
if service_resource_startdelay is not None:
|
||||
if service_resource_name is not None:
|
||||
need_update = 'arv_rsc' in service_resource_name
|
||||
if need_update:
|
||||
startdelay_index = service_resource_name.index('arv_rsc')
|
||||
need_disabled_arv = not need_update
|
||||
else:
|
||||
need_update = True
|
||||
self.del_value('service_resource_name')
|
||||
self.del_value('service_resource_script')
|
||||
self.del_value('service_resource_interval')
|
||||
self.del_value('service_resource_timeout')
|
||||
self.del_value('service_resource_startdelay')
|
||||
if need_update and service_resource_startdelay[startdelay_index] != 15:
|
||||
self.set_value('service_resource_arv_startdelay', service_resource_startdelay[startdelay_index])
|
||||
if need_disabled_arv:
|
||||
self.set_value('activer_resource_arv', u'non')
|
||||
#
|
||||
vip_resource_adresseip = self.get_noncalculated_value_for_auto('vip_resource_adresseip')
|
||||
self.del_value('vip_resource_name')
|
||||
self.del_value('vip_resource_if')
|
||||
self.del_value('vip_resource_adresseip')
|
||||
self.del_value('vip_resource_location')
|
||||
if vip_resource_adresseip is not None:
|
||||
if len(vip_resource_adresseip) > 0:
|
||||
self.set_value('vip_externe', vip_resource_adresseip[0])
|
||||
if len(vip_resource_adresseip) > 1:
|
||||
self.set_value('vip_interne', vip_resource_adresseip[1])
|
||||
# migration HD non Sphynx #14951
|
||||
else:
|
||||
vip_resource_if = self.get_noncalculated_value_for_auto('vip_resource_if')
|
||||
vip_netmask = []
|
||||
for vip_if in vip_resource_if:
|
||||
netmask_var = 'adresse_netmask_{0}'.format(vip_if.lower())
|
||||
vip_netmask.append(self.get_value(netmask_var))
|
||||
if len(vip_netmask) > 0:
|
||||
self.set_value('vip_resource_netmask', vip_netmask)
|
||||
service_resource_name = self.get_noncalculated_value_for_auto('service_resource_name')
|
||||
if len(service_resource_name) > 0:
|
||||
self.set_value('activer_service_resource', 'oui')
|
||||
|
||||
|
||||
class Upgrade_2_6_0(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.5.X vers 2.6.0
|
||||
"""
|
||||
|
||||
def get_eth_no(self, eth):
|
||||
"""
|
||||
Retourne le numéro X du nom de l'interface ethX
|
||||
"""
|
||||
try:
|
||||
return eth.split("eth")[1]
|
||||
except:
|
||||
log.error(_(u"Interface {0} name has not an 'ethX' format").format(eth))
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.5.X', '2.6.0'))
|
||||
|
||||
# migration des variables faisant référence au nom des interfaces ethX
|
||||
eth_vars = ['route_int', 'fw_rule_int', 'dhcrelay_server_interface', 'freerad_listen_int',
|
||||
'sw_force_ip_src', 'corosync_dial_if', 'dhcrelay_interfaces']
|
||||
for eth_var in eth_vars:
|
||||
eth_name = self.get_unvalid_value(eth_var)
|
||||
if isinstance(eth_name, list):
|
||||
eth_no = []
|
||||
for eth in eth_name:
|
||||
if eth == 'all':
|
||||
eth_no.append(eth)
|
||||
else:
|
||||
eth_no.append(self.get_eth_no(eth))
|
||||
if eth_no != [] and eth_no != eth_name:
|
||||
self.set_value(eth_var, eth_no)
|
||||
elif isinstance(eth_name, dict):
|
||||
eth_no = []
|
||||
for eth_key, eth_value in eth_name.items():
|
||||
if eth_value == 'all':
|
||||
eth_no.append(eth_value)
|
||||
else:
|
||||
eth_no.append(self.get_eth_no(eth_value))
|
||||
if eth_no != [] and eth_no != eth_name:
|
||||
self.set_value(eth_var, eth_no)
|
||||
elif eth_name is not None:
|
||||
eth_no = self.get_eth_no(eth_name)
|
||||
self.set_value(eth_var, eth_no)
|
||||
elif eth_var == 'dhcrelay_server_interface' and self.get_value('adresse_ip_dhcp_dhcrelay') is not None:
|
||||
# migration de l'ancienne valeur par défaut de dhcrelay_server_interface #18329
|
||||
self.set_value(eth_var, u'3')
|
||||
# haute dispo présente
|
||||
if self.var_exists('activer_haute_dispo'):
|
||||
# migration HD non sphynx
|
||||
if not self.var_exists('activer_resource_arv'):
|
||||
eth_name = self.get_noncalculated_value_for_auto('vip_resource_if')
|
||||
eth_no = []
|
||||
for eth in eth_name:
|
||||
eth_no.append(self.get_eth_no(eth))
|
||||
self.set_value('vip_resource_if', eth_no)
|
||||
|
||||
|
||||
class Upgrade_2_6_1(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.6.0 vers 2.6.1
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.6.0', '2.6.1'))
|
||||
|
||||
# migration des variables NTLM/SMB : multi -> non multi (#18277)
|
||||
if self.var_exists('nom_serveur_smb'):
|
||||
for varname in ('nom_serveur_smb', 'nom_domaine_smb', 'ip_serveur_smb'):
|
||||
value = self.get_unvalid_value(varname)
|
||||
if isinstance(value, list) and len(value) > 1:
|
||||
self.set_value(varname, value[0])
|
||||
|
||||
# nom_carte_ethX => multi-valuées (#18609)
|
||||
for numint in range(0, 5):
|
||||
varname = 'nom_carte_eth{}'.format(numint)
|
||||
value = self.get_unvalid_value(varname)
|
||||
if value != None:
|
||||
self.set_value(varname, [value])
|
||||
|
||||
# migration variable 'module_type' pour le module esbl ('ESBL') -> ('eSBL') (#21677)
|
||||
if self.get_value('eole_module') == u'esbl':
|
||||
self.set_value('module_type', u'eSBL')
|
||||
|
||||
|
||||
class Upgrade_2_6_2(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.6.1 vers 2.6.2
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.6.1', '2.6.2'))
|
||||
|
||||
adresse_network_dhcp = self.get_value('adresse_network_dhcp')
|
||||
if adresse_network_dhcp:
|
||||
plages = []
|
||||
for idx in xrange(len(adresse_network_dhcp)):
|
||||
plages.append(u'plage{}'.format(idx))
|
||||
self.set_value('nom_plage_dhcp', plages)
|
||||
if self.var_exists('acces_distant_backend_ead'):
|
||||
self.set_value('acces_distant_backend_ead', 'oui')
|
||||
for interface in [str(n) for n in range(5)]:
|
||||
variable = 'frontend_ead_distant_eth' + interface
|
||||
if self.var_exists(variable):
|
||||
self.set_value(variable, 'oui')
|
||||
variable = 'ip_frontend_ead_distant_eth' + interface
|
||||
if self.var_exists(variable):
|
||||
self.set_value(variable, ['0.0.0.0'])
|
||||
variable = 'netmask_frontend_ead_distant_eth' + interface
|
||||
if self.var_exists(variable):
|
||||
self.set_value(variable, ['0.0.0.0'])
|
||||
# Upgrade Seth
|
||||
# AD firewall - mix old multi variables ad_clients_ip and
|
||||
# ad_servers_ip in ad_peer_ip
|
||||
ad_servers_ip = self.get_old_value('ad_peer_ip', 'ad_servers_ip')
|
||||
ad_clients_ip = self.get_old_value('ad_peer_ip', 'ad_clients_ip')
|
||||
if ad_servers_ip or ad_clients_ip:
|
||||
self.set_value('ad_filter_network', 'oui')
|
||||
if ad_servers_ip:
|
||||
ad_servers_netmask = self.get_old_value('ad_peer_netmask', 'ad_servers_netmask')
|
||||
for ip, netmask in zip(ad_servers_ip, [nm[1] for nm in sorted(ad_servers_netmask.items())]):
|
||||
self.append_value('ad_peer_ip', ip)
|
||||
self.modify_last_value('ad_peer_netmask', netmask)
|
||||
del(self.unknown_options['ad_servers_ip'])
|
||||
del(self.unknown_options['ad_servers_netmask'])
|
||||
if ad_clients_ip:
|
||||
ad_clients_netmask = self.get_old_value('ad_peer_netmask', 'ad_clients_netmask')
|
||||
for ip, netmask in zip(ad_clients_ip, [nm[1] for nm in sorted(ad_clients_netmask.items())]):
|
||||
self.append_value('ad_peer_ip', ip)
|
||||
self.modify_last_value('ad_peer_netmask', netmask)
|
||||
del(self.unknown_options['ad_clients_ip'])
|
||||
del(self.unknown_options['ad_clients_netmask'])
|
||||
# Force SID
|
||||
force_sid = self.get_value('ad_domain_sid')
|
||||
if force_sid:
|
||||
self.set_value('ad_force_domain_sid', 'oui')
|
||||
# Squid modified variables : minutes -> seconds
|
||||
for squidvar in ['forward_timeout', 'connect_timeout', 'read_timeout', 'request_timeout', 'persistent_request_timeout']:
|
||||
squidval = self.get_value(squidvar)
|
||||
if squidval is not None and not self.is_default(squidvar):
|
||||
self.set_value(squidvar, squidval*60)
|
||||
# Exim relay : force to "activate" when upgrade from Scribe 2.6.1 only
|
||||
if self.var_exists('synchro_aaf'):
|
||||
self.set_value('exim_relay', 'oui')
|
||||
if self.get_value('activer_dhcp') == 'oui' and self.is_default('exim_relay_dhcp'):
|
||||
self.set_value('exim_relay_dhcp', 'oui')
|
||||
# Autosign certificat modified by user must be manual
|
||||
if self.get_value('cert_type') == u'autosigné':
|
||||
cert_is_modified = False
|
||||
# set manuel to access to variable
|
||||
self.set_value('cert_type', u'manuel')
|
||||
for cert in ['server_cert', 'server_key', 'server_pem']:
|
||||
if not self.is_default(cert):
|
||||
cert_is_modified = True
|
||||
break
|
||||
if not cert_is_modified:
|
||||
self.set_value('cert_type', u'autosigné')
|
||||
# Store autosign certificat in manual type
|
||||
if self.get_value('cert_type') == u'manuel':
|
||||
for cert, filename in [('server_cert', u'/etc/ssl/certs/eole.crt'), ('server_pem', u'/etc/ssl/certs/eole.pem')]:
|
||||
if self.is_default(cert):
|
||||
self.set_value(cert, filename)
|
||||
# gaspacho agent needs to pass by port 8080 has in 2.6.1 and ealier
|
||||
if self.var_exists('gaspacho_https'):
|
||||
self.set_value('gaspacho_https', 'non')
|
||||
|
||||
|
||||
def upgrade2(major_version, old_release, current_release, config):
|
||||
"""
|
||||
major_version: version des scripts de migration (ex : 2.4)
|
||||
old_release: version du config.eol à migrer (ex : 2.4.0)
|
||||
current_release: version du serveur (ex : 2.5.1)
|
||||
config: objet de configuration Tiramisu
|
||||
"""
|
||||
def _get_max_release():
|
||||
"""
|
||||
Calcul du dernier numéro de release disponible pour la version majeure
|
||||
"""
|
||||
ends = 0
|
||||
for func in globals():
|
||||
if func.startswith(func_start):
|
||||
ends = max(ends, int(func.split('_')[-1]))
|
||||
return ends
|
||||
|
||||
old_version = '.'.join(old_release.split('.')[0:2])
|
||||
current_version = '.'.join(current_release.split('.')[0:2])
|
||||
func_start = 'Upgrade_' + "_".join(major_version.split('.'))
|
||||
if StrictVersion(current_version) == StrictVersion(old_version):
|
||||
# upgrade au sein d'une même version
|
||||
# ex : 2.5.1 -> 2.5.4 en 2.5
|
||||
starts = int(old_release.split('.')[-1])
|
||||
ends = int(current_release.split('.')[-1])
|
||||
elif StrictVersion(major_version) == StrictVersion(old_version):
|
||||
# upgrade "de base" vers une version supérieure
|
||||
# ex : 2.4.2 -> 2.6.1 en 2.4
|
||||
starts = int(old_release.split('.')[-1])
|
||||
ends = _get_max_release()
|
||||
elif StrictVersion(major_version) == StrictVersion(current_version):
|
||||
# upgrade "final" vers une version supérieure
|
||||
# ex : 2.4.2 -> 2.6.1 en 2.6
|
||||
starts = -1
|
||||
ends = int(current_release.split('.')[-1])
|
||||
else:
|
||||
# upgrade "intermédiaire" vers une version supérieure
|
||||
# ex : 2.4.2 -> 2.6.1 en 2.5
|
||||
starts = -1
|
||||
ends = _get_max_release()
|
||||
|
||||
for i in xrange(starts + 1, ends + 1):
|
||||
func = func_start + '_' + str(i)
|
||||
if func in globals():
|
||||
upgrade = globals()[func](config)
|
||||
upgrade.run()
|
|
@ -0,0 +1,197 @@
|
|||
# -*- coding: UTF-8 -*-
|
||||
"""
|
||||
utilitaires créole
|
||||
"""
|
||||
|
||||
import sys
|
||||
from .error import NoneError, OutOfRange
|
||||
from .config import charset
|
||||
try:
|
||||
from pyeole.ansiprint import *
|
||||
except:
|
||||
pass
|
||||
import time, hashlib, random, unicodedata
|
||||
|
||||
# définition des classes d'adresse IP existantes
|
||||
classes = {
|
||||
u'128.0.0.0' : u'1'
|
||||
, u'192.0.0.0' : u'2'
|
||||
, u'224.0.0.0' : u'3'
|
||||
, u'240.0.0.0' : u'4'
|
||||
, u'248.0.0.0' : u'5'
|
||||
, u'252.0.0.0' : u'6'
|
||||
, u'254.0.0.0' : u'7'
|
||||
, u'255.0.0.0' : u'8'
|
||||
, u'255.128.0.0' : u'9'
|
||||
, u'255.192.0.0' : u'10'
|
||||
, u'255.224.0.0' : u'11'
|
||||
, u'255.240.0.0' : u'12'
|
||||
, u'255.248.0.0' : u'13'
|
||||
, u'255.252.0.0' : u'14'
|
||||
, u'255.254.0.0' : u'15'
|
||||
, u'255.255.0.0' : u'16'
|
||||
, u'255.255.128.0' : u'17'
|
||||
, u'255.255.192.0' : u'18'
|
||||
, u'255.255.224.0' : u'19'
|
||||
, u'255.255.240.0' : u'20'
|
||||
, u'255.255.248.0' : u'21'
|
||||
, u'255.255.252.0' : u'22'
|
||||
, u'255.255.254.0' : u'23'
|
||||
, u'255.255.255.0' : u'24'
|
||||
, u'255.255.255.128' : u'25'
|
||||
, u'255.255.255.192' : u'26'
|
||||
, u'255.255.255.224' : u'27'
|
||||
, u'255.255.255.240' : u'28'
|
||||
, u'255.255.255.248' : u'29'
|
||||
, u'255.255.255.252' : u'30'
|
||||
, u'255.255.255.254' : u'31'
|
||||
, u'255.255.255.255' : u'32'
|
||||
}
|
||||
|
||||
def string_to_bool(string):
|
||||
"""
|
||||
Transforme les chaines 'True' ou 'False' en valeurs booléennes
|
||||
"""
|
||||
if string == "":
|
||||
raise ValueError('empty string')
|
||||
result = eval(string)
|
||||
if result not in [True, False]:
|
||||
raise TypeError("string must be like 'True' or 'False'")
|
||||
else: return result
|
||||
|
||||
|
||||
def get_text_node(node):
|
||||
"""
|
||||
@param node: node minidom contenant du texte
|
||||
Utilitaire minidom permettant de récupérer le texte d'un node texte
|
||||
"""
|
||||
texte = ""
|
||||
nodelist = node.childNodes
|
||||
for textnode in nodelist:
|
||||
if textnode.nodeType == textnode.TEXT_NODE:
|
||||
texte = texte + textnode.data
|
||||
return texte
|
||||
|
||||
|
||||
# utilitaires pour la
|
||||
# ligne de commande
|
||||
|
||||
def raw(text):
|
||||
"""
|
||||
Question en ligne de commande : permet de repérer si l'utilisateur a renvoyé quelque chose
|
||||
|
||||
@param text: le libellé de message
|
||||
@return: la variable demandée
|
||||
"""
|
||||
var = raw_input(text + " : ")
|
||||
if var:
|
||||
return var
|
||||
else:
|
||||
raise NoneError
|
||||
|
||||
|
||||
def stringify(string):
|
||||
"""
|
||||
Encodage des chaînes avec le charset local
|
||||
"""
|
||||
try:
|
||||
return string.encode(charset)
|
||||
except:
|
||||
return string
|
||||
|
||||
def encode_list(_list):
|
||||
""" encode une liste en utf-8 si les éléments sont de type dico ou str ou liste, unicode"""
|
||||
encoded_list = []
|
||||
for element in _list:
|
||||
if type(element) == str:
|
||||
encoded_list.append(encode_str(element))
|
||||
elif type(element) == dict:
|
||||
encoded_list.append(encode_dico(element))
|
||||
elif type(element) == list:
|
||||
encoded_list.append(encode_list(element))
|
||||
elif type(element) == unicode:
|
||||
encoded_list.append(encode_str(element))
|
||||
else:
|
||||
encoded_list.append(element)
|
||||
return encoded_list
|
||||
|
||||
def encode_str(string):
|
||||
""" encode une string ou un unicode en utf8 """
|
||||
try:
|
||||
string = string.encode(charset)
|
||||
except:
|
||||
pass
|
||||
return string
|
||||
|
||||
def encode_dico(dico):
|
||||
""" encode un dico en utf8 dans le cas ou les valeurs soient de type dico, liste, str, unicode """
|
||||
for key in dico.keys():
|
||||
if type(dico[key]) == str:
|
||||
dico[key] = encode_str(dico[key])
|
||||
elif type(dico[key]) == unicode:
|
||||
dico[key] = encode_str(dico[key])
|
||||
elif type(dico[key]) == dict:
|
||||
dico[key] = encode_dico(dico[key])
|
||||
elif type(dico[key]) == list:
|
||||
dico[key] = encode_list(dico[key])
|
||||
return dico
|
||||
|
||||
|
||||
def select_list(selection):
|
||||
"""
|
||||
Utilitaire de construction d'une sélection en ligne de commande
|
||||
@param selection : liste
|
||||
@return : l'identifiant sélectionné (entier)
|
||||
"""
|
||||
# affichage de la liste (ordonnée)
|
||||
for i in selection:
|
||||
print(selection.index(i) , ':', stringify(i))
|
||||
# print selection.index(i) , ':', i[0]
|
||||
|
||||
# recuperation du numero
|
||||
try:
|
||||
number = int(raw(stringify(_("Choose a number in the list"))))
|
||||
except:
|
||||
raise OutOfRange
|
||||
if number not in range(len(selection)):
|
||||
raise OutOfRange
|
||||
return number
|
||||
|
||||
def gen_random(length=None):
|
||||
"""
|
||||
length: longueur de la chaine aléatoire attendu
|
||||
"""
|
||||
try:
|
||||
random_id = str(time.time()).split('.')[0]
|
||||
random_str = hashlib.sha224('{}/{}'.format(random_id, str(random.randrange(2**100))).encode('utf-8')).hexdigest()
|
||||
return random_str[:length]
|
||||
except:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
|
||||
def normalize_family(family_name, check_name=True):
|
||||
"""
|
||||
il ne faut pas d'espace, d'accent, de majuscule, de tiré, ...
|
||||
dans le nom des familles
|
||||
"""
|
||||
if sys.version_info[0] < 3:
|
||||
f = unicode(family_name)
|
||||
else:
|
||||
f = family_name
|
||||
f = f.replace('-', '_')
|
||||
#f = f.replace(u'é', 'e')
|
||||
#f = f.replace(u'è', 'e')
|
||||
nfkd_form = unicodedata.normalize('NFKD', f)
|
||||
f = u"".join([c for c in nfkd_form if not unicodedata.combining(c)])
|
||||
f = f.replace(' ', '_')
|
||||
f = f.lower()
|
||||
try:
|
||||
int(f[0])
|
||||
except ValueError:
|
||||
pass
|
||||
else:
|
||||
raise ValueError(u'Le nom de la famille ne doit pas commencer par un chiffre : {0}'.format(f))
|
||||
if check_name and f.lower() in ['containers']:
|
||||
raise ValueError(u'nom de la famille interdit {0}'.format(f))
|
||||
return f
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
# -*- coding:utf-8 -*-
|
||||
|
||||
"""
|
||||
callbacks de validation personnalisés pour tiramisu
|
||||
|
||||
**utilisation**
|
||||
|
||||
faire des callbacks standards en cas de validation
|
||||
sur la configuration entière.
|
||||
la possibilité de validation personnalisable doit
|
||||
être utilisée *uniquement* pour des validations locales
|
||||
|
||||
**important**
|
||||
|
||||
la fonction ne doit pas lever d'exception, elle doit
|
||||
aboutir.
|
||||
|
||||
api
|
||||
:param value: premier paramètre, valeur de l'option
|
||||
les autres paramètres doivent être des
|
||||
paramètres **nommés**
|
||||
:return: True ou False suivant que l'option a été validée ou non
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
"""
|
|
@ -0,0 +1,12 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
from formencode.validators import UnicodeString
|
||||
from formencode.api import Invalid
|
||||
|
||||
def valid_string(value, min=None, max=None, not_empty=True):
|
||||
try:
|
||||
UnicodeString(min=min, max=max, not_empty=not_empty
|
||||
).to_python(value)
|
||||
return True
|
||||
except Invalid:
|
||||
return False
|
|
@ -0,0 +1,1750 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
try:
|
||||
from collections import OrderedDict
|
||||
except:
|
||||
from pyeole.odict import OrderedDict
|
||||
from copy import copy
|
||||
from os.path import isdir, isfile, join, basename, dirname
|
||||
from os import listdir
|
||||
|
||||
from .error import FileNotFound, ConfigError
|
||||
from .config import dtdfilename, VIRTBASE, VIRTROOT, VIRTMASTER
|
||||
from .dtd_parser import parse_dtd
|
||||
#from .lxml_parser import parse_xml_file, parse_string
|
||||
#don't touch this, for variables with eosfunc value
|
||||
#import eosfunc
|
||||
#from .utils import normalize_family
|
||||
|
||||
from .i18n import _
|
||||
|
||||
import tiramisu.option
|
||||
|
||||
from tiramisu.option import UnicodeOption, OptionDescription, PortOption, \
|
||||
IntOption, ChoiceOption, BoolOption, SymLinkOption, IPOption, \
|
||||
NetworkOption, NetmaskOption, DomainnameOption, BroadcastOption, \
|
||||
URLOption, EmailOption, FilenameOption, UsernameOption, DateOption, \
|
||||
PasswordOption, Option, Leadership
|
||||
|
||||
from tiramisu import Config
|
||||
from tiramisu.setting import groups
|
||||
#from tiramisu.error import PropertiesOptionError
|
||||
|
||||
####################################################
|
||||
# FIXME : Ajout option adresse mac
|
||||
from tiramisu import RegexpOption
|
||||
import re
|
||||
class MACOption(RegexpOption):
|
||||
__slots__ = tuple()
|
||||
_regexp = re.compile(r"^([0-9A-Fa-f]{2}[:]){5}([0-9A-Fa-f]{2})$")
|
||||
_display_name = _('mac address')
|
||||
####################################################
|
||||
|
||||
|
||||
CONVERT_DATA = {IntOption: int, UnicodeOption: str, PortOption: str,
|
||||
DomainnameOption: str, EmailOption: str, URLOption: str,
|
||||
IPOption: str, NetmaskOption: str, NetworkOption: str,
|
||||
BroadcastOption: str, FilenameOption: str}
|
||||
COMMON_KEY = {'container': UnicodeOption, 'container_group': UnicodeOption,
|
||||
'real_container': UnicodeOption, 'instance_mode': None,
|
||||
'exists': None, 'redefine': UnicodeOption}
|
||||
|
||||
|
||||
CONVERT_OPTION = {'number': (IntOption, None, None),
|
||||
'string': (UnicodeOption, None, None),
|
||||
'password': (PasswordOption, None, None),
|
||||
'mail': (EmailOption, None, None),
|
||||
'filename': (FilenameOption, None, None),
|
||||
'date': (DateOption, None, None),
|
||||
#restriction approchante
|
||||
'unix_user': (UsernameOption, None, None),
|
||||
'ip': (IPOption, None, {'allow_reserved': True}),
|
||||
'local_ip': (IPOption, None, {'private_only': True, 'warnings_only': True}),
|
||||
'netmask': (NetmaskOption, None, None),
|
||||
'network': (NetworkOption, None, None),
|
||||
'broadcast': (BroadcastOption, None, None),
|
||||
'netbios': (DomainnameOption, None, {'type_': 'netbios', 'warnings_only': True}),
|
||||
'domain': (DomainnameOption, None, {'type_': 'domainname', 'allow_ip': True, 'allow_without_dot': True}),
|
||||
'domain_strict': (DomainnameOption, None, {'type_': 'domainname', 'allow_ip': False}),
|
||||
'hostname': (DomainnameOption, None, {'type_': 'hostname', 'allow_ip': True}),
|
||||
'hostname_strict': (DomainnameOption, None, {'type_': 'hostname', 'allow_ip': False}),
|
||||
'web_address': (URLOption, None, {'allow_ip': True, 'allow_without_dot': True}),
|
||||
'port': (PortOption, None, {'allow_private': True}),
|
||||
'oui/non': (ChoiceOption, [u'oui', u'non'], None),
|
||||
'on/off': (ChoiceOption, [u'on', u'off'], None),
|
||||
'yes/no': (ChoiceOption, [u'yes', u'no'], None),
|
||||
'schedule': (ChoiceOption, [u'none', u'daily', u'weekly', u'monthly'], None),
|
||||
'schedulemod': (ChoiceOption, [u'pre', u'post'], None)}
|
||||
|
||||
type_option = {UnicodeOption: 'str', ChoiceOption: 'choice', IntOption: 'int',
|
||||
OptionDescription: 'optiondescription', Leadership: 'optiondescription', IPOption: 'ip',
|
||||
DomainnameOption: 'str', NetworkOption: 'ip', NetmaskOption: 'ip',
|
||||
FilenameOption: 'str', DateOption: 'str', EmailOption: 'str', URLOption: 'str',
|
||||
BroadcastOption: 'str', PortOption: 'str', UsernameOption: 'str', MACOption: 'str', # FIXME YO
|
||||
PasswordOption:'password'}
|
||||
type_option_convert = {'int': int, 'str': str, 'ip': str,
|
||||
'password': str,
|
||||
}
|
||||
|
||||
|
||||
#def force_unicode(val):
|
||||
# if val is not None and type(val) != unicode:
|
||||
# return unicode(val, 'utf-8')
|
||||
# else:
|
||||
# return val
|
||||
|
||||
def convert_value(option, value, config=None):
|
||||
_type = type_option[type(option)]
|
||||
if _type in type_option_convert:
|
||||
if value is not None:
|
||||
return type_option_convert[_type](value)
|
||||
elif _type == 'choice':
|
||||
values = option.impl_get_values(config)
|
||||
if value is None and u'' in values:
|
||||
value = u''
|
||||
if value not in values:
|
||||
raise ValueError(_("option {0}'s value should be in {1}".format(option._name, str(values))))
|
||||
return value
|
||||
|
||||
#===DUPLIQUE DANS ANNOTATOR
|
||||
#mode order is important
|
||||
modes_level = ('basic', 'normal', 'expert')
|
||||
class Mode(object):
|
||||
def __init__(self, name, level):
|
||||
self.name = name
|
||||
self.level = level
|
||||
|
||||
def __cmp__(self, other):
|
||||
return cmp(self.level, other.level)
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.level == other.level
|
||||
|
||||
def __ne__(self, other):
|
||||
return self.level != other.level
|
||||
|
||||
def __gt__(self, other):
|
||||
return other.level < self.level
|
||||
|
||||
def __ge__(self, other):
|
||||
return not self.level < other.level
|
||||
|
||||
def __le__(self, other):
|
||||
return not other.level < self.level
|
||||
|
||||
|
||||
def mode_factory():
|
||||
mode_obj = {}
|
||||
for idx in range(len(modes_level)):
|
||||
name = modes_level[idx]
|
||||
mode_obj[name] = Mode(name, idx)
|
||||
return mode_obj
|
||||
|
||||
modes = mode_factory()
|
||||
#/===
|
||||
def convert_tiramisu_value(value, obj):
|
||||
"""
|
||||
convertit les variables dans le bon type si nécessaire
|
||||
"""
|
||||
def _convert_boolean(value):
|
||||
if isinstance(value, bool):
|
||||
return value
|
||||
if value == 'True':
|
||||
return True
|
||||
elif value == 'False':
|
||||
return False
|
||||
elif value is None:
|
||||
return None
|
||||
else:
|
||||
raise Exception('unknown value {} while trying to cast {} to boolean'.format(value, obj))
|
||||
|
||||
if obj is BoolOption:
|
||||
if isinstance(value, list):
|
||||
# variable multi
|
||||
return [_convert_boolean(val) for val in value]
|
||||
else:
|
||||
return _convert_boolean(value)
|
||||
func = CONVERT_DATA.get(obj, None)
|
||||
if value == None or func == None:
|
||||
return value
|
||||
if type(value) is list:
|
||||
# variable multi
|
||||
return [func(val) for val in value]
|
||||
else:
|
||||
return func(value)
|
||||
|
||||
class CreoleGeneric():
|
||||
def gen_generic(self, name, paths, copy_requires=None,
|
||||
verify_exists_redefine=True):
|
||||
def _get_type(values):
|
||||
"""get type and values for ChoiceOption
|
||||
"""
|
||||
if values == None:
|
||||
return UnicodeOption, None
|
||||
elif set([True, False]) == set(values):
|
||||
return BoolOption, None
|
||||
else:
|
||||
return ChoiceOption, values
|
||||
|
||||
def build_key_type(name, pnode=''):
|
||||
#build key_type and choice_constrainte with 'needs' and 'optionals'
|
||||
#attribut
|
||||
key_type = {}
|
||||
for mode in ['needs', 'optionals']:
|
||||
for key, value in self.dtd[name][mode].items():
|
||||
#don't load COMMON_KEY and xxxlist and parentnodelist
|
||||
if key not in COMMON_KEY and key != '{0}list'.format(name) and key != '{0}list'.format(pnode):
|
||||
choice = None
|
||||
if value['type'] is not None:
|
||||
type_ = value['type']
|
||||
else:
|
||||
type_, choice = _get_type(value['values'])
|
||||
if choice != None:
|
||||
choice_constrainte[key] = choice
|
||||
key_type[key] = type_
|
||||
return key_type
|
||||
|
||||
containers = self._get_containers()
|
||||
tgeneric_vars = self.generic.get(name, [])
|
||||
generic_vars = []
|
||||
for data in tgeneric_vars:
|
||||
if data['container'] == 'all':
|
||||
# Generate per container
|
||||
for container in containers.values():
|
||||
if container['name'] in ['all', VIRTMASTER]:
|
||||
continue
|
||||
tdata = copy(data)
|
||||
tdata['container'] = container['name']
|
||||
generic_vars.append(tdata)
|
||||
else:
|
||||
generic_vars.append(data)
|
||||
#remove last 's' in name (hosts => host)
|
||||
if name[-1] == 's':
|
||||
name = name[:-1]
|
||||
#if name is a key of self.requires set requires_key to 'activate'
|
||||
if name in self.requires:
|
||||
requires_key = 'activate'
|
||||
else:
|
||||
requires_key = None
|
||||
choice_constrainte = {}
|
||||
key_type = build_key_type(name)
|
||||
#if sub node add subkeys to key_type, be carefull, all sub node
|
||||
#are mixed, 'node_name' is it's node name
|
||||
for option in self.dtd[name]['options']:
|
||||
key_type.update(build_key_type(option, name))
|
||||
key_type['node_name'] = UnicodeOption
|
||||
key_type['level'] = UnicodeOption
|
||||
return self._gen_tiramisu_config(paths, name, generic_vars, key_type,
|
||||
choice_constrainte, requires_key,
|
||||
copy_requires=copy_requires,
|
||||
verify_exists_redefine=verify_exists_redefine)
|
||||
|
||||
def _check_instance_mode(self, data):
|
||||
"""Verify if the resource is to be instanciated
|
||||
|
||||
A resource can tagged to be instanciate only when containers
|
||||
is enabled or disabled.
|
||||
|
||||
We check if the tagged instance mode match the current state
|
||||
of the containers activation.
|
||||
|
||||
:param data: resource informations
|
||||
:type data: `dict`
|
||||
:return: resource instance mode match containers activation
|
||||
:rtype: `bool`
|
||||
|
||||
"""
|
||||
check = True
|
||||
if 'instance_mode' in data:
|
||||
mode = data['instance_mode']
|
||||
if self.containers_enabled and mode == 'when_no_container':
|
||||
check = False
|
||||
elif not self.containers_enabled and mode == 'when_container':
|
||||
check = False
|
||||
return check
|
||||
|
||||
def _config_list_to_dict(self, gvariables, verify_exists_redefine):
    """Validate variables in container context and return a dict
    keyed by the variable's name.

    Fix: uses the ``in`` operator instead of the deprecated
    ``dict.has_key`` (removed in Python 3); behaviour is identical.

    :param gvariables: list of variable descriptions (dicts)
    :param verify_exists_redefine: when True, check the 'exists' and
        'redefine' attributes of duplicated variables
    :return: OrderedDict mapping name -> list of variable dicts
    """
    def _test_new_variable(variable):
        """Check the 'redefine'/'exists' attributes of a new variable.

        variable: attributes of the variable
        """
        # NOTE(review): this early return makes the checks below dead
        # code (they also reference 'gtype', which is not defined in
        # this scope).  Kept as-is to preserve existing behaviour —
        # confirm whether the checks were deliberately disabled.
        return
        if variable.get('redefine', False):
            raise ConfigError(
                _(u"{0} {1} redefined but unexistent.").format(gtype, name))
        if not variable.get('exists', True):
            raise ConfigError(_(u'{0} {1} existent.').format(gtype, name))

    variables = OrderedDict()
    containers = self._get_containers()
    for variable in gvariables:
        # Check if we activate the variable or not
        if not self._check_instance_mode(variable):
            continue
        name = variable['name']
        if 'container' in variable:
            # add container group
            variable['container_group'] = containers[variable['container']]['group']
            if self.containers_enabled:
                tcontainer = self.get_real_container_name(containers, variable['container_group'])
                variable['real_container'] = tcontainer
            else:
                variable['real_container'] = VIRTMASTER
        else:
            variable['container_group'] = variable['group']
            if self.containers_enabled:
                variable['real_container'] = variable['group']
            else:
                variable['real_container'] = VIRTMASTER
        # if the variable already exists, verify it is not in the same
        # container; if same container, verify redefine/exists attributes
        if 'container' in variable and name in variables:
            if verify_exists_redefine:
                is_exists = False
                for test in variables[name]:
                    if test['container'] == variable['container']:
                        is_exists = True
                        break
                # if the variable exists in the same container
                if is_exists:
                    if not variable.get('exists', True):
                        continue
                    if not variable.get('redefine', False):
                        # var already exists
                        raise ConfigError(_(u"Name ({0}) already used.").format(name))
                else:
                    # variable exists in an other container
                    _test_new_variable(variable)
            # FIXME: appended but not modified when 'exists' is set!
            variables[name].append(variable)
        else:
            # var does not exist yet
            if verify_exists_redefine:
                _test_new_variable(variable)
            variables[name] = [variable]
    return variables
|
||||
|
||||
def _gen_tiramisu_config(self, paths, gtype, gvariables, key_type=None,
                         choice_constrainte=None, requires_key=None, copy_requires=None,
                         verify_exists_redefine=True):
    """Generate tiramisu's config for container's attributes.

    Fix: ``key_type`` and ``choice_constrainte`` previously defaulted to
    mutable ``{}`` literals; ``key_type.update(COMMON_KEY)`` below then
    mutated the shared default, so '<gtype>list' keys leaked from one
    call into the next.  ``None`` sentinels restore a fresh dict per
    call while keeping the call signature backward compatible.

    paths: paths of all Creole variables
    gtype: type of Creole attributes (file, service, ...)
    gvariables: attributes used to generate the tiramisu config
    key_type: tiramisu option class for each attribute key
    choice_constrainte: allowed values for ChoiceOption attributes
    requires_key: apply requires on this key
    copy_requires: copy all requires of the SymLink target onto the
        OptionDescription
    """
    if key_type is None:
        key_type = {}
    if choice_constrainte is None:
        choice_constrainte = {}
    variables = self._config_list_to_dict(gvariables, verify_exists_redefine)

    # add common key types
    key_type.update(COMMON_KEY)
    key_type['{0}list'.format(gtype)] = UnicodeOption
    var = []

    # parse the dictionary generated by _config_list_to_dict
    for name, var_datas in variables.items():
        # parse the attributes of the variable
        for var_data in var_datas:
            force_requires = []
            properties = tuple()
            if var_data.get('{0}list'.format(gtype), None) in \
                    self.requires.get(gtype, {}):
                props, req = self.update_requires(
                    self.requires[gtype][
                        var_data['{0}list'.format(gtype)]]['list'], namespace='creole', option=True)
                if props != []:
                    properties = tuple(props)
                    requires = None
                else:
                    requires = req
            else:
                requires = None
            options = []
            # add a tiramisu option for each attribute
            for option_type, option_value in var_data.items():
                # if the option's type is defined in key_type
                if option_type in key_type:
                    # get tiramisu's object
                    option_obj = key_type[option_type]
                    if isinstance(option_obj, str):
                        option_obj = getattr(tiramisu.option, var_data[option_obj])
                elif option_type == 'name':
                    # default option_obj
                    option_obj = UnicodeOption
                    # if a type is set in the DTD, use it
                    if self.dtd[gtype]['type']:
                        option_obj = self.dtd[gtype]['type']
                elif 'node_name' in var_data:
                    # no type: look up node_name and take the type from
                    # the node (this is a str, not an option class)
                    option_obj = getattr(tiramisu.option, var_data[self.dtd[var_data['node_name']]['type']])
                else:
                    raise Exception(_(u'Unknown key {0}').format(option_type))
                option_value = convert_tiramisu_value(option_value, option_obj)
                # if the value is None, do not generate a tiramisu option
                if option_obj and option_value is not None:
                    # if option_type is requires_key, unset requires_key
                    # and attach the requires to this key
                    if option_type == requires_key:
                        requires_key = None
                        r = requires
                        p = properties
                        requires = None
                        properties = tuple()
                    else:
                        r = None
                        p = None

                    # generate the tiramisu object
                    if option_obj == ChoiceOption:
                        options.append(option_obj(option_type, '',
                                       tuple(choice_constrainte[option_type]),
                                       default=option_value, requires=r,
                                       properties=p))
                    elif option_obj == SymLinkOption:
                        if r != None:
                            raise Exception(
                                _(u'No requires for SymLinkOption'))
                        try:
                            path = paths[option_value]
                        except KeyError:
                            raise Exception(
                                _(u"SymLinkOption targetting unexistent variable: {0}.").format(option_value))
                        namespace = path.split('.')[0]
                        # NOTE(review): if no description matches the
                        # namespace, 'opt' is unbound below — preserved
                        # as in the original.
                        for descr in self.space:
                            if descr._name == namespace:
                                bopt = OptionDescription('baseconfig',
                                                         'baseconfigdescr',
                                                         [descr])
                                opt = bopt
                                # walk down the path to the target option
                                # (note: reuses the name 'p', as before)
                                for p in path.split('.'):
                                    opt = getattr(opt, p)
                                if option_type == copy_requires:
                                    # aggregate all family/option
                                    # requirements so they also apply to
                                    # the OptionDescription
                                    opt_path = path.split('.')
                                    for p in opt_path[:-1]:
                                        try:
                                            force_requires.extend(self.update_requires(self.requires['family'][p]['list'], 'creole', option=True)[1])
                                        except KeyError:
                                            pass
                                    try:
                                        force_requires.extend(self.update_requires(self.requires['variable'][opt_path[-1]]['list'],'creole', option=True)[1])
                                        not_mandatory = False
                                        for req_ in force_requires:
                                            if req_[2] == 'disabled' and req_[3] != False:
                                                not_mandatory = True
                                        if not not_mandatory and 'mandatory' in opt._properties:
                                            force_requires.append((opt, None, 'disabled', False, True, False))
                                    except KeyError:
                                        pass
                                break

                        options.append(option_obj(option_type, opt))
                    else:
                        options.append(option_obj(option_type, '',
                                       default=option_value, requires=r, properties=p))

            # if requires_key was not consumed above
            if requires_key:
                options.append(BoolOption(requires_key, '', default=True,
                               requires=requires, properties=properties))
                requires = None
                properties = tuple()
            level = len(var)
            if force_requires != []:
                if requires == None:
                    requires = force_requires
                else:
                    requires.extend(force_requires)

            var.append(OptionDescription(gtype + str(level),
                       '', options, requires=requires, properties=properties))
    return OptionDescription('{0}s'.format(gtype), '', var)
|
||||
|
||||
def gen_container(self, paths, namespace):
    """Build the option descriptions for every generic section.

    A dedicated ``gen_<name>`` method takes precedence over the
    generic generator; ``gen_networks`` is always called first when it
    exists.
    """
    descriptions = []
    if 'gen_networks' in dir(self):
        descriptions.append(self.gen_networks(paths))
    for section in self.generic:
        handler_name = 'gen_{0}'.format(section)
        if handler_name in dir(self):
            descriptions.append(getattr(self, handler_name)(paths))
        else:
            descriptions.append(self.gen_generic(section, paths))
    return descriptions
|
||||
|
||||
def _get_containers(self):
|
||||
"""
|
||||
Load container's description
|
||||
"""
|
||||
containers = OrderedDict()
|
||||
containers_id = OrderedDict()
|
||||
for container in self.generic.get('containers', []):
|
||||
name = container['name']
|
||||
if not containers.has_key(name):
|
||||
containers[name] = {'name': name, 'group': name}
|
||||
if container.has_key('id') and container['id'] is not None:
|
||||
id_ = container['id']
|
||||
if id_ in containers_id and containers_id[id_] != name:
|
||||
raise ConfigError(_(u"Two containers with the same id ({0})").format(id_))
|
||||
if name in containers_id.values() and containers_id.get(id_) != name:
|
||||
raise ConfigError(_(u"Multiple ids for the container {0}").format(name))
|
||||
containers_id[id_] = name
|
||||
containers[name]['id'] = id_
|
||||
if container.has_key('group') and container['group'] is not None:
|
||||
containers[name]['group'] = container['group']
|
||||
|
||||
for name, container in containers.items():
|
||||
group = container['group']
|
||||
if name != group and group in containers:
|
||||
containers[name]['id'] = containers[group]['id']
|
||||
return containers
|
||||
|
||||
def gen_containers_creole(self, paths, namespace):
    """
    Generate fake config.creole.containers hidden family.

    Each container gets a container_path_//name//, container_ip_//name//
    and container_name_//name// UnicodeOption, plus a compatibility
    adresse_ip_//name// option (see #5701, #5868, #7183).  The family is
    registered in ``self.options[namespace]`` and every option is added
    to *paths*.

    :param paths: paths of variables (new options are added to this
        dictionary as 'creole.containers.<name>')
    :param namespace: key into ``self.options`` where the family is stored
    :return: the family name ('containers')
    """
    # bridge addresses: a documentation subnet when containers are
    # enabled, loopback otherwise
    if self.containers_enabled:
        ip_br0 = u'192.0.2.1'
        mask_br0 = u'255.255.255.0'
        network_br0 = u'192.0.2.0'
        bcast_br0 = u'192.0.2.255'
    else:
        ip_br0 = u'127.0.0.1'
        mask_br0 = u'255.0.0.0'
        network_br0 = u'127.0.0.0'
        bcast_br0 = u'127.255.255.255'

    variables = []
    args = {'name': 'adresse_ip_br0', 'doc': _(u"Bridge IP address"), 'default': ip_br0, 'requires': None}
    variables.append({'optiontype': 'option', 'obj': UnicodeOption, 'args': args, 'option': None})
    args = {'name': 'adresse_netmask_br0', 'doc': _(u"Bridge IP subnet mask"), 'default': mask_br0, 'requires': None}
    variables.append({'optiontype': 'option', 'obj': UnicodeOption, 'args': args, 'option': None})
    args = {'name': 'adresse_network_br0', 'doc': _(u"Bridge IP network_br0 address"), 'default': network_br0, 'requires': None}
    variables.append({'optiontype': 'option', 'obj': UnicodeOption, 'args': args, 'option': None})
    args = {'name': 'adresse_broadcast_br0', 'doc': _(u"Bridge broadcast IP address"), 'default': bcast_br0, 'requires': None}
    variables.append({'optiontype': 'option', 'obj': UnicodeOption, 'args': args, 'option': None})
    for name in ['adresse_ip_br0', 'adresse_netmask_br0',
                 'adresse_network_br0', 'adresse_broadcast_br0']:
        paths[name] = 'creole.containers.{0}'.format(name)

    containers = self._get_containers()
    for name, container in containers.items():
        if name == 'all':
            # pseudo-container covering every container
            ip = None
            path = None
            real_name = u'all'
        elif not self.containers_enabled or name == VIRTMASTER:
            path = u''
            ip = u'127.0.0.1'
            real_name = unicode(VIRTMASTER)
        else:
            tcontainer = self.get_real_container_name(containers, container['name'])
            real_name = unicode(tcontainer)
            path = unicode(join(VIRTROOT, real_name, VIRTBASE))
            # FIXME: not always this IP
            ip = u"192.0.2." + container['id']
        # Variable: container_path_<container>
        path_name = 'container_path_{0}'.format(name)
        args = {'name': path_name, 'doc': _(u'Path of container {0}').format(name), 'default': path, 'requires': None}
        variables.append({'optiontype': 'option', 'obj': UnicodeOption, 'args': args, 'option': None})
        paths[path_name] = 'creole.containers.{0}'.format(path_name)
        # Variable: container_ip_<container>
        ip_name = 'container_ip_{0}'.format(name)
        args = {'name': ip_name, 'doc': _(u'IP address of container {0}').format(name), 'default': ip, 'requires': None}
        variables.append({'optiontype': 'option', 'obj': UnicodeOption, 'args': args, 'option': None})
        paths[ip_name] = 'creole.containers.{0}'.format(ip_name)
        # Variable: container_name_<container>
        name_name = 'container_name_{0}'.format(name)
        args = {'name': name_name, 'doc': _(u'Group name of container {0}').format(name), 'default': real_name, 'requires': None}
        variables.append({'optiontype': 'option', 'obj': UnicodeOption, 'args': args, 'option': None})
        paths[name_name] = 'creole.containers.{0}'.format(name_name)
        # Variable: adresse_ip_<container>
        # adresse_ip_<container> added for compat 2.3 (#5701, #5868)
        adresse_name = 'adresse_ip_{0}'.format(name)
        if adresse_name not in self.variables:
            if not self.containers_enabled:
                # hack to have "localhost" in non container mode #7183
                args = {'name': adresse_name, 'doc': _(u'Path of container {0}').format(name), 'default': u'localhost',
                        'properties': ('frozen', 'force_default_on_freeze'), 'requires': None}
                variables.append({'optiontype': 'option', 'obj': UnicodeOption, 'args': args, 'option': None})
            else:
                # symlink onto the container_ip_<container> option
                variables.append({'optiontype': 'symlinkoption', 'obj': SymLinkOption, 'path': paths[ip_name], 'args': {'name': adresse_name}, 'option': None})
            paths[adresse_name] = 'creole.containers.{0}'.format(adresse_name)
    # register every option under the 'containers' family
    variables_path = []
    for var in variables:
        path = 'containers.' + var['args']['name']
        self.options[namespace][path] = var
        variables_path.append(path)
    fname = 'containers'
    self.options[namespace][fname] = {'optiontype': 'optiondescription',
                                      'args': {'name': fname,
                                               'doc': _('Containers informations'),
                                               'children': variables_path,
                                               'properties': ('hidden', 'normal'),
                                               'requires': None},
                                      'group_type': 'family',
                                      'informations': {'icon': 'puzzle-piece'},
                                      'option': None}
    return fname
|
||||
|
||||
|
||||
class CreoleFamily():
    """
    Load the families, the variables, the helps and the separators.

    Fix: every ``dict.has_key`` call (removed in Python 3) has been
    replaced by the equivalent ``in`` operator; behaviour is identical.
    """
    def _init_creole_family(self):
        """Initialise (reset) all family-related stores."""
        self.families = OrderedDict()
        # only used to find a previously loaded variable (name -> family)
        self.variables = {}
        self.helps = {'variables': {}, 'families': {}}
        self.separators = {}
        self.groups = {}

    def populate_families(self, families, namespace):
        """Merge the parsed families (and their variables) into
        ``self.families``, honouring redefine/remove attributes."""
        for family, fdata in families.items():
            nfamily = normalize_family(family)
            lvars = OrderedDict()
            for var, vdata in fdata['vars'].items():
                variable = self.get_variable(var, vdata, nfamily, namespace)
                if variable is not None:
                    lvars[var] = variable
                if vdata.get('remove_check', False):
                    # drop every previously registered check on this variable
                    try:
                        self.valid_enum.pop(var)
                    except KeyError:
                        pass
                    try:
                        self.consistency.pop(var)
                    except KeyError:
                        pass
                    try:
                        self.check.pop(var)
                    except KeyError:
                        pass
                if vdata.get('remove_condition', False):
                    try:
                        self.requires['variable'].pop(var)
                    except KeyError:
                        pass
            # if the family is unknown, add it
            if nfamily not in self.families:
                # family definition
                fdata['vars'] = OrderedDict()
                self.families[nfamily] = copy(fdata)
                self.families[nfamily]['mode'] = modes_level[0]
                self.families[nfamily]['hidden'] = False
                self.families[nfamily]['doc'] = str(family.encode('utf8'))
                self.families[nfamily]['vars'].update(lvars)
            # do not reset the mode to 'normal' in the redefine case
            if 'mode' in fdata and fdata['mode'] not in [modes_level[0], None]:
                self.families[nfamily]['mode'] = fdata['mode']
            if 'icon' in fdata and fdata['icon'] is not None:
                self.families[nfamily]['icon'] = fdata['icon']
            if 'hidden' in fdata:
                self.families[nfamily]['hidden'] = fdata['hidden']
            self.families[nfamily]['vars'].update(lvars)


    def get_variable(self, var, vdata, family, namespace):
        """Return the (possibly redefined) variable attributes, or None
        when the previous definition must be dropped (exists == False)."""
        # if the last definition of the variable must not be taken into
        # account (exists == False), leave immediately and ignore the rest
        if namespace == 'creole' and var in self.variables:
            if not vdata['exists']:
                return None
            if not vdata['redefine']:
                # the same variable must not be declared twice
                raise ConfigError(_(u"Two variables with the same name ({0})").format(var))
        elif vdata['redefine']:
            raise ConfigError(_(u"Attempt to redefine unexistent variable: {0}.").format(var))
        # brand new variable
        if not vdata['redefine']:
            # Be sure to have defaults on new variables
            tvar = self._update_variable_attributes(var, vdata)
        # only in the redefine case
        else:
            old_family = self.variables[var]
            if old_family != family:
                # the variable moves to a new family
                tvar = self.families[old_family]['vars'][var]
                self.families[old_family]['vars'].pop(var)
            else:
                tvar = self.families[family]['vars'][var]
            if vdata['value'] != None:
                tvar['value'] = vdata['value']
            tvar = self._update_variable_attributes(var, tvar, vdata)
        self.variables[var] = family
        return tvar

    def _update_variable_attributes(self, var, vdata, newdata=None):
        """Update variable attributes.

        If :data:`newdata` is ``None``, set default, update to new
        value otherwise.

        :param var: variable name
        :type var: `str`
        :param vdata: variable attributes
        :type vdata: `dict`
        :param newdata: new variable attributes
        :type newdata: `dict`
        :return: variable attributes
        :raises ValueError: when `newdata` tries to redefine the
            'multi' or 'type' attribute
        """
        attrs = vdata.copy()

        if newdata and newdata['multi']:
            raise ValueError(_(u"Redefining multi attribute is not allowed"
                               " for variable {0}").format(var))
        if newdata and newdata['type'] != 'string':
            raise ValueError(_(u"Redefining type attribute is not allowed"
                               " for variable {0}").format(var))
        for attr in ['auto_freeze', 'auto_save', 'hidden', 'mandatory', 'redefine']:
            # Default value is False
            if attr not in vdata or vdata[attr] is None:
                attrs[attr] = False
            elif newdata is not None and attr in newdata \
                    and newdata[attr] is not None \
                    and vdata[attr] != newdata[attr]:
                attrs[attr] = newdata[attr]

        # 'exists' defaults to True
        if 'exists' not in vdata or vdata['exists'] is None:
            attrs['exists'] = True
        elif newdata is not None and 'exists' in newdata \
                and newdata['exists'] is not None \
                and vdata['exists'] != newdata['exists']:
            attrs['exists'] = newdata['exists']

        # 'mode' defaults to 'normal'
        if 'mode' not in vdata or vdata['mode'] is None:
            attrs['mode'] = 'normal'
        elif newdata is not None and 'mode' in newdata \
                and newdata['mode'] is not None \
                and vdata['mode'] != newdata['mode']:
            attrs['mode'] = newdata['mode']

        if newdata is not None and 'description' in newdata \
                and newdata['description'] is not None \
                and vdata['description'] != newdata['description']:
            attrs['description'] = newdata['description']

        # once disabled, always disabled
        if vdata['disabled'] is True or (newdata is not None and newdata['disabled'] is True):
            attrs['disabled'] = True

        return attrs

    def populate_helps(self, helps, namespace):
        """Register the help texts of variables and families, refusing
        duplicates unless the target is being redefined."""
        for key, values in helps['variables'].items():
            vdata = self.families[self.variables[key]]['vars'][key]
            if key in self.helps['variables'] and not vdata['redefine']:
                raise ConfigError(_(u"help already set for {0}").format(key))
            else:
                self.helps['variables'][key] = values
        for key, values in helps['families'].items():
            key = normalize_family(key)
            fdata = self.families[key]
            if key in self.helps['families'] and not fdata['redefine']:
                raise ConfigError(_(u"help already set for {0}").format(key))
            else:
                self.helps['families'][key] = values

    def populate_separators(self, separators, namespace):
        """Register the separators (one per variable at most)."""
        # should live in the variable rather than in self.separators
        for var, value in separators.items():
            if var in self.separators:
                raise ConfigError(_(u"More than one separator for "
                                    "{0}").format(var))
            else:
                self.separators[var] = value

    def populate_groups(self, groups_, namespace):
        """Accumulate group members per group name."""
        for grp_name, grps in groups_.items():
            self.groups.setdefault(grp_name, []).extend(grps)
|
||||
|
||||
class CreoleConstraint():
    """
    Load the constraints: valid_enum values, mandatory variables,
    fill/auto computing functions, checks and consistency rules.
    """
    def _init_creole_constrainte(self):
        # reset all constraint stores
        self.valid_enum = {}
        self.mandatory = []
        self.fill = {}
        self.auto = {}
        self.check = {}
        self.consistency = {}

    def populate_conditions(self, conditions, namespace):
        """Translate condition descriptions into 'requires' tuples
        stored in ``self.requires`` per variable/family/list."""
        # FIXME: only the conditions hidden_if_in|hidden_if_not_in
        for var, _conditions in conditions.items():
            for condition in _conditions:
                # map the condition name onto (property, inverse) pairs
                if condition['name'] in ['hidden_if_in', 'disabled_if_in']:
                    conds = [('disabled', False)]
                elif condition['name'] in ['hidden_if_not_in',
                                           'disabled_if_not_in']:
                    conds = [('disabled', True)]
                elif condition['name'] == 'frozen_if_in':
                    conds = [('frozen', False), ('hidden', False), ('force_default_on_freeze', False)]
                elif condition['name'] == 'frozen_if_not_in':
                    conds = [('frozen', True), ('hidden', True), ('force_default_on_freeze', True)]
                elif condition['name'] in ['mandatory_if_in']:
                    conds = [('mandatory', False)]
                elif condition['name'] in ['mandatory_if_not_in']:
                    conds = [('mandatory', True)]
                else:
                    raise Exception(_(u'Unknown condition type for {0}').format(
                        condition['name']))
                families = condition['family']
                variables = condition['variable']
                for params in condition['param']:
                    # only plain value params are supported here
                    if params['type']:
                        raise Exception(_(u'Unknown type {0}').format(
                            params['type']))
                    if params['hidden']:
                        raise Exception(_(u'Unknown hidden {0}').format(
                            params['hidden']))
                    if params['name']:
                        raise Exception(_(u'Unknown name {0}').format(
                            params['name']))
                    if params['optional']:
                        raise Exception(_(u'Unknown optional {0}').format(
                            params['optional']))
                    value = params['value']
                    tconditions = []
                    for cond in conds:
                        tconditions.append((var, value, cond[0], cond[1]))
                    for variable, optional in variables:
                        # if optional is not set for only one condition, always not optional
                        self.requires['variable'].setdefault(variable, {'optional': True, 'list': []})
                        if not optional:
                            self.requires['variable'][variable]['optional'] = optional
                        self.requires['variable'][variable]['list'].extend(tconditions)
                    for family, optional in families:
                        # FIXME: optional not used
                        family = normalize_family(family)
                        # if optional is not set for only one condition, always not optional
                        self.requires['family'].setdefault(family, {'optional': True, 'list': []})
                        if not optional:
                            self.requires['family'][family]['optional'] = optional
                        self.requires['family'][family]['list'].extend(tconditions)
                    for list_name, list_value, optional in condition['list']:
                        # FIXME: optional not used
                        # if optional is not set for only one condition, always not optional
                        self.requires[list_name].setdefault(list_value, {'optional': True, 'list': []})
                        if not optional:
                            self.requires[list_name][list_value]['optional'] = optional
                        self.requires[list_name][list_value]['list'].extend(tconditions)
                self.fallback[var] = condition['fallback']

    def _populate_func(self, datas, _type, namespace):
        """
        Populate auto or fill (computing) functions.

        :param datas: mapping target variable -> [(func_name, params, level)]
        :param _type: 'auto', 'fill' or 'check'
        :param namespace: current namespace ('creole' or an extra)
        :return: mapping target -> (func_name, params)
        """
        data = {}
        for target, funcs in datas.items():
            if len(funcs) != 1:
                raise Exception(_(u'More than one function for target: {0}').format(target))
            func_name = funcs[0][0]
            func_params = funcs[0][1]
            func_level = funcs[0][2]
            if func_level != 'error':
                raise Exception(_(u"Can not set level to {0} for this kind of callback").format(func_level))
            params = {}
            for param in func_params:
                # unnamed params are keyed on the empty string
                name = {None: ''}.get(param['name'], param['name'])
                if param['type'] == None:
                    params.setdefault(name, []).append(unicode(param['value']))
                elif param['type'] == 'eole':
                    # note: 'hidden'/'optional' arrive as strings
                    check_disabled = param['hidden'] == "False"
                    optional = param['optional'] == 'True'
                    value = param['value']
                    if '.' in value:
                        ns, value = value.split('.', 1)
                        if ns != namespace:
                            raise Exception(_('Namespace different in param not allowed: {} - {}').format(ns, namespace))
                    params.setdefault(name, []).append({'optional': optional,
                                                        'check_disabled': check_disabled,
                                                        'value': value})
                elif param['type'] == 'number':
                    params.setdefault(name, []).append(int(param['value']))
                elif param['type'] == 'container':
                    # for compatibility with 2.3 dictionaries (#6240):
                    # replace the container info dict with the IP of the
                    # requested container
                    params.setdefault(name, []).append({'optional': False,
                                                        'check_disabled': False,
                                                        'value': 'container_ip_' + param['value']})
                elif param['type'] == 'context':
                    params.setdefault(name, []).append((None,))
                else:
                    raise Exception(_(u'Type {0} not yet implemented '
                                      u'for {1} for {2}').format(param['type'], _type,
                                                                 target))
            if namespace != 'creole' and '.' in target:
                # if extra and the variable is in an extra (so with a
                # complete path): redefine is not supported
                vdata = {'redefine': False}
            else:
                vdata = self.families[self.variables[target]]['vars'][target]
            # 6016: a redefined auto/fill target loses its static value
            if _type in ['auto', 'fills'] and vdata.get('value') is not None and \
                    vdata['redefine']:
                vdata['value'] = None
            if (_type == 'check' and target in self.check.keys()) or \
                    (_type != 'check' and (target in self.fill.keys() or
                                           target in self.auto.keys()) and not vdata['redefine']):
                raise Exception(_(u"Computing function already defined for {0}").format(
                    target))
            if _type != 'check':
                # a fill/auto redefinition replaces any previous one
                if target in self.fill:
                    del(self.fill[target])
                if target in self.auto:
                    del(self.auto[target])
            data[target] = (func_name, params)
        return data

    def populate_checks(self, checks, namespace):
        """Dispatch the parsed checks onto valid_enum/mandatory/
        consistency stores; anything else becomes a 'check' function."""
        # FIXME: should remove the old ones, as before
        for var, _checks in checks.items():
            for check in _checks:
                if check[0] == 'valid_enum':
                    open_values = False
                    for param in check[1]:
                        if param['name'] == 'checkval':
                            open_values = not {'True': True,
                                               'False': False}.get(param['value'])
                    # SECURITY NOTE(review): eval() on dictionary-provided
                    # text — dictionaries must be trusted input.
                    tvalues = eval(check[1][0]['value'])
                    values = []
                    for value in tvalues:
                        if type(value) == str:
                            values.append(unicode(value, 'utf-8'))
                        else:
                            values.append(value)
                    self.valid_enum[var] = (values, open_values)
                elif check[0] == 'obligatoire':
                    self.mandatory.append(var)
                elif check[0] == 'valid_differ' and check[1][0]['type'] == 'eole':
                    if len(check[1]) != 1:
                        raise Exception(_(u'valid_differ length should be 1'))
                    self.consistency.setdefault(var, []).append(('not_equal', check[1][0], check[2]))
                elif check[0] == 'valid_networknetmask':
                    if len(check[1]) != 1:
                        raise Exception(_(u'valid_networknetmask length should be 1'))
                    if check[1][0]['type'] != 'eole':
                        raise Exception(_(u'valid_networknetmask must have only eole variable'))
                    self.consistency.setdefault(var, []).append(('network_netmask', check[1][0], check[2]))
                elif check[0] == 'valid_ipnetmask':
                    if len(check[1]) != 1:
                        raise Exception(_(u'valid_ipnetmask length should be 1'))
                    if check[1][0]['type'] != 'eole':
                        raise Exception(_(u'valid_ipnetmask must have only eole variable'))
                    self.consistency.setdefault(var, []).append(('ip_netmask', check[1][0], check[2]))
                elif check[0] == 'valid_broadcast':
                    if len(check[1]) != 2:
                        raise Exception(_(u'valid_broadcast length should be 2'))
                    error = False
                    try:
                        if check[1][0]['type'] != 'eole' or check[1][1]['type'] != 'eole':
                            error = True
                    except IndexError:
                        error = True
                    if error:
                        raise Exception(_(u'valid_broadcast must have only eole variable'))
                    self.consistency.setdefault(var, []).append(('broadcast', check[1][0], check[1][1], check[2]))
                elif check[0] == 'valid_in_network':
                    if len(check[1]) != 2:
                        raise Exception(_(u'valid_in_network length should be 2'))
                    error = False
                    try:
                        if check[1][0]['type'] != 'eole' or check[1][1]['type'] != 'eole':
                            error = True
                    except IndexError:
                        error = True
                    if error:
                        raise Exception(_(u'valid_in_network must have only eole variable'))
                    self.consistency.setdefault(var, []).append(('in_network', check[1][0], check[1][1], check[2]))
                else:
                    self.check.update(self._populate_func({var: [check]},
                                                          'check', namespace))

    def populate_fills(self, fills, namespace):
        """Register the 'fill' computing functions."""
        self.fill.update(self._populate_func(fills, 'fill', namespace))

    def populate_autos(self, autos, namespace):
        """Register the 'auto' computing functions."""
        self.auto.update(self._populate_func(autos, 'auto', namespace))
|
||||
|
||||
class CreoleVarLoader(CreoleFamily, CreoleConstraint, CreoleGeneric):
|
||||
def __init__(self, no_auto_store=False):
    """Initialise the loader state.

    :param no_auto_store: flag stored on the instance for later use by
        the loading machinery (not visible in this chunk — see callers)
    """
    # tiramisu option descriptions, one per namespace
    self.space = []
    self._config = None
    self.is_lint = False
    # parse the DTD once; it drives dictionary parsing and option typing
    self.dtd = parse_dtd(dtdfilename)
    # None until a dictionary tells us whether containers are enabled
    self.containers_enabled = None
    self.options = {}
    self.paths = {}
    self.no_auto_store = no_auto_store
    self.force_store_vars = set()
    self.actions = {}
|
||||
|
||||
def _init_creole_varloader(self):
|
||||
self.variables = OrderedDict()
|
||||
self.generic = {}
|
||||
# Generate empty trees
|
||||
for opt in self.dtd['container']['options']:
|
||||
self.generic[opt + 's'] = []
|
||||
|
||||
def read_string(self, data_dicts, namespace, test_duplicate):
    """
    Read a set of dictionaries and a configuration passed as
    parameters (Zéphir).

    data_dicts: base64-encoded, ordered dictionary data
    """
    self._pre_populate(namespace)
    # parse the supplied dictionaries
    for dico in data_dicts:
        # hack: detect whether the constraint-aware loader is in use
        is_creole_constrainte = 'gen_container' in dir(self)
        parse_result = parse_string(dico, self.dtd, is_creole_constrainte, test_duplicate)
        # FIXME: use something other than 'module'
        self._populate(parse_result, namespace, 'module')
    self._post_populate(namespace)
    # build the tiramisu description from the collected data
    self._gen_descr(namespace)
|
||||
|
||||
def read_dir(self, dir_config, namespace, force_test_duplicate=None):
    """
    Read a whole directory of dictionaries.

    Fix: ``group_files.append(basename(subdir), idx, dirname(subdir))``
    passed three arguments to ``list.append`` (a guaranteed TypeError
    when a directory-group entry is a plain file); the three values are
    now appended as one tuple, matching the directory branch above.

    :param dir_config: a directory path, or a list of directories and
        directory-groups (tuples/lists of directories merged and sorted
        by file name)
    :param namespace: 'creole' or an extra namespace
    :param force_test_duplicate: override duplicate checking
    """
    self._pre_populate(namespace)
    if type(dir_config) != list:
        # if dir_config is not a list, also load the 'local' and
        # 'variante' subdirectories
        orig_dir = dir_config
        dir_config = [dir_config]
        for tdir in [join(orig_dir, 'local'),
                     join(orig_dir, 'variante')]:
            if isdir(tdir):
                dir_config.append(tdir)
    # duplicates are only checked in the main 'creole' namespace,
    # unless explicitly overridden
    if namespace == 'creole':
        if force_test_duplicate is not None:
            test_duplicate = force_test_duplicate
        else:
            test_duplicate = True
    else:
        test_duplicate = False
    for mydir in dir_config:
        if type(mydir) in (list, tuple):
            # directory group: collect files from each
            # directory and sort them before loading
            group_files = []
            for idx, subdir in enumerate(mydir):
                if isdir(subdir):
                    for filename in listdir(subdir):
                        group_files.append((filename, idx, subdir))
                else:
                    # was: append(basename(subdir), idx, dirname(subdir))
                    group_files.append((basename(subdir), idx, dirname(subdir)))
            def sort_group(file1, file2):
                # NOTE(review): Python 2 only cmp-style comparator
                # (int.__cmp__ and list.sort(cmp) do not exist in Py3)
                if file1[0] == file2[0]:
                    # sort by initial mydir order if same name
                    return file1[1].__cmp__(file2[1])
                # sort by filename
                elif file1[0] > file2[0]:
                    return 1
                else:
                    return -1
            group_files.sort(sort_group)
            filenames = [join(f[2], f[0]) for f in group_files]
        elif isdir(mydir):
            filenames = []
            for filename in listdir(mydir):
                filenames.append(join(mydir, filename))
            filenames.sort()
        else:
            filenames = [mydir]
        for filename in filenames:
            if filename.endswith('.xml'):
                if not isfile(filename):
                    raise FileNotFound(_(u"File {0} does not exist").format(filename))
                # level indicates the level of dictionary (module, variante or local)
                level = {'local': 'local',
                         'variante': 'variante'}.get(basename(dirname(filename)), 'module')
                # hack to detect if CreoleVarLoader or CreoleLoader is used
                is_creole_constrainte = 'gen_files' in dir(self)
                parse = parse_xml_file(filename, self.dtd, is_creole_constrainte, test_duplicate)
                self._populate(parse, namespace, level)
    self._post_populate(namespace)
    self._gen_descr(namespace)
|
||||
|
||||
def _pre_populate(self, namespace):
    """Reset loader state before loading a set of dictionaries.

    :raises Exception: when a Config has already been generated
    """
    if self._config is not None:
        raise Exception(_(u'Unable to run read_dir if Config already exists.'))
    #Re init all variables
    for func in dir(self):
        if func.startswith('_init_creole_'):
            getattr(self, func)()
    # conditions collected while parsing, keyed by target kind
    #FIXME should be automatic ...
    self.requires = {'variable': {}, 'family': {}, 'service': {},
                     'interface': {}, 'file': {}, 'filelist': {}, 'fstab': {},
                     'host': {}, 'service_restriction': {}, 'service_access': {}, "action": {}}
    # this information should be a self.requires, but we need to change
    # too much code to do that (#5717)
    self.fallback = {}
    self.options[namespace] = {}
|
||||
|
||||
def _populate(self, parse, namespace, level):
|
||||
parse_keys = parse.keys()
|
||||
#families always in first place
|
||||
parse_keys.remove('families')
|
||||
parse_keys.insert(0, 'families')
|
||||
for keys in parse_keys:
|
||||
func_name = 'populate_' + keys
|
||||
if func_name in dir(self):
|
||||
try:
|
||||
getattr(self, 'populate_' + keys)(parse[keys], namespace)
|
||||
except Exception as err:
|
||||
raise ConfigError(_(u"Unable to populate {0}: {1}").format(keys, err))
|
||||
else:
|
||||
for var in parse[keys]:
|
||||
var['level'] = level
|
||||
self.generic.setdefault(keys, []).append(var)
|
||||
|
||||
def populate_families_action(self, var, namespace):
    """Merge parsed action families into self.actions.

    'action' entries are tagged with the namespace and appended to the
    family's 'actions' list; every other key is stored as-is.
    """
    for family_name, family in var.items():
        entry = self.actions.setdefault(family_name, {})
        for key, value in family.items():
            if key != 'action':
                entry[key] = value
                continue
            value['name'] = namespace
            entry.setdefault('actions', []).append(value)
|
||||
|
||||
def _post_populate(self, namespace):
|
||||
if namespace == 'creole':
|
||||
if self.families['general']['vars']['mode_conteneur_actif']['value'] == 'oui':
|
||||
self.containers_enabled = True
|
||||
else:
|
||||
self.containers_enabled = False
|
||||
|
||||
def gen_actions(self):
    """Build the 'actions' OptionDescription.

    NOTE(review): the whole generation loop below is commented out, so the
    returned description currently has no children — confirm whether this
    is intentional for the zephir2 branch.
    """
    objactions = []
    #name = 'actions'
    #for name_family, families in self.actions.items():
    #    opts = []
    #    for type_, infos in families.items():
    #        if isinstance(infos, str):
    #            opts.append(UnicodeOption(type_, '', unicode(infos)))
    #        elif isinstance(infos, unicode):
    #            opts.append(UnicodeOption(type_, '', infos))
    #        elif infos == None:
    #            pass
    #        else:
    #            for index, info in enumerate(infos):
    #                optstype = []
    #                for key, val in info.items():
    #                    if key == 'type':
    #                        optstype.append(ChoiceOption(key, '', ('form', 'custom', 'external'), unicode(val)))
    #                    elif isinstance(val, list):
    #                        lst = []
    #                        for val_ in val:
    #                            lst.append(unicode(val_['name']))
    #                        if lst != []:
    #                            optstype.append(UnicodeOption(key, '', default=lst, default_multi=lst[0], multi=True))
    #                    else:
    #                        optstype.append(UnicodeOption(key, '', unicode(val)))

    #        opts.append(OptionDescription(type_[:-1] + str(index), '', optstype))
    #    objactions.append(OptionDescription(str(normalize_family(name_family)), name_family, opts))

    descr = OptionDescription('actions', 'actions', objactions)
    return descr
|
||||
|
||||
def gen_paths(self, namespace):
    """Build (and cache) the variable-name -> dotted-path mapping.

    A master variable maps to namespace.family.master.master (the name is
    repeated on purpose: the master option lives inside its own group), a
    slave maps to namespace.family.master.slave, and a plain variable maps
    to namespace.family.variable.
    """
    if namespace in self.paths:
        return self.paths[namespace]
    # invert self.groups: slave name -> its master
    slave_to_master = {}
    for master, slaves in self.groups.items():
        for slave in slaves:
            slave_to_master[slave] = master
    paths = {}
    for fname, fdata in self.families.items():
        for vname in fdata['vars']:
            if vname in self.groups:
                # master: repeated last component is intentional
                paths[vname] = '{0}.{1}.{2}.{2}'.format(namespace,
                                                        fname, vname)
            elif vname in slave_to_master:
                paths[vname] = '{0}.{1}.{2}.{3}'.format(
                    namespace, fname, slave_to_master[vname], vname)
            else:
                paths[vname] = '{0}.{1}.{2}'.format(namespace,
                                                    fname, vname)
    self.paths[namespace] = paths
    return paths
|
||||
|
||||
def update_requires(self, values, namespace, option=False):
    """Replace variable names with paths in self.requires.

    :param values: list of (variable, expected value, action, inverse) tuples
    :param namespace: current namespace; names without a dot are looked up
        in the 'creole' namespace
    :param option: when True, resolve the target to a real Tiramisu option
        instead of a (namespace, name) tuple
    :returns: (force_properties, requires) — properties to apply
        unconditionally (fallback variables), and resolved requires dicts
    """
    force_properties = []
    requires = []
    for value in values:
        try:
            if not '.' in value[0]:
                ns = 'creole'
                #path without namespace
                path = '.'.join(self.paths[ns][value[0]].split('.')[1:])
            else:
                ns = namespace
                path = '.'.join(value[0].split('.')[1:])
            opt = self.options[ns][path]
        except KeyError:
            # unknown variable: allowed only for declared fallbacks, in
            # which case the action becomes a plain forced property
            if self.fallback[value[0]]:
                force_properties.append(value[2])
                continue
            else:
                raise Exception(_(u"Condition using unexistent variable {0} as parameter.").format(value[0]))
        val = value[1]
        if opt['obj'] is ChoiceOption:
            if val not in opt['args']['values']:
                # expected value not in the choice list
                # NOTE(review): in the inverse case the property is forced
                # AND the require is still appended below (no continue) —
                # confirm this fall-through is intentional
                if value[3]:
                    force_properties.append(value[2])
                else:
                    continue
        val = convert_tiramisu_value(val, opt['obj'])
        if option:
            ropt = self._get_option(ns, path)
        else:
            ropt = (ns, value[0])

        requires.append({'option': ropt, 'expected': val, 'action': value[2], 'inverse': value[3]})
    return force_properties, requires
|
||||
|
||||
def _populate_requires(self, namespace):
    """Attach the collected variable conditions to their target options.

    Each entry of self.requires['variable'] either forces properties on
    the option (when some condition source fell back) or becomes the
    option's Tiramisu 'requires' list.
    """
    for vname, values in self.requires['variable'].items():
        try:
            if not '.' in vname:
                ns = 'creole'
                #path without namespace
                path = '.'.join(self.paths[ns][vname].split('.')[1:])
            else:
                ns = namespace
                path = '.'.join(vname.split('.')[1:])
            opt = self.options[ns][path]
        except KeyError:
            if values['optional']:
                continue
            raise Exception(_(u"Condition targetting unexistent variable {0}").format(vname))
        props, req = self.update_requires(values['list'], namespace)
        if props != []:
            if opt['args']['requires'] is not None:
                raise Exception(_(u'requires already set for this option preventing changing properties {0}').format(vname))
            opt['args']['properties'] = tuple(list(opt['args']['properties']) + props)
        else:
            if opt['args']['requires'] is not None:
                raise Exception(_(u'requires already set for this option {0}').format(vname))
            #if force_store_value is set, remove force_default_on_freeze #7854
            if 'force_store_value' in opt['args']['properties']:
                new_rep = []
                for nreq in req:
                    if nreq['action'] != 'force_default_on_freeze':
                        new_rep.append(nreq)
                req = new_rep
            opt['args']['requires'] = req
            # properties now driven by requires must not also be static
            calc_properties = set()
            for r in req:
                calc_properties.add(r['action'])
            opt['args']['properties'] = tuple(set(opt['args']['properties']) - calc_properties)
|
||||
|
||||
def _get_option(self, namespace, vname):
    """Lazily build (and memoize) the Tiramisu option for *vname*.

    Recursively resolves requires targets, callback parameters, children
    of option descriptions and symlink targets, then instantiates the
    object described by self.options[namespace][vname].
    """
    option = self.options[namespace][vname]
    if option['option'] is None:
        if option['optiontype'] == 'option':
            if option['args']['requires'] is not None:
                for require in option['args']['requires']:
                    name = require['option'][1]
                    if "." in name:
                        path = name
                    else:
                        path = self.paths[namespace][require['option'][1]]
                        path = '.'.join(path.split('.')[1:])
                    require['option'] = self._get_option(require['option'][0], path)
            if 'callback_params' in option['args'] and option['args']['callback_params'] is not None:
                new_call_params = option['args']['callback_params']
                for key, callback_params in option['args']['callback_params'].items():
                    new_cp = []
                    for callback_param in callback_params:
                        # a 2-tuple ((namespace, name), check_disabled)
                        # designates another option; resolve it
                        if isinstance(callback_param, tuple) and len(callback_param) == 2:
                            path = callback_param[0][1]
                            if '.' not in path:
                                path = '.'.join(self.paths['creole'][path].split('.')[1:])
                            new_cp.append((self._get_option(callback_param[0][0], path), callback_param[1]))
                        else:
                            new_cp.append(callback_param)
                    new_call_params[key] = tuple(new_cp)
                option['args']['callback_params'] = new_call_params
            opt = option['obj'](**option['args'])
        elif option['optiontype'] == 'optiondescription':
            children = []
            for child in option['args']['children']:
                children.append(self._get_option(namespace, child))
            option['args']['children'] = children
            if option['args']['requires'] is not None:
                for require in option['args']['requires']:
                    opt_name = require['option'][1]
                    # NOTE(review): unlike the 'option' branch above,
                    # dotted names are apparently left unresolved here —
                    # confirm against the original indentation
                    if '.' not in opt_name:
                        path = '.'.join(self.paths['creole'][opt_name].split('.')[1:])
                        require['option'] = self._get_option(require['option'][0], path)
            opt = OptionDescription(**option['args'])
            if option['group_type'] == 'master':
                opt.impl_set_group_type(groups.master)
            elif option['group_type'] == 'family':
                opt.impl_set_group_type(groups.family)
            else:
                raise Exception('Unknown group {}'.format(option['group_type']))
        elif option['optiontype'] == 'symlinkoption':
            sym_path = option['path'].split('.')
            sym_opt = self._get_option(sym_path[0], '.'.join(sym_path[1:]))
            option['args']['opt'] = sym_opt
            opt = option['obj'](**option['args'])
        else:
            raise Exception('unknown type {0}'.format(option['optiontype']))
        try:
            for key, info in self.options[namespace][vname]['informations'].items():
                opt.impl_set_information(key, info)
        except KeyError:
            # no 'informations' entry for this option
            pass
        self.options[namespace][vname]['option'] = opt
    return self.options[namespace][vname]['option']
|
||||
|
||||
def _gen_consistencies(self, namespace):
    """Register consistency checks (self.consistency) on their options.

    Each param is [check_name, dependent_var_dict..., level]; the trailing
    'warning' level turns the check into a warning-only check.
    """
    for vname, params in self.consistency.items():
        path = '.'.join(self.paths[namespace][vname].split('.')[1:])
        opt = self._get_option(namespace, path)
        for param in params:
            dopt = []
            c_params = {}
            if param[-1] == 'warning':
                c_params['warnings_only'] = True
            for dvdict in param[1:-1]:
                dvname = dvdict['value']
                try:
                    path = '.'.join(self.paths[namespace][dvname].split('.')[1:])
                    dopt.append(self._get_option(namespace, path))
                except KeyError:
                    # missing dependent variable is only tolerated when
                    # explicitly marked optional (string 'True' from XML)
                    if dvdict['optional'] != 'True':
                        raise Exception(_(u"Check using unexistent variable {0} as parameter.").format(dvname))
                if dvdict['hidden'] == 'False':
                    c_params['transitive'] = False
            opt.impl_add_consistency(param[0], *dopt, **c_params)
|
||||
|
||||
def _is_hidden(self, vname, vdata):
|
||||
#si la variable est hidden mais pas disabled
|
||||
if not vname in self.requires['variable'] and vdata['hidden']:
|
||||
return True
|
||||
return False
|
||||
|
||||
def _is_multi(self, vname, vdata, group_master):
|
||||
#if not a list
|
||||
if not vdata['multi'] and (group_master == None or
|
||||
(group_master != None and \
|
||||
vname not in self.groups[group_master])):
|
||||
return False
|
||||
return True
|
||||
|
||||
def _is_mandatory(self, vname, vdata):
|
||||
if vname in self.mandatory or vdata['mandatory']:
|
||||
return True
|
||||
return False
|
||||
|
||||
def _is_auto(self, vname):
|
||||
if vname in self.auto:
|
||||
return True
|
||||
return False
|
||||
|
||||
def _gen_func(self, path, obj, callback, callback_params, namespace):
    """Resolve an eosfunc callback and its parameters for one option.

    :param path: option path (only its last component is used for 'creole')
    :param obj: mapping of variable name -> (function name, params),
        typically self.fill, self.auto or self.check
    :param callback: current callback (kept when vname is not in obj)
    :param callback_params: accumulated parameters (None starts fresh)
    :returns: (callback, params) with params values converted to tuples
    :raises ValueError: when the named function is missing from eosfunc
    """
    if callback_params is None:
        callback_params = {}
    if namespace == 'creole':
        vname = path.split('.')[-1]
    else:
        vname = path
    if vname in obj:
        callback, params = obj[vname]
        try:
            callback = getattr(eosfunc, callback)
        except AttributeError:
            raise ValueError(_(u'unknown function {0} in eosfunc').format(callback))
        for param, pvalues in params.items():
            for pvalue in pvalues:
                if type(pvalue) == dict:
                    if namespace == 'creole':
                        ns = 'creole'
                        #it's a Tiramisu's **Option**, that is, a variable
                        #optional could be None, False or True
                        if pvalue['optional'] == True and \
                                pvalue['value'] not in self.variables and \
                                pvalue['value'] not in self.options[namespace]:
                            continue
                        path = '.'.join(self.paths[namespace][pvalue['value']].split('.')[1:])
                        if not path in self.options[namespace]:
                            if self.is_lint:
                                # lint mode tolerates dangling references
                                return None, {}
                            else:
                                raise Exception(_(u"Variable computing function"
                                                  u" using unknown variable "
                                                  u"{0}").format(pvalue['value']))
                    else:
                        #Support extra namespaces
                        try:
                            # when we don't deal with the 'creole' namespace
                            # the pvalues are paths, ex: schedule.bacula.day
                            if namespace != 'creole' and not '.' in pvalue['value']:
                                ns = 'creole'
                            else:
                                ns = namespace
                        except KeyError:
                            raise Exception(_(u"Variable computing function"
                                              u" using unknown variable "
                                              u"{0}").format(pvalue['value']))
                    callback_params.setdefault(param, []).append(((ns, pvalue['value']),
                                                                  pvalue['check_disabled']))
                else:
                    # literal parameter value
                    callback_params.setdefault(param, []).append(pvalue)
    normalize_callback_params = {}
    for callback_name, parameters in callback_params.items():
        normalize_callback_params[callback_name] = tuple(parameters)
    return callback, normalize_callback_params
|
||||
|
||||
def _gen_callback(self, namespace):
    """Attach fill/auto callbacks to every plain option of the namespace."""
    for path, option in self.options[namespace].items():
        if option['optiontype'] != 'option':
            continue
        callback = None
        callback_params = {}
        if namespace != 'creole':
            path = namespace + '.' + path
        callback, callback_params = self._gen_func(path, self.fill, callback,
                                                   callback_params, namespace)
        callback, callback_params = self._gen_func(path, self.auto, callback,
                                                   callback_params, namespace)
        # no callback_params => None
        if callback_params == {}:
            callback_params = None
        if callback is not None:
            option['args']['callback'] = callback
            option['args']['callback_params'] = callback_params
|
||||
|
||||
|
||||
def _gen_check(self, namespace):
    """Attach validators (self.check functions) to the namespace options."""
    for path, option in self.options[namespace].items():
        validator = self._gen_func(path, self.check, None, None, namespace=namespace)
        if validator[0] is not None:
            option['args']['validator'] = validator[0]
            # NOTE(review): nesting reconstructed — params assumed to be
            # set only when a validator exists; confirm original indent
            if validator[1] is not None:
                option['args']['validator_params'] = validator[1]
|
||||
|
||||
def _gen_option(self, fname, vname, vdata, group_master, family_mode, namespace, goptions):
    """Generate an option with given information.

    :param fname: family name
    :param vname: variable name
    :param vdata: variable informations loaded from the XML file
    :param group_master: name of the master variable (None outside groups)
    :param family_mode: mode of the containing family
    :param namespace: target configuration namespace
    :param goptions: paths of options already generated for this group
    :returns: the dotted path registered in self.options[namespace]
    """
    informations = {}
    #FIXME master_slaves
    if group_master is not None:
        path = '.'.join([fname, group_master, vname])
    else:
        path = '.'.join([fname, vname])

    if namespace == 'creole':
        cname = vname
    else:
        cname = namespace + '.' + path
    # a computed variable never takes its XML value directly
    has_callback = cname in self.fill or cname in self.auto
    if not has_callback:
        value = vdata['value']
    else:
        value = None
    multi = self._is_multi(vname, vdata, group_master)
    if value != None and multi and type(value) != list:
        value = [value]
    default_multi = None
    if multi and value is not None and vname != group_master:
        default_multi = value[0]
    # slaves carry no own value list
    if value is not None and self._is_a_masterslave(vname, group_master):
        if len(value) != 1:
            # exception to the "no slave value for a master without value"
            # rule: some dictionaries define a default slave value; exactly
            # one value is tolerated
            raise Exception(_(u"Slave value length can not be greater "
                              u"than 1."))
        if vname != group_master:
            value = []
    if vdata['description'] is None:
        doc = vname
    else:
        doc = vdata['description']
    args = {'name': vname, 'doc': doc,
            'multi': multi}
    #args['callback'], args['callback_params'] = self._gen_callback(path, paths, namespace)
    args['properties'] = self._gen_properties(vname, value, vdata,
                                              has_callback, family_mode,
                                              default_multi, group_master,
                                              goptions, namespace, path)
    is_choiceoption = False
    ovalue = None
    if namespace == 'creole':
        valid_enum_path = vname
    else:
        valid_enum_path = namespace + '.' + path
    # NOTE(review): this unconditionally overrides the namespace-aware
    # value computed just above — looks like leftover debugging; confirm
    valid_enum_path = vname
    # has_key() is Python 2 only
    if self.valid_enum.has_key(valid_enum_path):
        valid_enum = self.valid_enum[valid_enum_path]
        ovalue = valid_enum[0][0]
        open_values = valid_enum[1]
        if open_values:
            # open enumeration: the values are only proposals
            informations['proposed_value'] = tuple(valid_enum[0])
        else:
            obj = ChoiceOption
            olist = tuple(valid_enum[0])
            forceargs = None
            is_choiceoption = True
    if not is_choiceoption:
        obj, olist, forceargs = CONVERT_OPTION.get(vdata['type'], (None, None, None))
        if olist is not None:
            ovalue = olist[0]
    if obj is None:
        raise Exception(_(u'Unknown type {0}').format(vdata['type']))
    #args['validator'], args['validator_params'] = self._gen_check(vname, namespace)
    args['default'] = convert_tiramisu_value(value, obj)
    args['default_multi'] = convert_tiramisu_value(default_multi, obj)
    if olist:
        args['values'] = tuple(olist)
    if ovalue is not None:
        #if default list and no value
        if args['default'] is None and not args['multi'] and not has_callback:
            args['default'] = ovalue
        #if value but not in list
        if args['default'] != None and args['multi'] and type(args['default']) != list:
            args['default'] = [args['default']]
    if forceargs is not None:
        args.update(forceargs)
    if vname in self.helps['variables']:
        informations['help'] = self.helps['variables'][vname]
    if vname in self.separators:
        informations['separator'] = self.separators[vname]
    args['requires'] = None
    option = {'optiontype': 'option', 'obj': obj, 'args': args,
              'informations': informations, 'option': None}
    self.options[namespace][path] = option
    return path
|
||||
|
||||
def _gen_master_group(self, namespace, fname, group_master, goptions):
    """Register the OptionDescription describing a master/slaves group.

    The group inherits the mode properties of its master option
    (goptions[0]); when the master carries no mode, the default
    modes_level[1] is applied.

    :returns: the dotted path registered in self.options[namespace]
    """
    path = '.'.join((fname, group_master))
    properties = []
    # BUGFIX: the original reused the name 'mode' both as the loop
    # variable and as the found-flag, so the flag was clobbered by the
    # (truthy) last mode string and the default mode was never applied
    mode_found = False
    for mode in modes_level:
        if mode in self.options[namespace][goptions[0]]['args']['properties']:
            properties.append(mode)
            mode_found = True
    if not mode_found:
        properties.append(modes_level[1])
    self.options[namespace][path] = {'optiontype': 'optiondescription',
                                     'args': {'name': group_master,
                                              'doc': 'Master {0}'.format(group_master),
                                              'children': goptions,
                                              'properties': tuple(properties),
                                              'requires': None},
                                     'group_type': 'master',
                                     'option': None}
    return path
|
||||
|
||||
def _gen_properties(self, vname, value, vdata, has_callback, family_mode,
                    default_multi, group_master, goptions, namespace, path):
    """Compute the Tiramisu properties tuple for one variable.

    Combines hidden/frozen state, mandatoriness, auto_freeze/auto_save
    handling (#7854) and the effective mode (never weaker than the
    family's — or the master's — mode).
    """
    if self._is_hidden(vname, vdata) or self._is_auto(vname):
        properties = ['hidden', 'frozen']
        #7854
        if vdata['auto_save'] is False and not self.no_auto_store:
            properties.append('force_default_on_freeze')
    else:
        properties = []
    mode = vdata['mode']
    #mandatory variable with no value is a basic value
    if self._is_mandatory(vname, vdata):
        properties.append('mandatory')
        if value in (None, []) and vname not in self.auto and \
                vname not in self.fill:
            mode = modes_level[0]
    #non mandatory variable with a value becomes mandatory (#7141)
    elif value not in (None, []) or default_multi is not None:
        properties.append('mandatory')

    if vdata['auto_freeze'] == True:
        if self._is_auto(vname):
            raise Exception(_('{0} is auto, so must not be auto_freeze or auto_save').format(vname))
        if not self.no_auto_store:
            properties.extend(['auto_freeze'])
            if mode != 'expert':
                mode = modes_level[0]
            self.force_store_vars.add(self.paths[namespace][vname])
    if vdata['auto_save'] is True:
        if self._is_auto(vname):
            raise Exception(_('{0} is auto, so must not be auto_freeze or auto_save').format(vname))
        if not self.no_auto_store:
            properties.append('force_store_value')
            if mode != 'expert':
                mode = modes_level[0]
            self.force_store_vars.add(self.paths[namespace][vname])
    if self._is_a_masterslave(vname, group_master) and goptions != []:
        # align on the master option's mode when it is stricter
        master_mode = 'normal'
        for mod in self.options[namespace][goptions[0]]['args']['properties']:
            if mod in modes_level:
                master_mode = mod
                break
        if modes[mode] < modes[master_mode]:
            properties.append(master_mode)
        else:
            properties.append(mode)
    else:
        if modes[mode] < modes[family_mode]:
            properties.append(family_mode)
        else:
            properties.append(mode)
    if vdata.get('disabled') == True:
        properties.append('disabled')
    return tuple(properties)
|
||||
|
||||
def _is_a_masterslave(self, vname, group_master):
|
||||
return group_master != None and (vname == group_master or
|
||||
vname in self.groups[group_master])
|
||||
|
||||
def _gen_options_by_family(self, fname, fdata, namespace):
    """Generate all options of one family plus the family description.

    Master variables produce their whole group (master + slaves) in order;
    slaves already emitted through their master are skipped.

    :returns: the family name registered in self.options[namespace]
    """
    options = []
    family_mode = fdata['mode']
    slaves = []
    for vname, vdata in fdata['vars'].items():
        goptions = []
        if vname in self.groups:
            # master variable: emit the master then each slave, then the group
            slaves.extend(self.groups[vname])
            goptions.append(self._gen_option(fname, vname, vdata, vname, family_mode, namespace, goptions))
            for sname in self.groups[vname]:
                sdata = fdata['vars'][sname]
                goptions.append(self._gen_option(fname, sname, sdata, vname, family_mode, namespace, goptions))
            options.append(self._gen_master_group(namespace, fname, vname, goptions))
        elif vname in slaves:
            # already generated through its master group
            pass
        else:
            options.append(self._gen_option(fname, vname, vdata, None, family_mode, namespace, goptions))
    #family (Python 2 byte-string normalization)
    fname = unicode.encode(unicode(fname), 'utf-8')
    properties = [fdata['mode']]
    if fname in self.requires['family']:
        props, req = self.update_requires(self.requires['family'][fname]['list'], namespace)
        if props != []:
            properties.extend(props)
            requires = None
        else:
            requires = req
    else:
        requires = None
    if fdata['hidden'] == True:
        #if hidden_if_in or hidden_if_not_in exists for this family, don't
        #hide the family statically
        hide = True
        for var, val, act, inv in self.requires['family'].get(fname, {'list': []})['list']:
            if act == 'disabled':
                hide = False
                break
        if hide:
            properties.append('hidden')

    informations = {}
    if 'icon' in fdata:
        informations['icon'] = fdata['icon']
    if fname in self.helps['families']:
        informations['help'] = self.helps['families'][fname]
    # BUGFIX: the 'requires' key was duplicated in this dict literal;
    # the duplicate has been removed (same resulting value)
    family = {'optiontype': 'optiondescription',
              'args': {'name': fname, 'doc': fdata['doc'],
                       'children': options,
                       'properties': tuple(properties),
                       'requires': requires},
              'group_type': 'family',
              'informations': informations,
              'option': None}
    self.options[namespace][fname] = family
    return fname
|
||||
|
||||
def _gen_descr(self, namespace):
    """Generate the full OptionDescription tree for one namespace.

    Builds paths, per-family options, requires, callbacks, checks and
    consistencies, then appends the namespace (and, for 'creole' with a
    constraint-aware loader, the 'containers' tree) to self.space.
    """
    #hack to detect if CreoleVarLoader or CreoleLoader is used
    is_creole_constrainte = 'gen_files' in dir(self)
    paths = self.gen_paths(namespace)
    if namespace == 'creole':
        flist = [self.gen_containers_creole(paths, namespace)]
    else:
        flist = []
    for fname in self.requires['family']:
        if fname not in self.families and not self.requires['family'][fname]['optional']:
            raise Exception(_(u'Unknown family {0} has requires').format(fname))
    for fname, fdata in self.families.items():
        flist.append(self._gen_options_by_family(fname, fdata, namespace))
    # families are consumed; reset for a possible next namespace
    self.families = {}
    self._populate_requires(namespace)
    self._gen_callback(namespace)
    self._gen_check(namespace)
    self._gen_consistencies(namespace)
    options = []
    for fl in flist:
        options.append(self._get_option(namespace, fl))

    self.space.append(OptionDescription(namespace, '', options))
    if namespace == 'creole' and is_creole_constrainte:
        containers = self.gen_container(paths, namespace='containers')
        self.space.append(OptionDescription('containers', '',
                                            containers))
|
||||
|
||||
def get_config(self):
    """Build (once) and return the Tiramisu Config for the loaded space.

    Registers force-store information, drops the global 'hidden' property
    and makes every mode plus 'hidden' permissive.
    """
    if self._config is None:
        if self.actions != {}:
            self.space.append(self.gen_actions())
        descr = OptionDescription('baseconfig', 'baseconfigdescr',
                                  self.space)
        self._config = Config(descr)
        self._config.impl_set_information('force_store_vars', self.force_store_vars)
        self._config.impl_set_information('force_store_values', list(self.force_store_vars))
        self._config.cfgimpl_get_settings().remove('hidden')
        _modes = list(modes_level)
        _modes.append('hidden')
        self._config.cfgimpl_get_settings().setpermissive(tuple(_modes))
    return self._config
|
||||
|
||||
def get_real_container_name(self, containers, cont):
    """Follow 'group' links until reaching a container that is its own group."""
    current = cont
    while True:
        group = containers[current]['group']
        if group == current:
            return current
        current = group
|
|
@ -0,0 +1,67 @@
|
|||
#! /usr/bin/env python
|
||||
# -*- coding: UTF-8 -*-
|
||||
|
||||
|
||||
import base64
|
||||
|
||||
# Length of the XOR keystream below; must stay equal to len(KEYS).
KEY_LENGTH = 40
# Fixed keystream used by bitwise() to XOR-obfuscate strings.
# NOTE(review): this is simple obfuscation, not cryptography.
KEYS = [
    0x50,
    0xF7,
    0x82,
    0x69,
    0xEA,
    0x2D,
    0xDD,
    0x2D,
    0x6A,
    0xB4,
    0x33,
    0x8F,
    0xD5,
    0xC7,
    0x90,
    0x9C,
    0x22,
    0x95,
    0x61,
    0xE5,
    0x65,
    0xF6,
    0xB0,
    0x4B,
    0x94,
    0x47,
    0xB0,
    0xBD,
    0x73,
    0x58,
    0x56,
    0x87,
    0x79,
    0x7B,
    0xE6,
    0xB0,
    0xD2,
    0x20,
    0x28,
    0xE1
]
|
||||
|
||||
def bitwise(s, keys=None):
    """XOR *s* with a repeating keystream and return the result.

    The operation is symmetric: applying it twice restores the input.

    :param s: string to (de)obfuscate
    :param keys: optional list of integer key bytes; defaults to the
        module-level KEYS (KEY_LENGTH entries), keeping the historical
        behaviour for existing callers
    """
    if keys is None:
        keys = KEYS
    # join() instead of repeated string concatenation (linear, not quadratic)
    return ''.join(chr(ord(s[i]) ^ keys[i % len(keys)])
                   for i in range(len(s)))
|
||||
|
||||
def wcrypt(s):
    """XOR-obfuscate *s* and return it base64-encoded.

    NOTE(review): Python 2 only — base64.encodestring was removed in
    Python 3.9 (use encodebytes when porting).
    """
    s = bitwise(s)
    return base64.encodestring(s)[:-1]  # encodestring returns the string with a trailing '\n'; strip it
|
||||
|
||||
def wdecrypt(s):
    """Base64-decode *s* and undo the XOR obfuscation (inverse of wcrypt).

    NOTE(review): Python 2 only — base64.decodestring was removed in
    Python 3.9 (use decodebytes when porting).
    """
    s = base64.decodestring(s)
    return bitwise(s)
|
|
@ -0,0 +1,161 @@
|
|||
try:
|
||||
import doctest
|
||||
doctest.OutputChecker
|
||||
except (AttributeError, ImportError): # Python < 2.4
|
||||
import util.doctest24 as doctest
|
||||
try:
|
||||
import xml.etree.ElementTree as ET
|
||||
except ImportError:
|
||||
import elementtree.ElementTree as ET
|
||||
from xml.parsers.expat import ExpatError as XMLParseError
|
||||
|
||||
RealOutputChecker = doctest.OutputChecker
|
||||
|
||||
|
||||
def debug(*msg):
    """Print the space-joined *msg* values on stderr.

    Uses sys.stderr.write instead of the Python 2-only
    ``print >> sys.stderr`` statement, so the module stays importable on
    both Python 2 and Python 3.
    """
    import sys
    sys.stderr.write(' '.join(map(str, msg)) + '\n')
|
||||
|
||||
|
||||
class HTMLOutputChecker(RealOutputChecker):
    """Doctest output checker that falls back to structural XML comparison
    when the literal text comparison fails."""

    def check_output(self, want, got, optionflags):
        """Return True when outputs match literally or as equivalent XML."""
        normal = RealOutputChecker.check_output(self, want, got, optionflags)
        if normal or not got:
            return normal
        try:
            want_xml = make_xml(want)
        except XMLParseError:
            # expected output is not XML: keep the literal verdict (False)
            pass
        else:
            try:
                got_xml = make_xml(got)
            except XMLParseError:
                pass
            else:
                if xml_compare(want_xml, got_xml):
                    return True
        return False

    def output_difference(self, example, got, optionflags):
        """Extend the standard difference report with normalized XML and a
        structural difference report when both sides parse as XML."""
        actual = RealOutputChecker.output_difference(
            self, example, got, optionflags)
        want_xml = got_xml = None
        try:
            want_xml = make_xml(example.want)
            want_norm = make_string(want_xml)
        except XMLParseError as e:
            if example.want.startswith('<'):
                want_norm = '(bad XML: %s)' % e
                # '<xml>%s</xml>' % example.want
            else:
                return actual
        try:
            got_xml = make_xml(got)
            got_norm = make_string(got_xml)
        except XMLParseError as e:
            if example.want.startswith('<'):
                got_norm = '(bad XML: %s)' % e
            else:
                return actual
        s = '%s\nXML Wanted: %s\nXML Got   : %s\n' % (
            actual, want_norm, got_norm)
        if got_xml and want_xml:
            result = []
            # NOTE(review): xml_compare() defined later in this file takes
            # only two arguments, so this 3-argument call would raise
            # TypeError — the reporter parameter seems lost; confirm
            xml_compare(want_xml, got_xml, result.append)
            s += 'Difference report:\n%s\n' % '\n'.join(result)
        return s
|
||||
|
||||
|
||||
def xml_sort(children):
    """Return *children* ordered by their 'name' attribute (falling back to
    the tag), preserving the original order of same-key elements."""
    buckets = {}
    for child in children:
        key = child.attrib.get('name', child.tag)
        buckets.setdefault(key, []).append(child)
    ordered = []
    for key in sorted(buckets):
        ordered.extend(buckets[key])
    return ordered
|
||||
|
||||
def xml_compare(x1, x2):
    """Structurally compare two XML elements, printing the first mismatch.

    Tags, attributes (both directions), text and tail are compared with
    text_compare(); children are sorted with xml_sort() before a pairwise
    recursive comparison.

    :returns: True when the trees are equivalent, False otherwise
    """
    if x1.tag != x2.tag:
        print ('Tags do not match: %s and %s' % (x1.tag, x2.tag))
        return False
    for name, value in x1.attrib.items():
        if x2.attrib.get(name) != value:
            print ('Attributes do not match: %s=%r, %s=%r'
                   % (name, value, name, x2.attrib.get(name)))
            return False
    for name in x2.attrib:
        if name not in x1.attrib:
            print ('x2 has an attribute x1 is missing: %s'
                   % name)
            return False
    if not text_compare(x1.text, x2.text):
        print ('text: %r != %r' % (x1.text, x2.text))
        return False
    if not text_compare(x1.tail, x2.tail):
        print ('tail: %r != %r' % (x1.tail, x2.tail))
        return False

    # BUGFIX/portability: Element.getchildren() was deprecated and removed
    # in Python 3.9; list(element) is the documented equivalent and works
    # on every Python version
    cl1 = xml_sort(list(x1))
    cl2 = xml_sort(list(x2))

    if len(cl1) != len(cl2):
        cl1_tags = []
        for c in cl1:
            cl1_tags.append(c.tag)
        cl2_tags = []
        for c in cl2:
            cl2_tags.append(c.tag)
        print ('children length differs, %i != %i (%s != %s)'
               % (len(cl1), len(cl2), cl1_tags, cl2_tags))
        return False
    i = 0
    for c1, c2 in zip(cl1, cl2):
        i += 1
        if not xml_compare(c1, c2):
            if 'name' in c1.attrib:
                name = c1.attrib['name']
            else:
                name = i
            print ('in tag "%s" with name "%s"'
                   % (c1.tag, name))
            return False
    return True
|
||||
|
||||
|
||||
def text_compare(t1, t2):
    """Compare two text nodes.

    None and the empty string are considered equal, a literal ``'*'``
    on either side matches anything, and surrounding whitespace is
    ignored.
    """
    # Both sides empty (None or '') counts as a match.
    if not (t1 or t2):
        return True
    # '*' is a wildcard: it matches any content.
    if '*' in (t1, t2):
        return True
    left = (t1 or '').strip()
    right = (t2 or '').strip()
    return left == right
|
||||
|
||||
|
||||
def make_xml(s):
    """Parse the markup fragment *s* by wrapping it in a synthetic
    <xml> root element and return the resulting element tree."""
    wrapped = '<xml>%s</xml>' % s
    return ET.XML(wrapped)
|
||||
|
||||
|
||||
def make_string(xml):
    """Serialize *xml* back to a markup fragment without the <xml> wrapper.

    Accepts either an element tree (as produced by make_xml) or a plain
    string, which is first parsed through make_xml.

    :returns: the inner markup, or '' for an empty wrapper
    """
    try:
        text_types = (str, unicode)  # Python 2
    except NameError:
        text_types = (str,)          # Python 3: 'unicode' no longer exists
    if isinstance(xml, text_types):
        xml = make_xml(xml)
    s = ET.tostring(xml)
    if not isinstance(s, str):
        # Python 3 serializers return bytes by default.
        s = s.decode('utf-8')
    if s == '<xml />':
        return ''
    assert s.startswith('<xml>') and s.endswith('</xml>'), repr(s)
    return s[5:-6]
|
||||
|
||||
|
||||
def install():
    # Monkey-patch doctest so that doctest output comparison goes through
    # the HTML/XML-aware checker (HTMLOutputChecker is presumably defined
    # earlier in this module — outside this view).
    doctest.OutputChecker = HTMLOutputChecker
|
||||
|
|
@ -0,0 +1,107 @@
|
|||
# coding: utf-8
|
||||
from os.path import basename, dirname, isdir, isfile, join
|
||||
from os import listdir
|
||||
from base64 import decodestring
|
||||
from io import BytesIO
|
||||
from collections import OrderedDict
|
||||
import sys
|
||||
|
||||
from lxml.etree import DTD, parse, tostring, XMLParser # pylint: disable=E0611
|
||||
|
||||
from .i18n import _
|
||||
from .utils import normalize_family
|
||||
from .error import CreoleDictConsistencyError
|
||||
from .config import VIRTBASE, VIRTROOT, VIRTMASTER, templatedir
|
||||
|
||||
HIGH_COMPATIBILITY = True
|
||||
|
||||
class XMLReflector(object):
    """Helper class for loading the Creole XML file,
    parsing it, validating against the Creole DTD,
    writing the xml result on the disk
    """
    def __init__(self):
        # DTD object, populated by parse_dtd().
        self.dtd = None

    def parse_dtd(self, dtdfilename):
        """Loads the Creole DTD.

        :raises IOError: if the DTD is not found
        :param dtdfilename: the full filename of the Creole DTD
        """
        if not isfile(dtdfilename):
            raise IOError(_("no such DTD file: {}").format(dtdfilename))
        with open(dtdfilename, 'r') as dtdfd:
            self.dtd = DTD(dtdfd)

    def parse_xmlfile(self, xmlfile, from_zephir=None, zephir2=False):
        """Parses and validates some Creole XML against the Creole DTD.

        :param xmlfile: a filename, or — when `from_zephir` is set — the
            raw XML content (base64-encoded unless `zephir2` is True)
        :returns: the root element tree object
        """
        if from_zephir:
            if zephir2:
                document = parse(BytesIO(xmlfile), XMLParser(remove_blank_text=True))
            else:
                # NOTE(review): base64.decodestring was removed in Python 3.9;
                # decodebytes is the forward-compatible spelling.
                document = parse(BytesIO(decodestring(xmlfile)), XMLParser(remove_blank_text=True))
        else:
            document = parse(xmlfile)
        assert self.dtd.validate(document), _("not a valid xml file: {}").format(xmlfile)
        return document.getroot()

    def load_xml_from_folders(self, xmlfolders, from_zephir):
        """Loads all the XML files located in the xmlfolders' list.

        :param xmlfolders: list of full folder's name
        :param from_zephir: when set, `xmlfolders` is a list of raw XML
            contents instead of paths
        :returns: list of (label, root element) tuples
        """
        documents = []
        if from_zephir:
            for idx, xmlfile in enumerate(xmlfolders):
                documents.append(('generate_{}'.format(idx),
                                  self.parse_xmlfile(xmlfile, from_zephir=from_zephir)))
        else:
            if not isinstance(xmlfolders, list):
                xmlfolders = [xmlfolders]
            for xmlfolder in xmlfolders:
                if isinstance(xmlfolder, (list, tuple)):
                    # directory group: collect files from each directory
                    # and sort them before loading
                    group_files = []
                    for idx, subdir in enumerate(xmlfolder):
                        if isdir(subdir):
                            for filename in listdir(subdir):
                                group_files.append((filename, idx, subdir))
                        else:
                            # Bugfix: append() takes a single argument —
                            # the original passed three and raised TypeError.
                            group_files.append((basename(subdir), idx, dirname(subdir)))
                    # Sort by filename, falling back to the initial xmlfolder
                    # order on equal names (Python 3 compatible replacement
                    # for the old cmp-based sort function).
                    group_files.sort(key=lambda entry: (entry[0], entry[1]))
                    filenames = [join(f[2], f[0]) for f in group_files]
                elif isdir(xmlfolder):
                    filenames = []
                    for filename in listdir(xmlfolder):
                        filenames.append(join(xmlfolder, filename))
                    filenames.sort()
                else:
                    filenames = [xmlfolder]
                for xmlfile in filenames:
                    if xmlfile.endswith('.xml'):
                        documents.append((xmlfile, self.parse_xmlfile(xmlfile)))
        return documents

    def save_xmlfile(self, xmlfilename, xml):  # pylint: disable=R0201
        """Write a bunch of XML on the disk.
        """
        with open(xmlfilename, 'w') as xmlfh:
            output = tostring(xml, pretty_print=True, encoding="UTF-8", xml_declaration=True)
            if sys.version_info[0] >= 3:
                # The serializer returns bytes; the file is opened in text mode.
                output = output.decode('utf8')
            xmlfh.write(output)
|
|
@ -0,0 +1,235 @@
|
|||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
|
||||
<!-- ===================================================================== -->
|
||||
|
||||
<!-- Definition de la DTD du fichier creole -->
|
||||
|
||||
<!-- ===================================================================== -->
|
||||
|
||||
<!--
|
||||
# Conception :
|
||||
# Eole (http://eole.orion.education.fr)
|
||||
|
||||
# Copyright (C) 2005-2018
|
||||
|
||||
# distribue sous la licence GPL-2
|
||||
|
||||
# En attendant une traduction officielle de la GPL, la notice de
|
||||
# copyright demeure en anglais.
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
|
||||
# Se reporter a la documentation envoyee avec le programme pour la notice.
|
||||
|
||||
-->
|
||||
<!--================ -->
|
||||
<!-- root element -->
|
||||
<!-- =============== -->
|
||||
|
||||
<!ELEMENT creole (containers | files | family_action | variables | constraints | help)*>
|
||||
|
||||
<!-- ============== -->
|
||||
<!-- files element -->
|
||||
<!-- ============== -->
|
||||
|
||||
<!ELEMENT family_action (action)>
|
||||
<!ATTLIST family_action name CDATA #REQUIRED>
|
||||
<!ATTLIST family_action description CDATA #IMPLIED>
|
||||
<!ATTLIST family_action color CDATA #IMPLIED>
|
||||
<!ATTLIST family_action image CDATA #IMPLIED>
|
||||
<!ELEMENT action ((input* | profile* | ewtapp* | tag* | saltaction*)*)>
|
||||
<!ATTLIST action type (form|custom|external|reader|apache) "custom">
|
||||
<!ATTLIST action title CDATA #REQUIRED>
|
||||
<!ATTLIST action description CDATA #REQUIRED>
|
||||
<!ATTLIST action rewrite CDATA #IMPLIED>
|
||||
<!ATTLIST action image CDATA #IMPLIED>
|
||||
<!ATTLIST action actionlist CDATA #IMPLIED>
|
||||
<!-- for apache action -->
|
||||
<!ATTLIST action apache_path CDATA #IMPLIED>
|
||||
<!ATTLIST action apache_path_type (FilenameOption|SymLinkOption) "FilenameOption">
|
||||
<!-- for external action -->
|
||||
<!ATTLIST action url CDATA #IMPLIED>
|
||||
<!ATTLIST action url_type (URLOption|SymLinkOption) "URLOption">
|
||||
<!-- for form action -->
|
||||
<!ATTLIST action save (True|False) "False">
|
||||
<!ELEMENT files ((service* | service_access* | service_restriction* | package* | file*)*)>
|
||||
|
||||
<!ELEMENT containers ((container* | all*)*)>
|
||||
|
||||
<!ELEMENT container ((service* | service_access* | service_restriction* | interface* | package* | file* | disknod* | host* | fstab*)*) >
|
||||
<!ATTLIST container name CDATA #REQUIRED >
|
||||
<!ATTLIST container id CDATA #IMPLIED >
|
||||
<!ATTLIST container group CDATA #IMPLIED >
|
||||
|
||||
<!ELEMENT all ((service* | interface* | package* | file* | disknod* | host* | fstab*)*) >
|
||||
|
||||
<!ELEMENT service (#PCDATA)>
|
||||
<!ATTLIST service servicelist CDATA #IMPLIED >
|
||||
<!ATTLIST service instance_mode (when_container|when_no_container|always) "always">
|
||||
<!ATTLIST service method (systemd|upstart|apache|network) "systemd">
|
||||
<!ATTLIST service redefine (True|False) "False">
|
||||
|
||||
<!ELEMENT input (#PCDATA)>
|
||||
<!ELEMENT profile (#PCDATA)>
|
||||
<!ELEMENT ewtapp (#PCDATA)>
|
||||
<!ELEMENT tag (#PCDATA)>
|
||||
<!ELEMENT saltaction (#PCDATA)>
|
||||
|
||||
<!ELEMENT service_access ((port | tcpwrapper)*)>
|
||||
<!ATTLIST service_access service CDATA #REQUIRED >
|
||||
|
||||
<!ELEMENT port (#PCDATA)> <!--port_type-->
|
||||
<!ATTLIST port port_type (PortOption|SymLinkOption) "PortOption">
|
||||
<!ATTLIST port service_accesslist CDATA #IMPLIED >
|
||||
<!ATTLIST port protocol (tcp|udp) "tcp">
|
||||
|
||||
<!ELEMENT tcpwrapper (#PCDATA)> <!--tcpwrapper_type-->
|
||||
<!ATTLIST tcpwrapper tcpwrapper_type (UnicodeOption|SymLinkOption) "UnicodeOption">
|
||||
<!ATTLIST tcpwrapper service_accesslist CDATA #IMPLIED >
|
||||
|
||||
<!ELEMENT service_restriction (ip*)>
|
||||
<!ATTLIST service_restriction service CDATA #REQUIRED >
|
||||
|
||||
<!ELEMENT ip (#PCDATA)> <!--ip_type-->
|
||||
<!ATTLIST ip service_restrictionlist CDATA #IMPLIED >
|
||||
<!ATTLIST ip ip_type (NetworkOption|SymLinkOption) "NetworkOption">
|
||||
<!ATTLIST ip interface_type (UnicodeOption|SymLinkOption) "UnicodeOption">
|
||||
<!ATTLIST ip interface CDATA #REQUIRED> <!--interface_type-->
|
||||
<!ATTLIST ip netmask_type (NetmaskOption|SymLinkOption) "NetmaskOption">
|
||||
<!ATTLIST ip netmask CDATA "255.255.255.255"> <!--netmask_type-->
|
||||
|
||||
<!ELEMENT interface (#PCDATA)>
|
||||
<!ATTLIST interface interfacelist CDATA #IMPLIED >
|
||||
<!ATTLIST interface linkto CDATA #REQUIRED >
|
||||
<!ATTLIST interface ip CDATA #REQUIRED> <!--SymLinkOption-->
|
||||
<!ATTLIST interface ip_type (SymLinkOption) "SymLinkOption">
|
||||
<!ATTLIST interface mask CDATA #REQUIRED> <!--SymLinkOption-->
|
||||
<!ATTLIST interface mask_type (SymLinkOption) "SymLinkOption">
|
||||
<!ATTLIST interface bcast CDATA #IMPLIED> <!--SymLinkOption-->
|
||||
<!ATTLIST interface bcast_type (SymLinkOption) "SymLinkOption">
|
||||
<!ATTLIST interface gateway CDATA #IMPLIED> <!--SymLinkOption-->
|
||||
<!ATTLIST interface gateway_type (SymLinkOption) "SymLinkOption">
|
||||
<!ATTLIST interface method (bridge|macvlan) "macvlan" >
|
||||
<!ATTLIST interface redefine (True|False) "False">
|
||||
|
||||
<!ELEMENT host EMPTY >
|
||||
<!ATTLIST host hostlist CDATA #IMPLIED >
|
||||
<!ATTLIST host name CDATA #REQUIRED > <!--SymLinkOption-->
|
||||
<!ATTLIST host name_type (SymLinkOption) "SymLinkOption">
|
||||
<!ATTLIST host ip CDATA #REQUIRED > <!--SymLinkOption-->
|
||||
<!ATTLIST host ip_type (SymLinkOption) "SymLinkOption">
|
||||
<!ATTLIST host crossed (True|False) "True" >
|
||||
<!ATTLIST host instance_mode (when_container|when_no_container|always) "always">
|
||||
<!ATTLIST host comment CDATA #IMPLIED >
|
||||
|
||||
<!ELEMENT fstab EMPTY >
|
||||
<!ATTLIST fstab name CDATA #REQUIRED> <!--name_type-->
|
||||
<!ATTLIST fstab name_type (FilenameOption|SymLinkOption) "FilenameOption">
|
||||
<!ATTLIST fstab type (bind|normal) "bind">
|
||||
<!ATTLIST fstab fstype (auto|ext3|ext4|nfs|smb) "auto">
|
||||
<!ATTLIST fstab mount_point CDATA #IMPLIED> <!--mount_point_type-->
|
||||
<!ATTLIST fstab mount_point_type (FilenameOption|SymLinkOption) "FilenameOption">
|
||||
<!ATTLIST fstab options CDATA #IMPLIED>
|
||||
<!ATTLIST fstab checks CDATA #IMPLIED>
|
||||
<!ATTLIST fstab fstablist CDATA #IMPLIED>
|
||||
<!ATTLIST fstab instance_mode (when_container|when_no_container|always) "when_container">
|
||||
|
||||
<!ELEMENT package (#PCDATA)>
|
||||
<!ATTLIST package instance_mode (when_container|when_no_container|always) "always">
|
||||
|
||||
<!ELEMENT disknod (#PCDATA)>
|
||||
|
||||
<!ELEMENT file EMPTY>
|
||||
<!ATTLIST file name CDATA #REQUIRED >
|
||||
<!ATTLIST file source CDATA #IMPLIED>
|
||||
<!ATTLIST file mode CDATA #IMPLIED >
|
||||
<!ATTLIST file owner CDATA #IMPLIED >
|
||||
<!ATTLIST file group CDATA #IMPLIED >
|
||||
<!ATTLIST file filelist CDATA #IMPLIED >
|
||||
<!ATTLIST file mkdir (True|False) "False">
|
||||
<!ATTLIST file instance_mode (when_container|when_no_container|always) "always">
|
||||
<!ATTLIST file rm (True|False) "False">
|
||||
<!ATTLIST file del_comment CDATA #IMPLIED >
|
||||
<!ATTLIST file redefine (True|False) "False">
|
||||
|
||||
<!ELEMENT variables (family*, separators*)>
|
||||
<!ELEMENT family (#PCDATA | variable)*>
|
||||
<!ATTLIST family name CDATA #REQUIRED>
|
||||
<!ATTLIST family description CDATA #IMPLIED>
|
||||
<!ATTLIST family mode (basic|normal|expert) "basic">
|
||||
<!ATTLIST family icon CDATA #IMPLIED>
|
||||
<!ATTLIST family hidden (True|False) "False">
|
||||
|
||||
<!ELEMENT variable (#PCDATA | value)*>
|
||||
<!ATTLIST variable name CDATA #REQUIRED>
|
||||
<!ATTLIST variable type CDATA #IMPLIED>
|
||||
<!ATTLIST variable description CDATA #IMPLIED>
|
||||
<!ATTLIST variable hidden (True|False) "False">
|
||||
<!ATTLIST variable disabled (True|False) "False">
|
||||
<!ATTLIST variable multi (True|False) "False">
|
||||
<!ATTLIST variable redefine (True|False) "False">
|
||||
<!ATTLIST variable exists (True|False) "True">
|
||||
<!ATTLIST variable mandatory (True|False) "False">
|
||||
<!ATTLIST variable auto_freeze (True|False) "False">
|
||||
<!ATTLIST variable auto_save (True|False) "False">
|
||||
<!ATTLIST variable mode (basic|normal|expert) "normal">
|
||||
<!ATTLIST variable remove_check (True|False) "False">
|
||||
<!ATTLIST variable remove_condition (True|False) "False">
|
||||
|
||||
<!ELEMENT separators (separator*)>
|
||||
|
||||
<!ELEMENT separator (#PCDATA)>
|
||||
<!ATTLIST separator name CDATA #REQUIRED>
|
||||
<!ATTLIST separator never_hidden CDATA #IMPLIED>
|
||||
|
||||
<!ELEMENT value (#PCDATA)>
|
||||
|
||||
<!ELEMENT constraints ((fill* | check* | condition* | auto* | group*)*)>
|
||||
<!ELEMENT fill (param*)>
|
||||
<!ATTLIST fill name CDATA #REQUIRED>
|
||||
<!ATTLIST fill target CDATA #REQUIRED>
|
||||
|
||||
<!ELEMENT check (param*)>
|
||||
<!ATTLIST check name CDATA #REQUIRED>
|
||||
<!ATTLIST check target CDATA #REQUIRED>
|
||||
<!ATTLIST check level (error|warning) "error">
|
||||
<!ATTLIST check probe (True|False) "False">
|
||||
|
||||
<!ELEMENT auto ((param)*)>
|
||||
<!ATTLIST auto name CDATA #REQUIRED>
|
||||
<!ATTLIST auto target CDATA #REQUIRED>
|
||||
|
||||
<!ELEMENT condition ((target | param)+ )>
|
||||
<!ATTLIST condition name CDATA #REQUIRED>
|
||||
<!ATTLIST condition source CDATA #REQUIRED>
|
||||
<!ATTLIST condition fallback (True|False) "False">
|
||||
|
||||
<!ELEMENT group (slave+)>
|
||||
<!ATTLIST group master CDATA #REQUIRED>
|
||||
|
||||
<!ELEMENT param (#PCDATA)>
|
||||
<!ATTLIST param type (string|eole|number|container|context|python) "string">
|
||||
<!ATTLIST param name CDATA #IMPLIED>
|
||||
<!ATTLIST param hidden (True|False) "True">
|
||||
<!ATTLIST param optional (True|False) "False">
|
||||
|
||||
<!ELEMENT target (#PCDATA)>
|
||||
<!ATTLIST target type (family|filelist|servicelist|interfacelist|variable|service_accesslist|service_restrictionlist|hostlist|fstablist|actionlist) "variable">
|
||||
<!ATTLIST target optional (True|False) "False">
|
||||
|
||||
<!ELEMENT slave (#PCDATA)>
|
||||
|
||||
<!ELEMENT help ((variable* | family*)*)>
|
||||
|
|
@ -0,0 +1,64 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
###########################################################################
|
||||
# Eole NG - 2009
|
||||
# Copyright Pole de Competence Eole (Ministere Education - Academie Dijon)
|
||||
# http://eole.orion.education.fr - eole@ac-dijon.fr
|
||||
#
|
||||
# Licence CeCill
|
||||
# cf: http://www.cecill.info/licences.fr.html
|
||||
###########################################################################
|
||||
|
||||
import sys
|
||||
import socket
|
||||
from os.path import isfile
|
||||
from os import system, stat
|
||||
from pyeole.httprequest import HTTPRequest
|
||||
from creole.config import configeol
|
||||
from creole.client import CreoleClient
|
||||
|
||||
client = CreoleClient()
|
||||
|
||||
# adresse IP et port du serveur d'enregistrement
|
||||
server = "http://194.167.18.21/apps/AutoDiag/index.n/diagnose"
|
||||
md5file = "/etc/eole/.server.MD5"
|
||||
module = "%s-%s" % (client.get_creole('eole_module'), client.get_creole('eole_version'))
|
||||
|
||||
def get_md5():
    """Return the server identifier (md5 of config.eol).

    The value is cached in `md5file`; it is (re)computed when the cache
    file is missing or empty.
    """
    if not isfile(md5file) or stat(md5file).st_size == 0:
        # (Re)generate the cache; both paths are module constants, not
        # user input, so building the shell command is acceptable here.
        system("md5sum %s | awk '{print $1}' > %s" % (configeol, md5file))
    # Bugfix: the Python 2-only 'file' builtin leaked an open handle;
    # use open() in a context manager instead.
    with open(md5file) as fp:
        return fp.read().split()[0]
|
||||
|
||||
def get_proxy():
    """Return the HTTP proxy URL to use, or '' when no proxy is enabled."""
    # Guard clause: no client proxy configured.
    if client.get_creole('activer_proxy_client') != 'oui':
        return ''
    address = client.get_creole('proxy_client_adresse')
    port = client.get_creole('proxy_client_port')
    return "http://{0}:{1}".format(address, port)
|
||||
|
||||
if __name__ == "__main__":
    # Collect the identification data sent to the registration server.
    id5 = get_md5()
    rne = client.get_creole('numero_etab')
    # 'dep' is the department code: the first three characters of the RNE.
    data = {"ID5":id5, "module":module, "rne":rne, "dep":rne[0:3]}
    # Keep the whole exchange short-lived: 5 second socket timeout.
    socket.setdefaulttimeout(5)
    proxy = get_proxy()
    if proxy != '':
        # first attempt: through the configured proxy
        try:
            req = HTTPRequest(proxy={'http':proxy})
            req.request(server, post_datas=data)
        except:
            # best effort: fall through and retry without the proxy
            pass
        else:
            sys.exit(0)
    # second attempt (or first one when no proxy is configured): direct
    try:
        req = HTTPRequest()
        req.request(server, post_datas=data)
    except:
        sys.exit(1)
    else:
        sys.exit(0)
|
|
@ -0,0 +1,2 @@
|
|||
"""Module de fonctions supplémentaires accessibles à creole. Tous les fichiers python
|
||||
contenus dans ce répertoire sont lus par le module eosfunc de creole"""
|
|
@ -0,0 +1,69 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: UTF-8 -*-
|
||||
"""
|
||||
script de generation d'un certificat ssl
|
||||
prend un nom de fichier facultatif en argument (destination du certificat)
|
||||
|
||||
usage::
|
||||
|
||||
soit
|
||||
%prog (-fc) [nom_certif]
|
||||
soit
|
||||
%prog (-f)
|
||||
|
||||
si [nom_certif] non renseigne, regenere tous les certificats par defaut ainsi que la ca locale.
|
||||
Sinon, ne genere que [nom_certif]
|
||||
|
||||
-f :force la regeneration du (ou des) certificat(s) s'il(s) existe(nt)
|
||||
-c : dans le cas de la generation d'un seul certificat, on copie la clef
|
||||
|
||||
"""
|
||||
import sys, os
|
||||
from optparse import OptionParser
|
||||
|
||||
from creole import cert
|
||||
from pyeole.encode import normalize
|
||||
|
||||
def parse_command_line():
    """Parse the command-line flags.

    Returns (options, args); at most one positional certificate name is
    accepted, otherwise the parser exits with an error.
    """
    parser = OptionParser(__doc__)
    # Both flags are simple booleans; declare them table-driven.
    for flag, dest, helptext in (
            ("-c", "copy", "copie de la clef"),
            ("-f", "regen", "force la regeneration de la clef")):
        parser.add_option(flag, action="store_true", dest=dest,
                          default=False, help=helptext)
    options, args = parser.parse_args()
    if len(args) > 1:
        parser.error("Il faut au maximum un certificat")
    return options, args
|
||||
|
||||
options, args = parse_command_line()

regen = options.regen
copy = options.copy

# Optional positional argument: the single certificate to generate.
if len(args) == 1:
    certfile = args[0]
else:
    certfile = None

try:
    # NOTE(review): presumably refreshes the CA hash links when needed —
    # see creole.cert for the exact contract.
    cert.rehash_if_needed()
    if certfile != None:
        certfile = os.path.abspath(certfile)
        dest_dir = os.path.dirname(certfile)
        if not os.path.isdir(dest_dir):
            print "Répertoire de destination inexistant (%s)" % dest_dir
            sys.exit(1)
        print "Generation du certificat machine"
        cert.gen_certif(certfile, regen=regen, copy_key=copy)
    else:
        # generate every default certificate (CA, eole, scribe...)
        cert.gen_certs(regen=regen)
    sys.exit(0)
except Exception, err:
    print "Erreur : "
    print u'{0}'.format(normalize(err))
    sys.exit(1)
|
|
@ -0,0 +1,26 @@
|
|||
#! /usr/bin/env python
|
||||
# -*- coding: UTF-8 -*-
|
||||
"""
|
||||
Test des patches pour diagnose
|
||||
réutilisation du code de zephir-client
|
||||
"""
|
||||
import sys
|
||||
from glob import glob
|
||||
from os.path import basename
|
||||
from creole import utils
|
||||
from creole.config import patch_dir
|
||||
from zephir.monitor.agents import patches
|
||||
from os.path import join
|
||||
|
||||
# Collect every patch shipped by the module, including variant patches.
patchs = glob(join(patch_dir, '*.patch'))
patchs.extend(glob(join(patch_dir, 'variante', '*.patch')))
err = []
for patch in patchs:
    # NOTE(review): indexing .values() only works on Python 2 (this
    # script uses the Python 2 print statement); wrap in list() if it
    # is ever ported to Python 3.
    verif = patches.verify_patch(patch).values()
    if len(verif) > 0 and len(verif[0]) > 0:
        err.append(basename(patch))
if len(err) != 0:
    utils.print_red('Erreur')
    print "fichiers : %s" % (", ".join(err),)
else:
    utils.print_green('Ok')
|
|
@ -0,0 +1,10 @@
|
|||
#!/bin/sh
# Deprecation stub: the FonctionsEoleNg shell library must no longer be
# sourced.  Print a migration message to stderr and fail loudly.

echo "La bibliothèque shell FonctionsEoleNg ne doit plus être utilisée." >&2
# ${0} names the script that sourced/executed this stub; point the user at it.
if [ -n "${0}" ]
then
    echo "Merci de corriger le code de '${0}'" >&2
fi
echo ''
echo "Voir la documentation http://dev-eole.ac-dijon.fr/projects/eole/wiki/PrepareEOLE24" >&2
# Non-zero status so callers notice the library is gone.
exit 255
|
|
@ -0,0 +1,100 @@
|
|||
|
||||
/* Body color */
|
||||
body { background: #ffffff; color: #000000; }
|
||||
|
||||
/* Tables */
|
||||
table.summary, table.details, table.index
|
||||
{ background: #e8f0f8; color: #000000; }
|
||||
tr.summary, tr.details, tr.index
|
||||
{ background: #70b0f0; color: #000000;
|
||||
text-align: left; font-size: 120%; }
|
||||
tr.group { background: #c0e0f8; color: #000000;
|
||||
text-align: left; font-size: 120%;
|
||||
font-style: italic; }
|
||||
|
||||
/* Documentation page titles */
|
||||
h2.module { margin-top: 0.2em; }
|
||||
h2.class { margin-top: 0.2em; }
|
||||
|
||||
/* Headings */
|
||||
h1.heading { font-size: +140%; font-style: italic;
|
||||
font-weight: bold; }
|
||||
h2.heading { font-size: +125%; font-style: italic;
|
||||
font-weight: bold; }
|
||||
h3.heading { font-size: +110%; font-style: italic;
|
||||
font-weight: normal; }
|
||||
|
||||
/* Base tree */
|
||||
pre.base-tree { font-size: 80%; margin: 0; }
|
||||
|
||||
/* Details Sections */
|
||||
table.func-details { background: #e8f0f8; color: #000000;
|
||||
border: 2px groove #c0d0d0;
|
||||
padding: 0 1em 0 1em; margin: 0.4em 0 0 0; }
|
||||
h3.func-detail { background: transparent; color: #000000;
|
||||
margin: 0 0 1em 0; }
|
||||
|
||||
table.var-details { background: #e8f0f8; color: #000000;
|
||||
border: 2px groove #c0d0d0;
|
||||
padding: 0 1em 0 1em; margin: 0.4em 0 0 0; }
|
||||
h3.var-details { background: transparent; color: #000000;
|
||||
margin: 0 0 1em 0; }
|
||||
|
||||
/* Function signatures */
|
||||
.sig { background: transparent; color: #000000;
|
||||
font-weight: bold; }
|
||||
.sig-name { background: transparent; color: #006080; }
|
||||
.sig-arg, .sig-kwarg, .sig-vararg
|
||||
{ background: transparent; color: #008060; }
|
||||
.sig-default { background: transparent; color: #602000; }
|
||||
.summary-sig { background: transparent; color: #000000; }
|
||||
.summary-sig-name { background: transparent; color: #204080; }
|
||||
.summary-sig-arg, .summary-sig-kwarg, .summary-sig-vararg
|
||||
{ background: transparent; color: #008060; }
|
||||
|
||||
/* Doctest blocks */
|
||||
.py-src { background: transparent; color: #000000; }
|
||||
.py-prompt { background: transparent; color: #005050;
|
||||
font-weight: bold;}
|
||||
.py-string { background: transparent; color: #006030; }
|
||||
.py-comment { background: transparent; color: #003060; }
|
||||
.py-keyword { background: transparent; color: #600000; }
|
||||
.py-output { background: transparent; color: #404040; }
|
||||
pre.doctestblock { background: #f4faff; color: #000000;
|
||||
padding: .5em; margin: 1em;
|
||||
border: 1px solid #708890; }
|
||||
table pre.doctestblock
|
||||
{ background: #dce4ec; color: #000000;
|
||||
padding: .5em; margin: 1em;
|
||||
border: 1px solid #708890; }
|
||||
|
||||
/* Variable values */
|
||||
pre.variable { background: #dce4ec; color: #000000;
|
||||
padding: .5em; margin: 0;
|
||||
border: 1px solid #708890; }
|
||||
.variable-linewrap { background: transparent; color: #604000; }
|
||||
.variable-ellipsis { background: transparent; color: #604000; }
|
||||
.variable-quote { background: transparent; color: #604000; }
|
||||
.re { background: transparent; color: #000000; }
|
||||
.re-char { background: transparent; color: #006030; }
|
||||
.re-op { background: transparent; color: #600000; }
|
||||
.re-group { background: transparent; color: #003060; }
|
||||
.re-ref { background: transparent; color: #404040; }
|
||||
|
||||
/* Navigation bar */
|
||||
table.navbar { background: #a0c0ff; color: #0000ff;
|
||||
border: 2px groove #c0d0d0; }
|
||||
th.navbar { background: #a0c0ff; color: #0000ff; }
|
||||
th.navselect { background: #70b0ff; color: #000000; }
|
||||
.nomargin { margin: 0; }
|
||||
|
||||
/* Links */
|
||||
a:link { background: transparent; color: #0000ff; }
|
||||
a:visited { background: transparent; color: #204080; }
|
||||
a.navbar:link { background: transparent; color: #0000ff;
|
||||
text-decoration: none; }
|
||||
a.navbar:visited { background: transparent; color: #204080;
|
||||
text-decoration: none; }
|
||||
|
||||
/* Lists */
|
||||
ul { margin-top: 0; }
|
|
@ -0,0 +1,60 @@
|
|||
génération des certificats
|
||||
==========================
|
||||
|
||||
mode opératoire
|
||||
|
||||
|
||||
au premier lancement de ``gen_certif.py``
|
||||
------------------------------------------
|
||||
|
||||
- vérifie l'existence d'une CA ou non
|
||||
- génère la CA
|
||||
- génère les certificats par défaut (clef privée, requète de certificat)
|
||||
- signature des certificats
|
||||
|
||||
aux lancements ultérieurs
|
||||
-------------------------
|
||||
|
||||
|
||||
- vérifie l'existence d'une CA ou non
|
||||
- génère le certificat passé en argument
|
||||
|
||||
::
|
||||
|
||||
gen_certif.py (-f) [nom_certif]
|
||||
|
||||
si [nom_certif] non renseigné, regénère tous les certificats par défaut
|
||||
ainsi que la CA locale. Sinon, ne génère que [nom_certif]
|
||||
-f :force la regénération du (ou des) certificat(s) s'il(s) existe(nt)
|
||||
|
||||
|
||||
``regen``
|
||||
|
||||
attribut permettant de forcer (ou pas) la regénération
|
||||
si ``regen==True`` alors les certificats sont regénérés même s'ils existent
|
||||
si ``regen==False`` alors les certificats ne sont générés que s'ils
|
||||
n'existent pas.
|
||||
|
||||
api
|
||||
----
|
||||
|
||||
- génération d'un seul certificat :
|
||||
|
||||
``cert.gen_certif(certfile,regen=regen, copy_key=copy)``
|
||||
|
||||
|
||||
- génération de tous les certificats :
|
||||
|
||||
``cert.gen_certs(regen=regen)``
|
||||
|
||||
|
||||
|
||||
::
|
||||
|
||||
gen_certs()
|
||||
|-> gen_ca()
|
||||
|-> certif_loader()
|
||||
|-> gen_certif()
|
||||
|-> finalise_certs()
|
||||
|
||||
|
|
@ -0,0 +1,2 @@
|
|||
rm -f *.html
|
||||
rm -f api/*.html
|
|
@ -0,0 +1,15 @@
|
|||
|
||||
|
||||
process
|
||||
-------
|
||||
|
||||
- point d'entrée : `process.py` méthode *run()*
|
||||
- lecture des fichiers dictionnaires *xml*
|
||||
- lecture du fichier */etc/eole/config.eol* pour remplir l'objet
|
||||
dictionnaire
|
||||
|
||||
|
||||
mapping avec la ligne de commande
|
||||
---------------------------------
|
||||
|
||||
.. TODO
|
|
@ -0,0 +1,377 @@
|
|||
/*
|
||||
:Author: David Goodger
|
||||
:Contact: goodger at users.sourceforge.net
|
||||
:date: $Date: 2004/11/11 23:11:44 $
|
||||
:version: $Revision: 1.1 $
|
||||
:copyright: This stylesheet has been placed in the public domain.
|
||||
|
||||
Default cascading style sheet for the HTML output of Docutils.
|
||||
*/
|
||||
|
||||
/* "! important" is used here to override other ``margin-top`` and
|
||||
``margin-bottom`` styles that are later in the stylesheet or
|
||||
more specific. See <http://www.w3.org/TR/CSS1#the-cascade>. */
|
||||
|
||||
html, body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: Georgia, arial, sans-serif;
|
||||
padding: 3em;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 130%;
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-size: 110%;
|
||||
}
|
||||
|
||||
blockquote {
|
||||
width: 70%;
|
||||
margin: 2em auto;
|
||||
padding: 1em;
|
||||
background-color: #FFEEEE;
|
||||
border: 1px solid #EEDDDD;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.title {
|
||||
font-size: 180%;
|
||||
}
|
||||
|
||||
.subtitle {
|
||||
font-size: 100%;
|
||||
}
|
||||
|
||||
.first {
|
||||
margin-top: 0 ! important }
|
||||
|
||||
.last {
|
||||
margin-bottom: 0 ! important }
|
||||
|
||||
.hidden {
|
||||
display: none }
|
||||
|
||||
a.toc-backref {
|
||||
text-decoration: none ;
|
||||
color: black }
|
||||
|
||||
blockquote.epigraph {
|
||||
margin: 2em 5em ; }
|
||||
|
||||
dd {
|
||||
margin-bottom: 0.5em }
|
||||
|
||||
/* Uncomment (& remove this text!) to get bold-faced definition list terms
|
||||
dt {
|
||||
font-weight: bold }
|
||||
*/
|
||||
|
||||
div.abstract {
|
||||
margin: 2em 5em }
|
||||
|
||||
div.abstract p.topic-title {
|
||||
font-weight: bold ;
|
||||
text-align: center }
|
||||
|
||||
div.admonition, div.attention, div.caution, div.danger, div.error,
|
||||
div.hint, div.important, div.note, div.tip, div.warning {
|
||||
margin: 2em ;
|
||||
border: medium outset ;
|
||||
padding: 1em }
|
||||
|
||||
div.admonition p.admonition-title, div.hint p.admonition-title,
|
||||
div.important p.admonition-title, div.note p.admonition-title,
|
||||
div.tip p.admonition-title {
|
||||
font-weight: bold ;
|
||||
font-family: sans-serif }
|
||||
|
||||
div.attention p.admonition-title, div.caution p.admonition-title,
|
||||
div.danger p.admonition-title, div.error p.admonition-title,
|
||||
div.warning p.admonition-title {
|
||||
color: red ;
|
||||
font-weight: bold ;
|
||||
font-family: sans-serif }
|
||||
|
||||
div.compound .compound-first, div.compound .compound-middle {
|
||||
margin-bottom: 0.5em }
|
||||
|
||||
div.compound .compound-last, div.compound .compound-middle {
|
||||
margin-top: 0.5em }
|
||||
|
||||
div.dedication {
|
||||
margin: 2em 5em ;
|
||||
text-align: center ;
|
||||
font-style: italic }
|
||||
|
||||
div.dedication p.topic-title {
|
||||
font-weight: bold ;
|
||||
font-style: normal }
|
||||
|
||||
div.figure {
|
||||
margin-left: 2em }
|
||||
|
||||
div.footer, div.header {
|
||||
font-size: smaller }
|
||||
|
||||
div.line-block {
|
||||
display: block ;
|
||||
margin-top: 1em ;
|
||||
margin-bottom: 1em }
|
||||
|
||||
div.line-block div.line-block {
|
||||
margin-top: 0 ;
|
||||
margin-bottom: 0 ;
|
||||
margin-left: 1.5em }
|
||||
|
||||
div.sidebar {
|
||||
margin-left: 1em ;
|
||||
border: medium outset ;
|
||||
padding: 0em 1em ;
|
||||
background-color: #ffffee ;
|
||||
width: 40% ;
|
||||
float: right ;
|
||||
clear: right }
|
||||
|
||||
div.sidebar p.rubric {
|
||||
font-family: sans-serif ;
|
||||
font-size: medium }
|
||||
|
||||
div.system-messages {
|
||||
margin: 5em }
|
||||
|
||||
div.system-messages h1 {
|
||||
color: red }
|
||||
|
||||
div.system-message {
|
||||
border: medium outset ;
|
||||
padding: 1em }
|
||||
|
||||
div.system-message p.system-message-title {
|
||||
color: red ;
|
||||
font-weight: bold }
|
||||
|
||||
div.topic {
|
||||
margin: 2em }
|
||||
|
||||
h1.title {
|
||||
text-align: center }
|
||||
|
||||
h2.subtitle {
|
||||
text-align: center }
|
||||
|
||||
hr {
|
||||
width: 75% }
|
||||
|
||||
ol.simple, ul.simple {
|
||||
margin-bottom: 1em }
|
||||
|
||||
ol.arabic {
|
||||
list-style: decimal }
|
||||
|
||||
ol.loweralpha {
|
||||
list-style: lower-alpha }
|
||||
|
||||
ol.upperalpha {
|
||||
list-style: upper-alpha }
|
||||
|
||||
ol.lowerroman {
|
||||
list-style: lower-roman }
|
||||
|
||||
ol.upperroman {
|
||||
list-style: upper-roman }
|
||||
|
||||
p.attribution {
|
||||
text-align: right ;
|
||||
margin-left: 50% }
|
||||
|
||||
p.caption {
|
||||
font-style: italic }
|
||||
|
||||
p.credits {
|
||||
font-style: italic ;
|
||||
font-size: smaller }
|
||||
|
||||
p.label {
|
||||
white-space: nowrap }
|
||||
|
||||
p.rubric {
|
||||
font-weight: bold ;
|
||||
font-size: larger ;
|
||||
color: maroon ;
|
||||
text-align: center }
|
||||
|
||||
p.sidebar-title {
|
||||
font-family: sans-serif ;
|
||||
font-weight: bold ;
|
||||
font-size: larger }
|
||||
|
||||
p.sidebar-subtitle {
|
||||
font-family: sans-serif ;
|
||||
font-weight: bold }
|
||||
|
||||
p.topic-title {
|
||||
font-weight: bold }
|
||||
|
||||
pre.address {
|
||||
margin-bottom: 0 ;
|
||||
margin-top: 0 ;
|
||||
font-family: serif ;
|
||||
font-size: 100% }
|
||||
|
||||
pre.line-block {
|
||||
font-family: serif ;
|
||||
font-size: 100% }
|
||||
|
||||
.literal {
|
||||
color: #333;
|
||||
background-color: #EEE;
|
||||
}
|
||||
|
||||
pre.literal-block, pre.doctest-block {
|
||||
margin-left: 2em ;
|
||||
margin-right: 2em ;
|
||||
padding: 1em;
|
||||
color: #333;
|
||||
background-color: #EEE;}
|
||||
|
||||
span.classifier {
|
||||
font-family: sans-serif ;
|
||||
font-style: oblique }
|
||||
|
||||
span.classifier-delimiter {
|
||||
font-family: sans-serif ;
|
||||
font-weight: bold }
|
||||
|
||||
span.interpreted {
|
||||
font-family: sans-serif }
|
||||
|
||||
span.option {
|
||||
white-space: nowrap }
|
||||
|
||||
span.option-argument {
|
||||
font-style: italic }
|
||||
|
||||
span.pre {
|
||||
white-space: pre }
|
||||
|
||||
span.problematic {
|
||||
color: red }
|
||||
|
||||
table {
|
||||
margin-top: 0.5em ;
|
||||
margin-bottom: 0.5em }
|
||||
|
||||
table.citation {
|
||||
border-left: solid thin gray }
|
||||
|
||||
table.docinfo {
|
||||
margin: 2em 4em }
|
||||
|
||||
table.footnote {
|
||||
border-left: solid thin black }
|
||||
|
||||
td, th {
|
||||
padding-left: 0.5em ;
|
||||
padding-right: 0.5em ;
|
||||
vertical-align: top }
|
||||
|
||||
th.docinfo-name, th.field-name {
|
||||
font-weight: bold ;
|
||||
text-align: left ;
|
||||
white-space: nowrap }
|
||||
|
||||
h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
|
||||
font-size: 100% }
|
||||
|
||||
tt {
|
||||
background-color: #eeeeee
|
||||
}
|
||||
|
||||
ul.auto-toc {
|
||||
list-style-type: none }
|
||||
|
||||
.code-block {
|
||||
font-family: Courier New, Courier, monospace;
|
||||
font-size: 14px;
|
||||
margin: 0 2em;
|
||||
padding: 1em;
|
||||
color: #000;
|
||||
background-color: #EEE;
|
||||
border: 1px solid #DDD;
|
||||
}
|
||||
|
||||
/* Python markup *********************************************/
|
||||
/*Python keyword*/
|
||||
.p_word {
|
||||
color: #036;
|
||||
}
|
||||
/*Python identifier*/
|
||||
.p_identifier {
|
||||
color: #36C;
|
||||
}
|
||||
/*Python number*/
|
||||
.p_number {
|
||||
color: #36C;
|
||||
}
|
||||
/*other text*/
|
||||
.p_default {
|
||||
color: #036;
|
||||
}
|
||||
/*Python operator*/
|
||||
.p_operator {
|
||||
color: #036;
|
||||
}
|
||||
/*Python comment*/
|
||||
.p_commentline {
|
||||
color: #036;
|
||||
}
|
||||
/*function name*/
|
||||
.p_defname {
|
||||
color: #F63;
|
||||
font-weight: bold;
|
||||
}
|
||||
/*class name*/
|
||||
.p_classname {
|
||||
color: #F00;
|
||||
font-weight: bold;
|
||||
}
|
||||
/*string literals*/
|
||||
.p_character {
|
||||
color: green;
|
||||
}
|
||||
/*string literals*/
|
||||
.p_string {
|
||||
color: green;
|
||||
}
|
||||
/*triple-quoted strings*/
|
||||
.p_triple {
|
||||
color: green;
|
||||
}
|
||||
|
||||
/* HTML markup *********************************************/
|
||||
/*an html tag*/
|
||||
.h_tag {
|
||||
color: #36C;
|
||||
}
|
||||
/*text in a tag*/
|
||||
.h_default {
|
||||
color: #036;
|
||||
}
|
||||
/*attribute name*/
|
||||
.h_attribute {
|
||||
color: #6C3;
|
||||
}
|
||||
/*a double-quoted attribute value*/
|
||||
.h_doublestring {
|
||||
color: green;
|
||||
}
|
||||
/*attribute equals sign, for example*/
|
||||
.h_other {
|
||||
color: #036;
|
||||
}
|
||||
|
|
@ -0,0 +1 @@
|
|||
buildhtml.py --embed --stylesheet default.css --output-encoding iso-8859-1 --prune .svn --prune api/ --prune pydoctor --prune data .
|
|
@ -0,0 +1,3 @@
|
|||
cd ../creole
|
||||
epydoc --html --no-private --output ../doc/api .
|
||||
|
|
@ -0,0 +1,57 @@
|
|||
Templates créole
|
||||
================
|
||||
|
||||
comportement des templates
|
||||
--------------------------
|
||||
|
||||
Template_
|
||||
|
||||
.. _Template: api/creole.template.Template-class.html
|
||||
|
||||
validations
|
||||
-----------
|
||||
|
||||
Template.verify_
|
||||
|
||||
.. _Template.verify: api/creole.template.Template-class.html#verify
|
||||
|
||||
|
||||
|
||||
fichiers cibles
|
||||
fichiers modèle qui vont être instanciés au final (fichier destination)
|
||||
|
||||
|
||||
- le fichier source (templatisé) *doit* exister ainsi que le
|
||||
fichier de destination (le fichier de configuration effectif)
|
||||
portant le même nom :
|
||||
|
||||
- le fichier cible, c'est-à-dire le fichier de configuration
|
||||
instancié, doit être présent
|
||||
|
||||
|
||||
>>> import creole
|
||||
>>> from creole.template import Template
|
||||
|
||||
>>> try:
|
||||
... t = Template('nexistepas.txt', templatedir= '/tmp')
|
||||
... t.verify()
|
||||
... except creole.error.FileNotFound, e:
|
||||
... print e
|
||||
...
|
||||
le fichier /tmp/nexistepas.txt n'existe pas
|
||||
>>>
|
||||
|
||||
.. note:: les deux vérifications (template source et fichier
|
||||
destination) sont faites en même temps
|
||||
|
||||
- le répertoire source *doit* exister
|
||||
|
||||
>>> try:
|
||||
... t = Template('/etc/group', templatedir= '/reperoire/qui/n/existe/pas')
|
||||
... t.verify()
|
||||
... except creole.error.FileNotFound, e:
|
||||
... print e
|
||||
...
|
||||
le fichier /reperoire/qui/n/existe/pas/group n'existe pas
|
||||
>>>
|
||||
|
|
@ -0,0 +1,41 @@
|
|||
utilitaires techniques créole
|
||||
=============================
|
||||
|
||||
utilitaire de tests
|
||||
-------------------
|
||||
|
||||
|
||||
creolecat_
|
||||
|
||||
.. _creolecat: api/creole.creolecat-module.html
|
||||
|
||||
|
||||
un utilitaire de tests est mis à disposition pour ceux qui
|
||||
souhaitent tester leurs fichiers de template sans pour autant lancer
|
||||
une instanciation:
|
||||
|
||||
usage::
|
||||
|
||||
creolecat.py -x <path>/eole.xml -o <path>/test.txt testtemplate.tmpl
|
||||
|
||||
testtemplate est le fichier à instancier
|
||||
|
||||
lancer l'option --help pour plus de détails
|
||||
|
||||
utilitaire de conversion
|
||||
------------------------
|
||||
|
||||
conversion dans l'ancien langage de templating (notations *[%*)
|
||||
|
||||
pour ceux qui avaient déjà commencé leur activités de templating pour
|
||||
créole 2 (donc avec une autre notation), un utilitaire de conversion
|
||||
est mis à disposition.
|
||||
Il est dans la lib python creole et s'appelle creole2cheetah_
|
||||
|
||||
.. _creole2cheetah: api/creole.creole2cheetah-module.html
|
||||
|
||||
usage :
|
||||
|
||||
cd creole
|
||||
[creole] ./creole2cheetah.py [nom du fichier source] > [nom du fichier destination]
|
||||
|
|
@ -0,0 +1,201 @@
|
|||
Variables créole
|
||||
================
|
||||
|
||||
typeole_
|
||||
|
||||
.. _typeole: api/creole.typeole-module.html
|
||||
|
||||
variable créole
|
||||
|
||||
instance d'un objet type eole, à un nom de variable correspond
|
||||
peut-être plusieurs valeurs
|
||||
|
||||
>>> from creole import typeole
|
||||
>>> var = typeole.EoleVar('mavariable')
|
||||
>>> var.val
|
||||
[]
|
||||
>>> var.set_value('valeur')
|
||||
>>> var.set_value('defaut', default=True)
|
||||
>>> var.val
|
||||
['valeur']
|
||||
>>> var.valdefault
|
||||
['defaut']
|
||||
>>> var.description = 'variable de test'
|
||||
>>> var.description
|
||||
'variable de test'
|
||||
>>>
|
||||
|
||||
il est possible de créer une variable Eole à l'aide
|
||||
d'une factory :
|
||||
|
||||
>>> var2 = typeole.type_factory('string', 'test_string', valeole=["eole"], valdefault=["def"])
|
||||
>>> var2.get_value()
|
||||
['def']
|
||||
>>>
|
||||
|
||||
des vérifications de type sont faites au moment du *set_value()*
|
||||
|
||||
collecte des variables créole
|
||||
-----------------------------
|
||||
|
||||
collecte
|
||||
|
||||
Récupération de variables qui serviront à la constitution du dictionnaire Eole
|
||||
|
||||
Les données du dictionnaire sont collectées à partir de différents fichiers dans un premier format XML.
|
||||
|
||||
sur une machine cible, une collecte des variables eole est faite avec parse_dico_::
|
||||
|
||||
from creole.parsedico import parse_dico
|
||||
parse_dico()
|
||||
|
||||
.. ce test n'est pas lancé car il peut y avoir un dico sur le poste
|
||||
de travail
|
||||
|
||||
.. _parse_dico: api/creole.parsedico-module.html
|
||||
|
||||
Le dictionnaire créole est vide. Pour le remplir, il faut
|
||||
récupérer des données depuis un fichier xml initial::
|
||||
|
||||
my_dict = EoleDict()
|
||||
my_dict.read(join('/etc/eole/','eole.xml'))
|
||||
|
||||
.. TODO: read_string(self, xml_string)
|
||||
|
||||
Utilisation du dictionnaire
|
||||
---------------------------
|
||||
|
||||
dictionnaire
|
||||
|
||||
fichier au format xml contenant :
|
||||
- une liste de fichiers
|
||||
- une liste de variables
|
||||
|
||||
famille
|
||||
|
||||
Il s'agit d'un regroupement de variable utilisé pour la saisie : on parle alors de famille de variables
|
||||
|
||||
groupe
|
||||
|
||||
Il s'agit de variables de type `liste` dont les éléments sont liés aux éléments correspondants des autres variables du groupe :
|
||||
eth[2] aura un lien avec netmask[2] et network[2].
|
||||
|
||||
Plutôt que d'utiliser `parsedico`, construisons un dictionnaire creole EoleDict_ :
|
||||
|
||||
>>> from creole import cfgparser
|
||||
>>> from creole import typeole
|
||||
>>>
|
||||
>>> dico = cfgparser.EoleDict()
|
||||
>>> dico.variables['ip_eth'] = typeole.type_factory('string', 'ip_eth', val=['ip0', 'ip1', 'ip2'])
|
||||
>>> dico.variables['nom_etab'] = typeole.type_factory('string', 'nom_etab', val=['etab'])
|
||||
>>> dico.variables['vrai'] = typeole.type_factory('boolean', 'vrai', val=[True])
|
||||
>>> dico.variables['faux'] = typeole.type_factory('string', 'faux', val=['faux'])
|
||||
>>> dico.variables['toto'] = typeole.type_factory('string', 'toto', val=['toto'])
|
||||
|
||||
voici comment accéder aux variables créole
|
||||
|
||||
>>> assert dico.get_value('ip_eth') == ['ip0', 'ip1', 'ip2']
|
||||
>>> assert dico.get_value('nom_etab') == ['etab']
|
||||
|
||||
.. _EoleDict : api/creole.cfgparser.EoleDict-class.html
|
||||
|
||||
|
||||
variables de template
|
||||
-----------------------
|
||||
|
||||
|
||||
lorsqu'on utilise un appel de bas niveau de traitement de template,
|
||||
c'est-à-dire l'appel direct à la
|
||||
méthode process_ d'un template, il faut vérifier qu'une variable
|
||||
est bien instanciée avec le bon contexte de dictionnaire :
|
||||
|
||||
.. _process: api/creole.template.Template-class.html
|
||||
|
||||
>>> from creole.cfgparser import EoleDict
|
||||
>>> from creole import typeole
|
||||
>>> from creole.template import Template
|
||||
>>> dico = EoleDict()
|
||||
>>> dico.variables['toto'] = typeole.type_factory('string',
|
||||
... 'toto', val=['toto'], context=dico)
|
||||
>>> t = Template('data/dst/test.tmpl', templatedir= 'data/src')
|
||||
>>> t.verify()
|
||||
>>> t.process(dico)
|
||||
>>> f = open('data/dst/test.tmpl')
|
||||
>>> res = f.read()
|
||||
>>> f.close()
|
||||
>>> assert 'titi' not in res
|
||||
>>> dico.set_value('toto', 'titi')
|
||||
>>> t.process(dico)
|
||||
>>> f = open('data/dst/test.tmpl')
|
||||
>>> res = f.read()
|
||||
>>> f.close()
|
||||
>>> assert 'titi' in res
|
||||
|
||||
le contexte `dico` est passé à la variable `toto`::
|
||||
|
||||
dico.variables['toto'] = typeole.type_factory('string',
|
||||
'toto', val=['toto'], context=dico)
|
||||
|
||||
variables automatiques
|
||||
----------------------
|
||||
|
||||
variable automatique
|
||||
|
||||
variable présente dans le dictionnaire xml mais pas dans le fichier *.ini* de configuration.
|
||||
la valeur de cette variable (son appel à *.get_value()*) est soumise à une fonction de traitement
|
||||
spécifiée dans le xml, qui calcule la variable au lieu de formater sa valeur.
|
||||
|
||||
Une variable automatique simple n'est pas traitée différemment d'une variable dont la valeur est présente dans le dictionnaire et qui est soumise à une condition de vérification de sa valeur. Simplement, aucune vérification n'est effectuée et la valeur est calculée.
|
||||
|
||||
déclaration de la variable::
|
||||
|
||||
<variable name='server_mem' type='string' description='memoire du serveur' auto='True' />
|
||||
|
||||
déclaration de la fonction de remplissage::
|
||||
|
||||
<fill name='server_mem' target='server_mem' />
|
||||
|
||||
deux fonctions strictement automatiques sont implémentées: `server_mem` et `kernel_version`
|
||||
|
||||
variable semi-automatique
|
||||
|
||||
variable remplie automatiquement dans le cas d'une condition sur une autre variable,
|
||||
si cette condition n'est pas remplie, c'est l'utilisateur qui la remplit (ou une autre fonction).
|
||||
la condition est traitée à deux niveaux, dans la fonction de remplissage, et au niveau de l'affichage.
|
||||
On remplit donc deux fonctions pour ce conditionnement (une fonction fill avec la variable
|
||||
conditionnante en paramètre et une fonction condition qui conditionne l'affichage de la variable.
|
||||
exemple : récupération des adresses eth dans le cas où l'on n'a pas de dhcp.
|
||||
|
||||
déclaration de la variable semi-auto::
|
||||
|
||||
<variable name='eth0' type='string' auto='True'/>
|
||||
|
||||
déclaration de la variable qui définit la condition::
|
||||
|
||||
<variable name='dhcp' type='boolean' description='Activation du dhcp' >
|
||||
<value>non</value>
|
||||
</variable>
|
||||
<check name='valid_enum' target='dhcp'>
|
||||
<param>['oui','non']</param>
|
||||
</check>
|
||||
|
||||
déclaration de la fonction de contrôle d'automatisme, la variable eth0 est remplie automatiquement par la fonction
|
||||
*auto_eth* si le paramètre dhcp est égal à la condition *oui*::
|
||||
|
||||
<fill name='auto_eth' target='eth0'>
|
||||
<param>eth0</param>
|
||||
<param name='condition'>oui</param>
|
||||
<param type='eole' name='parametre'>dhcp</param>
|
||||
</fill>
|
||||
|
||||
déclaration de la fonction de contrôle d'éditabilité::
|
||||
|
||||
<condition name='hidden_if_in' source='dhcp'>
|
||||
<param>oui</param>
|
||||
<target type='variable'>eth0</target>
|
||||
</condition>
|
||||
|
||||
pour l'instant sont disponibles auto_eth, auto_netmask, auto_broadcast et auto_network.
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
le fichier de configuration créole
|
||||
==================================
|
||||
|
||||
format xml
|
||||
----------
|
||||
|
||||
Pour plus de documentation sur le format xml du dictionnaire créole,
|
||||
se référer à la documentation l'éditeur xml créole ( *exc*)
|
|
@ -0,0 +1,187 @@
|
|||
.\"
|
||||
.\" Manual page for Maj-Auto command
|
||||
.\"
|
||||
.TH Maj-Auto 8 "2016 september" "Maj-Auto 2.6.0" "Creole command - EOLE"
|
||||
|
||||
.SH NAME
|
||||
Maj-Auto \- Automatic update for EOLE servers
|
||||
|
||||
.SH SYNOPSIS
|
||||
.SY Maj-Auto
|
||||
.OP \-l {debug,info,warning,error,critical}
|
||||
.OP \-v
|
||||
.OP \-d
|
||||
.OP \-h
|
||||
.OP \-n
|
||||
.OP \-f
|
||||
.OP \-F
|
||||
.OP \-s
|
||||
.OP \-C\ |\ \-D [{eole,envole}\ [{eole,envole}\ ...]]
|
||||
.OP \-r
|
||||
.OP \-R
|
||||
.OP \-\-download
|
||||
.OP \-S \fIEOLE_MIRROR\fR
|
||||
.OP \-U \fIUBUNTU_MIRROR\fR
|
||||
.OP \-V \fIENVOLE_MIRROR\fR
|
||||
.OP \-c
|
||||
.OP \-W
|
||||
.OP \-i
|
||||
.YS
|
||||
.SH DESCRIPTION
|
||||
.B Maj-Auto
|
||||
command allows you to manually initiate the update of all packages changed since the release of the latest stable version.
|
||||
.br
|
||||
You benefit from security updates and critical bugfixes, but not the latest improvements.
|
||||
.br
|
||||
To take advantage of feature additions to the current version of the server, use the \fBMaj-Release\fR.
|
||||
.br
|
||||
For it the apt-get source file must be set up properly and the network must operate (\fBdiagnose\fR command).
|
||||
.br
|
||||
Mirror address is :
|
||||
\fIhttp://eole.ac-dijon.fr/eole\fB/eole\fR : EOLE repository
|
||||
.RS 48
|
||||
.br
|
||||
\fB/ubuntu\fR : Ubuntu repository
|
||||
.br
|
||||
\fB/envole\fR : Envole repository
|
||||
.SH OPTIONS
|
||||
The following options are supported:
|
||||
.TP
|
||||
\fB\-h\fR, \fB\-\-help\fR
|
||||
show this help message and exit
|
||||
.TP
|
||||
\fB\-n\fR, \fB\-\-dry\-run\fR
|
||||
run in dry\-run mode (force to True when using Query\-Auto).
|
||||
.TP
|
||||
\fB\-f\fR, \fB\-\-force\fR
|
||||
bypass Zephir authorizations.
|
||||
.TP
|
||||
\fB\-F\fR, \fB\-\-force_update\fR
|
||||
update your server without any confirmation.
|
||||
.TP
|
||||
\fB\-s\fR, \fB\-\-simulate\fR
|
||||
ask apt\-get to simulate packages installation
|
||||
.TP
|
||||
\fB\-C\fR [{eole,envole} [{eole,envole} ...]], \fB\-\-candidat\fR [{eole,envole} [{eole,envole} ...]]
|
||||
use testing packages.
|
||||
.TP
|
||||
\fB\-D\fR [{eole,envole} [{eole,envole} ...]], \fB\-\-devel\fR [{eole,envole} [{eole,envole} ...]]
|
||||
use development packages.
|
||||
.TP
|
||||
\fB\-r\fR, \fB\-\-reconfigure\fR
|
||||
run reconfigure on successful upgrade.
|
||||
.TP
|
||||
\fB\-R\fR, \fB\-\-reboot\fR
|
||||
run reconfigure on successful upgrade and reboot if
|
||||
necessary (implies \fB\-r\fR).
|
||||
.TP
|
||||
\fB\-\-download\fR
|
||||
only download packages in cache.
|
||||
.TP
|
||||
\fB\-S\fR EOLE_MIRROR, \fB\-\-eole\-mirror\fR EOLE_MIRROR
|
||||
EOLE repository server.
|
||||
.TP
|
||||
\fB\-U\fR UBUNTU_MIRROR, \fB\-\-ubuntu\-mirror\fR UBUNTU_MIRROR
|
||||
Ubuntu repository server.
|
||||
.TP
|
||||
\fB\-V\fR \fIENVOLE_MIRROR\fR, \fB\-\-envole\-mirror\fR \fIENVOLE_MIRROR\fR
|
||||
Envole repository server.
|
||||
.TP
|
||||
\fB\-c\fR, \fB\-\-cdrom\fR
|
||||
use CDROM as source.
|
||||
.TP
|
||||
\fB\-W\fR
|
||||
specific output for EAD.
|
||||
.TP
|
||||
\fB\-i\fR, \fB\-\-ignore\fR
|
||||
ignore local configuration if creoled not responding.
|
||||
.SS "logging:"
|
||||
.TP
|
||||
\fB\-l\fR {debug,info,warning,error,critical}, \fB\-\-log\-level\fR {debug,info,warning,error,critical}
|
||||
Log level
|
||||
.TP
|
||||
\fB\-v\fR, \fB\-\-verbose\fR
|
||||
Verbose mode, equivalent to -l info
|
||||
.TP
|
||||
\fB\-d\fR, \fB\-\-debug\fR
|
||||
Debug mode, equivalent to -l debug
|
||||
.SH EXAMPLES
|
||||
.TP
|
||||
Use testing packages for EOLE repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -C eole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use testing packages for Envole repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -C envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use testing packages for EOLE and Envole repositories and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -C eole envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use testing packages for all repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -C\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for EOLE repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -D eole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for Envole repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -D envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for EOLE and Envole repositories and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -D eole envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for all repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -D\fP
|
||||
.fi
|
||||
.RE
|
||||
.SH "SEE ALSO"
|
||||
.B Maj-Cd
|
||||
(8),
|
||||
.B diagnose
|
||||
(8),
|
||||
.B Query-Auto
|
||||
(8),
|
||||
.B apt-eole
|
||||
(8),
|
||||
.B apt-get
|
||||
(8),
|
||||
.B creole
|
||||
(8).
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
To report a bug , check the following address \fIhttps://dev-eole.ac-dijon.fr/projects/eole/wiki/Accueil\fR
|
||||
|
||||
.SH "AUTHORS"
|
||||
.PP
|
||||
.B EOLE team
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
.\" Maj-Auto.8 2.5.2
|
|
@ -0,0 +1,52 @@
|
|||
.\"
|
||||
.\" Manual page for Maj-Release command
|
||||
.\"
|
||||
.TH Maj-Release 8 "2015 december" "Maj-Release 2.5.0" "Creole command - EOLE"
|
||||
|
||||
.SH NAME
|
||||
Maj-Release \- Automatic release update for EOLE servers
|
||||
|
||||
.SH DESCRIPTION
|
||||
.B Maj-Release
|
||||
command allows you to manually initiate the release update to the latest stable releases.
|
||||
.br
|
||||
It is not an upgrade to a new version.
|
||||
.br
|
||||
You benefit from the latest improvements for the actual server version as well as updates and security bugfixes.
|
||||
.br
|
||||
For it the apt-get source file must be set up properly and the network must operate (\fBdiagnose\fP command).
|
||||
.br
|
||||
Mirror address is :
|
||||
\fIhttp://eole.ac-dijon.fr/eole\fB/eole\fR : EOLE repository
|
||||
.RS 48
|
||||
.br
|
||||
\fB/ubuntu\fR : Ubuntu repository
|
||||
.br
|
||||
\fB/envole\fR : Envole repository
|
||||
.SH "SEE ALSO"
|
||||
.B Maj-Auto
|
||||
(8),
|
||||
.B Maj-Cd
|
||||
(8),
|
||||
.B diagnose
|
||||
(8),
|
||||
.B Query-Auto
|
||||
(8),
|
||||
.B Upgrade-Auto
|
||||
(8),
|
||||
.B apt-eole
|
||||
(8),
|
||||
.B apt-get
|
||||
(8),
|
||||
.B creole
|
||||
(8).
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
To report a bug , check the following address \fIhttps://dev-eole.ac-dijon.fr/projects/eole/wiki/Accueil\fR
|
||||
|
||||
.SH "AUTHORS"
|
||||
.PP
|
||||
.B EOLE team
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
.\" Maj-Release.8 2.5.0
|
|
@ -0,0 +1,185 @@
|
|||
.\"
|
||||
.\" Manual page for Query-Auto command
|
||||
.\"
|
||||
.TH Query-Auto 8 "2015 september" "Query-Auto 2.6.0" "Creole command - EOLE"
|
||||
|
||||
.SH NAME
|
||||
Query-Auto \- Automatic update for EOLE servers
|
||||
|
||||
.SH SYNOPSIS
|
||||
.SY Query-Auto
|
||||
.OP \-l {debug,info,warning,error,critical}
|
||||
.OP \-v
|
||||
.OP \-d
|
||||
.OP \-h
|
||||
.OP \-n
|
||||
.OP \-f
|
||||
.OP \-F
|
||||
.OP \-s
|
||||
.OP \-C\ |\ \-D [{eole,envole}\ [{eole,envole}\ ...]]
|
||||
.OP \-r
|
||||
.OP \-R
|
||||
.OP \-\-download
|
||||
.OP \-S \fIEOLE_MIRROR\fR
|
||||
.OP \-U \fIUBUNTU_MIRROR\fR
|
||||
.OP \-V \fIENVOLE_MIRROR\fR
|
||||
.OP \-c
|
||||
.OP \-W
|
||||
.OP \-i
|
||||
.YS
|
||||
.SH DESCRIPTION
|
||||
.B Query-Auto
|
||||
command allows you to manually initiate the update of all packages changed since the release of the latest stable version.
|
||||
.br
|
||||
You benefit from the latest improvements , updates and security bugfixes.
|
||||
.br
|
||||
For it the apt-get source file must be set up properly and the network must operate (\fBdiagnose\fP command).
|
||||
.br
|
||||
Mirror address is :
|
||||
\fIhttp://eole.ac-dijon.fr/eole\fB/eole\fR : EOLE repository
|
||||
.RS 48
|
||||
.br
|
||||
\fB/ubuntu\fR : Ubuntu repository
|
||||
.br
|
||||
\fB/envole\fR : Envole repository
|
||||
.SH OPTIONS
|
||||
The following options are supported:
|
||||
.TP
|
||||
\fB\-h\fR, \fB\-\-help\fR
|
||||
show this help message and exit
|
||||
.TP
|
||||
\fB\-n\fR, \fB\-\-dry\-run\fR
|
||||
run in dry\-run mode (force to True when using Query\-Auto).
|
||||
.TP
|
||||
\fB\-f\fR, \fB\-\-force\fR
|
||||
bypass Zephir authorizations.
|
||||
.TP
|
||||
\fB\-F\fR, \fB\-\-force_update\fR
|
||||
update your server without any confirmation.
|
||||
.TP
|
||||
\fB\-s\fR, \fB\-\-simulate\fR
|
||||
ask apt\-get to simulate packages installation
|
||||
.TP
|
||||
\fB\-C\fR [{eole,envole} [{eole,envole} ...]], \fB\-\-candidat\fR [{eole,envole} [{eole,envole} ...]]
|
||||
use testing packages.
|
||||
.TP
|
||||
\fB\-D\fR [{eole,envole} [{eole,envole} ...]], \fB\-\-devel\fR [{eole,envole} [{eole,envole} ...]]
|
||||
use development packages.
|
||||
.TP
|
||||
\fB\-r\fR, \fB\-\-reconfigure\fR
|
||||
run reconfigure on successful upgrade.
|
||||
.TP
|
||||
\fB\-R\fR, \fB\-\-reboot\fR
|
||||
run reconfigure on successful upgrade and reboot if
|
||||
necessary (implies \fB\-r\fR).
|
||||
.TP
|
||||
\fB\-\-download\fR
|
||||
only download packages in cache.
|
||||
.TP
|
||||
\fB\-S\fR EOLE_MIRROR, \fB\-\-eole\-mirror\fR EOLE_MIRROR
|
||||
EOLE repository server.
|
||||
.TP
|
||||
\fB\-U\fR UBUNTU_MIRROR, \fB\-\-ubuntu\-mirror\fR UBUNTU_MIRROR
|
||||
Ubuntu repository server.
|
||||
.TP
|
||||
\fB\-V\fR \fIENVOLE_MIRROR\fR, \fB\-\-envole\-mirror\fR \fIENVOLE_MIRROR\fR
|
||||
Envole repository server.
|
||||
.TP
|
||||
\fB\-c\fR, \fB\-\-cdrom\fR
|
||||
use CDROM as source.
|
||||
.TP
|
||||
\fB\-W\fR
|
||||
specific output for EAD.
|
||||
.TP
|
||||
\fB\-i\fR, \fB\-\-ignore\fR
|
||||
ignore local configuration if creoled not responding.
|
||||
.SS "logging:"
|
||||
.TP
|
||||
\fB\-l\fR {debug,info,warning,error,critical}, \fB\-\-log\-level\fR {debug,info,warning,error,critical}
|
||||
Log level
|
||||
.TP
|
||||
\fB\-v\fR, \fB\-\-verbose\fR
|
||||
Verbose mode, equivalent to -l info
|
||||
.TP
|
||||
\fB\-d\fR, \fB\-\-debug\fR
|
||||
Debug mode, equivalent to -l debug
|
||||
.SH EXAMPLES
|
||||
.TP
|
||||
Use testing packages for EOLE repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -C eole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use testing packages for Envole repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -C envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use testing packages for EOLE and Envole repositories and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -C eole envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use testing packages for all repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -C\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for EOLE repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -D eole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for Envole repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -D envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for EOLE and Envole repositories and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -D eole envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for all repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -D\fP
|
||||
.fi
|
||||
.RE
|
||||
.SH "SEE ALSO"
|
||||
.B Maj-Cd
|
||||
(8),
|
||||
.B diagnose
|
||||
(8),
|
||||
.B Query-Auto
|
||||
(8),
|
||||
.B apt-eole
|
||||
(8),
|
||||
.B apt-get
|
||||
(8),
|
||||
.B creole
|
||||
(8).
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
To report a bug , check the following address \fIhttps://dev-eole.ac-dijon.fr/projects/eole/wiki/Accueil\fR
|
||||
|
||||
.SH "AUTHORS"
|
||||
.PP
|
||||
.B EOLE team
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
.\" Query-Auto.8 2.5.2
|
|
@ -0,0 +1,48 @@
|
|||
.\"
|
||||
.\" Manual page for Maj-Release command
|
||||
.\"
|
||||
.TH Upgrade-Auto 8 "2015 december" "Version 2.4.2" "Ceole command - EOLE"
|
||||
|
||||
.SH NAME
|
||||
Upgrade-Auto \- EOLE distribution upgrade tool
|
||||
|
||||
.SH DESCRIPTION
|
||||
.B Upgrade-Auto
|
||||
command allows you to manually initiate a module upgrade to the lastest stables versions.
|
||||
.br
|
||||
You benefit from the latest improvements as well as updates and security bugfixes.
|
||||
.br
|
||||
For it the apt-get source file must be set up properly and the network must operate (\fBdiagnose\fP command).
|
||||
.br
|
||||
Mirror address is :
|
||||
\fIhttp://eole.ac-dijon.fr/eole\fB/eole\fR : EOLE repository
|
||||
.RS 48
|
||||
.br
|
||||
\fB/ubuntu\fR : Ubuntu repository
|
||||
.br
|
||||
\fB/envole\fR : Envole repository
|
||||
.SH "SEE ALSO"
|
||||
.B Maj-Auto
|
||||
(8),
|
||||
.B Maj-Cd
|
||||
(8),
|
||||
.B diagnose
|
||||
(8),
|
||||
.B Query-Auto
|
||||
(8),
|
||||
.B apt-eole
|
||||
(8),
|
||||
.B apt-get
|
||||
(8),
|
||||
.B creole
|
||||
(8).
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
To report a bug , check the following address \fIhttps://dev-eole.ac-dijon.fr/projects/eole/wiki/Accueil\fR
|
||||
|
||||
.SH "AUTHORS"
|
||||
.PP
|
||||
.B EOLE team
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
.\" Upgrade-Auto.8 2.4.2
|
|
@ -0,0 +1,73 @@
|
|||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.40.4.
|
||||
.TH MAJ-AUTO "1" "October 2014" "Maj-Auto 2.4.1" "User Commands"
|
||||
.SH NAME
|
||||
Maj-Auto \- manual page for Maj-Auto 2.4.1
|
||||
.SH DESCRIPTION
|
||||
usage: Maj\-Auto|Query\-Auto [\-h] [\-c CONTAINER]
|
||||
.IP
|
||||
[\-l {debug,info,warning,error,critical}] [\-v] [\-d]
|
||||
[\-n] [\-f] [\-C | \fB\-D]\fR [\-r] [\-R] [\-\-download]
|
||||
[\-S EOLE_MIRROR] [\-U UBUNTU_MIRROR] [\-W]
|
||||
.PP
|
||||
Manage EOLE server automatic update
|
||||
.SS "optional arguments:"
|
||||
.TP
|
||||
\fB\-h\fR, \fB\-\-help\fR
|
||||
show this help message and exit
|
||||
.TP
|
||||
\fB\-n\fR, \fB\-\-dry\-run\fR
|
||||
run in dry\-run mode (force to True when using QueryAuto).
|
||||
.TP
|
||||
\fB\-f\fR, \fB\-\-force\fR
|
||||
bypass Zephir authorizations.
|
||||
.TP
|
||||
\fB\-C\fR, \fB\-\-candidat\fR
|
||||
use testing packages.
|
||||
.TP
|
||||
\fB\-D\fR, \fB\-\-devel\fR
|
||||
use development packages.
|
||||
.TP
|
||||
\fB\-r\fR, \fB\-\-reconfigure\fR
|
||||
run reconfigure on successful upgrade.
|
||||
.TP
|
||||
\fB\-R\fR, \fB\-\-reboot\fR
|
||||
run reconfigure on successful upgrade and reboot if
|
||||
necessary (implies \fB\-r\fR).
|
||||
.TP
|
||||
\fB\-\-download\fR
|
||||
only download packages in cache.
|
||||
.TP
|
||||
\fB\-S\fR EOLE_MIRROR, \fB\-\-eole\-mirror\fR EOLE_MIRROR
|
||||
EOLE repository server.
|
||||
.TP
|
||||
\fB\-U\fR UBUNTU_MIRROR, \fB\-\-ubuntu\-mirror\fR UBUNTU_MIRROR
|
||||
Ubuntu repository server.
|
||||
.TP
|
||||
\fB\-W\fR
|
||||
specific output for EAD.
|
||||
.SS "container:"
|
||||
.TP
|
||||
\fB\-c\fR CONTAINER, \fB\-\-container\fR CONTAINER
|
||||
Name of LXC container
|
||||
.SS "logging:"
|
||||
.TP
|
||||
\fB\-l\fR {debug,info,warning,error,critical}, \fB\-\-log\-level\fR {debug,info,warning,error,critical}
|
||||
Log level
|
||||
.TP
|
||||
\fB\-v\fR, \fB\-\-verbose\fR
|
||||
Verbose mode
|
||||
.TP
|
||||
\fB\-d\fR, \fB\-\-debug\fR
|
||||
Debug mode
|
||||
.SH "SEE ALSO"
|
||||
The full documentation for
|
||||
.B Maj-Auto
|
||||
is maintained as a Texinfo manual. If the
|
||||
.B info
|
||||
and
|
||||
.B Maj-Auto
|
||||
programs are properly installed at your site, the command
|
||||
.IP
|
||||
.B info Maj-Auto
|
||||
.PP
|
||||
should give you access to the complete manual.
|
|
@ -0,0 +1 @@
|
|||
Upgrade-Auto.8
|
|
@ -0,0 +1,200 @@
|
|||
#
|
||||
# NE PAS EDITER CE FICHIER
|
||||
#
|
||||
# Utiliser <appli>.mk à inclure à la fin de Makefile
|
||||
|
||||
#################
|
||||
# Sanity checks #
|
||||
#################
|
||||
|
||||
ifeq (, $(DESTDIR))
|
||||
$(warning $$(DESTDIR) is empty, installation will be done in /)
|
||||
endif
|
||||
|
||||
ifeq (, $(filter-out XXX-XXX, $(strip $(SOURCE))))
|
||||
$(error $$(SOURCE) variable has incorrect value '$(SOURCE)')
|
||||
endif
|
||||
|
||||
#########################
|
||||
# Variables definitions #
|
||||
#########################
|
||||
|
||||
INSTALL := install
|
||||
INSTALL_DATA := install -m 644
|
||||
INSTALL_PROGRAM := install -m 755
|
||||
INSTALL_DIRECTORY := install -m 755 -d
|
||||
INSTALL_RECURSIVE := cp -dr --no-preserve=ownership
|
||||
|
||||
# Base
|
||||
eole_DIR := $(DESTDIR)/usr/share/eole
|
||||
|
||||
ifeq ($(strip $(EOLE_VERSION)), 2.3)
|
||||
diagnose_PROG_DIR := $(eole_DIR)/diagnose/module
|
||||
else
|
||||
diagnose_PROG_DIR := $(eole_DIR)/diagnose/
|
||||
endif
|
||||
|
||||
# Creole
|
||||
creole_DIR := $(eole_DIR)/creole
|
||||
dicos_DATA_DIR := $(creole_DIR)/dicos
|
||||
tmpl_DATA_DIR := $(creole_DIR)/distrib
|
||||
pretemplate_PROG_DIR := $(eole_DIR)/pretemplate
|
||||
posttemplate_PROG_DIR := $(eole_DIR)/posttemplate
|
||||
postservice_PROG_DIR := $(eole_DIR)/postservice
|
||||
firewall_DATA_DIR := $(eole_DIR)/firewall
|
||||
bareos_restore_DATA_DIR := $(eole_DIR)/bareos/restore
|
||||
bareos_fichier_DATA_DIR := $(DESTDIR)/etc/bareos/bareosfichiers.d
|
||||
schedule_scripts_PROG_DIR := $(eole_DIR)/schedule/scripts
|
||||
extra_REC_DIR := $(creole_DIR)/extra
|
||||
|
||||
# Zéphir
|
||||
zephir_DATA_DIR := $(DESTDIR)/usr/share/zephir
|
||||
zephir_configs_DATA_DIR := $(zephir_DATA_DIR)/monitor/configs
|
||||
zephir_srv_DATA_DIR := $(zephir_configs_DATA_DIR)/services
|
||||
|
||||
# SSO
|
||||
sso_DATA_DIR := $(DESTDIR)/usr/share/sso
|
||||
sso_filtres_DATA_DIR := $(sso_DATA_DIR)/app_filters
|
||||
sso_user-info_DATA_DIR := $(sso_DATA_DIR)/user_infos
|
||||
|
||||
# EAD
|
||||
ead_DATA_DIR := $(DESTDIR)/usr/share/ead2/backend/config
|
||||
ead_actions_DATA_DIR := $(ead_DATA_DIR)/actions
|
||||
ead_perms_DATA_DIR := $(ead_DATA_DIR)/perms
|
||||
ead_roles_DATA_DIR := $(ead_DATA_DIR)/roles
|
||||
|
||||
# Program libraries goes under /usr/lib/<PROGRAM>/
|
||||
lib_$(SOURCE)_DATA_DIR := $(DESTDIR)/usr/lib/$(SOURCE)
|
||||
|
||||
# Scripts Eole
|
||||
scripts_PROG_DIR := $(eole_DIR)/sbin
|
||||
lib_eole_DATA_DIR := $(DESTDIR)/usr/lib/eole
|
||||
|
||||
# LDAP
|
||||
ldap_passwords_DATA_DIR := $(eole_DIR)/annuaire/password_files
|
||||
|
||||
# LXC
|
||||
lxc_DATA_DIR := $(eole_DIR)/lxc
|
||||
lxc_fstab_DATA_DIR := $(lxc_DATA_DIR)/fstab
|
||||
lxc_hosts_DATA_DIR := $(lxc_DATA_DIR)/hosts
|
||||
|
||||
# SQL
|
||||
sql_DATA_DIR := $(eole_DIR)/mysql/$(SOURCE)
|
||||
sql_gen_DATA_DIR := $(sql_DATA_DIR)/gen
|
||||
sql_updates_DATA_DIR := $(sql_DATA_DIR)/updates
|
||||
|
||||
sql_conf_gen_DATA_DIR := $(eole_DIR)/applications/gen
|
||||
sql_conf_passwords_DATA_DIR := $(eole_DIR)/applications/passwords
|
||||
sql_conf_updates_DATA_DIR := $(eole_DIR)/applications/updates/$(SOURCE)
|
||||
|
||||
# Certifs
|
||||
certs_DATA_DIR := $(eole_DIR)/certs
|
||||
|
||||
# Logrotate
|
||||
logrotate_DATA_DIR := $(DESTDIR)/etc/logrotate.d
|
||||
|
||||
|
||||
# Python modules
|
||||
ifneq ($(DESTDIR),)
|
||||
PYTHON_OPTS := --root $(DESTDIR)
|
||||
endif
|
||||
|
||||
# Translation
|
||||
TRANSLATION_SRC := translation
|
||||
TRANSLATION_DEST := $(DESTDIR)/usr/share/locale
|
||||
PO_FILES = $(wildcard $(TRANSLATION_SRC)/*/*.po)
|
||||
MO_FOLDERS = $(addprefix $(TRANSLATION_DEST), $(addsuffix LC_MESSAGES,$(subst $(TRANSLATION_SRC),,$(dir $(PO_FILES)))))
|
||||
|
||||
#############################################
|
||||
# Common directories and files installation #
|
||||
#############################################
|
||||
|
||||
all:
|
||||
|
||||
$(MO_FOLDERS):
|
||||
$(INSTALL_DIRECTORY) $@
|
||||
|
||||
$(PO_FILES): $(MO_FOLDERS)
|
||||
msgfmt -o $(TRANSLATION_DEST)$(subst $(TRANSLATION_SRC),,$(addsuffix LC_MESSAGES,$(dir $@)))/$(notdir $(@:.po=.mo)) $@
|
||||
|
||||
install-lang: $(PO_FILES)
|
||||
|
||||
install:: install-dirs install-files install-lang
|
||||
|
||||
# $1 = command to run
|
||||
# $2 = source directory
|
||||
# $3 = destination directory
|
||||
define fc_install_file
|
||||
if [ -d $2 ]; then \
|
||||
for file in `ls -1 $2/`; do \
|
||||
$1 $2/$$file $3 || true; \
|
||||
done; \
|
||||
fi
|
||||
endef
|
||||
|
||||
##
|
||||
## Directory creation
|
||||
##
|
||||
|
||||
# use % to catch local name in $*
|
||||
# data, program and recursive directory require a corresponding
|
||||
# directory in local sources
|
||||
%_DATA_DIR %_PROG_DIR %REC_DIR:
|
||||
test ! -d $(subst _,/,$*) || $(INSTALL_DIRECTORY) $($@)
|
||||
|
||||
# Create the directory referenced by the variable without a local one.
|
||||
%_DIR:
|
||||
$(INSTALL_DIRECTORY) $($@)
|
||||
|
||||
##
|
||||
## Install files present directly under data, program and recursive directories
|
||||
##
|
||||
|
||||
# $* : name of variable
|
||||
# $($*): value of variable
|
||||
%-instdata:
|
||||
$(call fc_install_file, $(INSTALL_DATA), $(subst _,/,$(subst _DATA_DIR,,$*)), $($*))
|
||||
|
||||
%-instprog:
|
||||
$(call fc_install_file, $(INSTALL_PROGRAM), $(subst _,/,$(subst _PROG_DIR,,$*)), $($*))
|
||||
|
||||
%-instrec:
|
||||
$(call fc_install_file, $(INSTALL_RECURSIVE), $(subst _,/,$(subst _REC_DIR,,$*)), $($*))
|
||||
|
||||
|
||||
# Use second expansion as variables may be created in included
|
||||
# Makefiles
|
||||
.SECONDEXPANSION:
|
||||
|
||||
# List of all directories
|
||||
installdirs_LIST = $(foreach V, $(filter %_DIR, $(.VARIABLES)), \
|
||||
$(if $(filter file, $(origin $(V))), \
|
||||
$(V)))
|
||||
# List of data directories
|
||||
installdata_LIST = $(filter %_DATA_DIR, $(installdirs_LIST))
|
||||
# List of program directories
|
||||
installprog_LIST = $(filter %_PROG_DIR, $(installdirs_LIST))
|
||||
# List of recursive directories
|
||||
installrec_LIST = $(filter %_REC_DIR, $(installdirs_LIST))
|
||||
|
||||
# Expand directories to create as dependency
|
||||
# Use double-colon to permit user to define additionnal install-dirs
|
||||
install-dirs:: $$(installdirs_LIST)
|
||||
|
||||
# Expand files to install as dependency
|
||||
# Use double-colon to permit user to define additionnal install-files
|
||||
install-files:: install-data-files install-prog-files install-rec-dirs
|
||||
|
||||
install-data-files: $$(patsubst %,%-instdata,$$(installdata_LIST))
|
||||
|
||||
install-prog-files: $$(patsubst %,%-instprog,$$(installprog_LIST))
|
||||
|
||||
install-rec-dirs: $$(patsubst %,%-instrec,$$(installrec_LIST))
|
||||
|
||||
# Installation of python modules
|
||||
ifeq ($(shell test -f setup.py && echo 0), 0)
|
||||
install-files::
|
||||
python3 setup.py install --no-compile --install-layout=deb $(PYTHON_OPTS)
|
||||
endif
|
||||
|
||||
.PHONY: install install-dirs install-files install-data-files install-prog-files install-rec-dirs
|
|
@ -0,0 +1,64 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
|
||||
<creole>
|
||||
<family_action name="Tâches planifiées"
|
||||
description="Gestion des tâches planifiées"
|
||||
color="#8cd98c"
|
||||
image="icons/appointment-new.svg">
|
||||
<action type="form"
|
||||
title="Tâches planifiées"
|
||||
save="True"
|
||||
description="Paramétrer les tâches planifiées (heure, jour)"
|
||||
image="icons/x-office-calendar.svg">
|
||||
<input>Programmer</input>
|
||||
<profile>ead_admin</profile>
|
||||
<ewtapp>ead</ewtapp>
|
||||
<tag>maj</tag>
|
||||
<tag>schedule</tag>
|
||||
</action>
|
||||
</family_action>
|
||||
<variables>
|
||||
<family name="schedule" description="Heure et jour d'exécution des tâches planifiées">
|
||||
<variable description="Heure" name='hour' type='number' auto_save='True'/>
|
||||
<variable description="Minute" name='minute' type='number' auto_save='True'/>
|
||||
<variable description="Jour des tâches hebdomadaires (1 : lundi)" name='weekday' type='number' auto_save='True'/>
|
||||
<variable description="Jour des tâches mensuelles la première semaine du mois (1 : lundi)" name='monthday' type='number' auto_save='True'/>
|
||||
</family>
|
||||
</variables>
|
||||
<constraints>
|
||||
<check name='valid_enum' target='schedule.schedule.weekday'>
|
||||
<param>[1, 2, 3, 4, 5, 6, 7]</param>
|
||||
</check>
|
||||
<check name='valid_enum' target='schedule.schedule.monthday'>
|
||||
<param>[1, 2, 3, 4, 5, 6, 7]</param>
|
||||
</check>
|
||||
<check name='valid_enum' target='schedule.schedule.hour'>
|
||||
<param>[1, 2, 3, 4, 5]</param>
|
||||
</check>
|
||||
<check name='valid_enum' target='schedule.schedule.minute'>
|
||||
<param type='python'>range(0, 60)</param>
|
||||
</check>
|
||||
<fill name="random_int" target='schedule.schedule.hour'>
|
||||
<param type='number'>1</param>
|
||||
<param type='number'>5</param>
|
||||
</fill>
|
||||
<fill name="random_int" target='schedule.schedule.minute'>
|
||||
<param type='number'>0</param>
|
||||
<param type='number'>59</param>
|
||||
</fill>
|
||||
<fill name="random_int" target='schedule.schedule.weekday'>
|
||||
<param type='number'>1</param>
|
||||
<param type='number'>7</param>
|
||||
</fill>
|
||||
<fill name="random_int" target='schedule.schedule.monthday'>
|
||||
<param type='number'>1</param>
|
||||
<param type='number'>7</param>
|
||||
<param name='exclude' type='eole'>schedule.schedule.weekday</param>
|
||||
</fill>
|
||||
<check name='valid_differ' target='schedule.schedule.monthday'>
|
||||
<param type='eole'>schedule.schedule.weekday</param>
|
||||
</check>
|
||||
</constraints>
|
||||
<help/>
|
||||
</creole>
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
|
||||
<creole>
|
||||
<variables>
|
||||
<family name='majauto'
|
||||
description="Mise à jour automatique">
|
||||
<variable name="description" type="string" hidden="True"><value>Mise à jour du serveur</value></variable>
|
||||
<variable name="day" type="schedule" description="Périodicité d'exécution"><value>weekly</value></variable>
|
||||
<variable name="mode" type="schedulemod" hidden="True"><value>post</value></variable>
|
||||
</family>
|
||||
</variables>
|
||||
</creole>
|
|
@ -0,0 +1,13 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
|
||||
<creole>
|
||||
<variables>
|
||||
<family name='z_rebootauto'
|
||||
description="Redémarrage automatique"
|
||||
hidden="True">
|
||||
<variable name="description" type="string" hidden="True"><value>Redémarrage du serveur</value></variable>
|
||||
<variable name="day" type="schedule" description="Périodicité d'exécution"><value>none</value></variable>
|
||||
<variable name="mode" type="schedulemod" hidden="True"><value>post</value></variable>
|
||||
</family>
|
||||
</variables>
|
||||
</creole>
|
|
@ -0,0 +1,13 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
|
||||
<creole>
|
||||
<variables>
|
||||
<family name="z_shutdownauto"
|
||||
description="Arrêt automatique"
|
||||
hidden="True">
|
||||
<variable name="description" type="string" hidden="True"><value>Arrêt du serveur</value></variable>
|
||||
<variable name="day" type="schedule" description="Périodicité d'exécution"><value>none</value></variable>
|
||||
<variable name="mode" type="schedulemod" hidden="True"><value>post</value></variable>
|
||||
</family>
|
||||
</variables>
|
||||
</creole>
|
|
@ -0,0 +1,13 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
|
||||
<creole>
|
||||
<variables>
|
||||
<family name="y_reconfigureauto"
|
||||
description="Reconfigure automatique"
|
||||
hidden="True">
|
||||
<variable name="description" type="string" hidden="True"><value>Reconfigure du serveur</value></variable>
|
||||
<variable name="day" type="schedule" description="Périodicité d'exécution"><value>none</value></variable>
|
||||
<variable name="mode" type="schedulemod" hidden="True"><value>post</value></variable>
|
||||
</family>
|
||||
</variables>
|
||||
</creole>
|
|
@ -0,0 +1,3 @@
|
|||
cron:
|
||||
eole.file:
|
||||
- name: /etc/cron.d/schedule
|
|
@ -0,0 +1,3 @@
|
|||
include:
|
||||
- schedule.cron
|
||||
- schedule.manage
|
|
@ -0,0 +1,3 @@
|
|||
schedule:
|
||||
cmd.run:
|
||||
- name: /usr/share/eole/sbin/manage_schedule --apply
|
|
@ -0,0 +1,109 @@
|
|||
.\"
|
||||
.\" Page de manuel de la commande CreoleGet.
|
||||
.\"
|
||||
.TH CreoleGet 8 "04 Avril 2017" "Version 2.6.1" "Commande Creole - EOLE"
|
||||
|
||||
.SH NOM
|
||||
CreoleGet \- Récupération de la valeur d'une variable Creole
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B CreoleGet
|
||||
[
|
||||
.B OPTIONS
|
||||
]
|
||||
.B VARIABLE
|
||||
[
|
||||
.B DEFAULT
|
||||
]
|
||||
|
||||
.br
|
||||
.B CreoleGet
|
||||
[
|
||||
.B OPTIONS
|
||||
]
|
||||
.B --groups
|
||||
|
||||
.br
|
||||
.B CreoleGet
|
||||
[
|
||||
.B OPTIONS
|
||||
]
|
||||
.B --list
|
||||
|
||||
.br
|
||||
.B CreoleGet
|
||||
[
|
||||
.B OPTIONS
|
||||
]
|
||||
.B --reload
|
||||
|
||||
.br
|
||||
.B CreoleGet
|
||||
[
|
||||
.B OPTIONS
|
||||
]
|
||||
.B --reload-eol
|
||||
|
||||
.br
|
||||
.B CreoleGet
|
||||
.B \-h
|
||||
|
||||
.SH DESCRIPTION
|
||||
.B CreoleGet
|
||||
est un utilitaire très pratique pour récupérer la valeur d'une
|
||||
variable Creole
|
||||
|
||||
.SH ARGUMENTS
|
||||
.TP
|
||||
\fBVARIABLE\fP
|
||||
nom de la variable à lire
|
||||
.TP
|
||||
\fBDEFAULT\fP
|
||||
valeur à renvoyer en cas d'erreur (variable inconnue ou désactivée)
|
||||
|
||||
.SH OPTIONS
|
||||
Les options suivantes sont supportées:
|
||||
.TP
|
||||
\fB-d\fP
|
||||
active le mode de débogage
|
||||
.TP
|
||||
\fB-l\fP
|
||||
paramètrage du niveau de log (debug|info|warning|error|critical)
|
||||
.TP
|
||||
\fB-v\fP
|
||||
active le mode verbeux
|
||||
.TP
|
||||
\fB-h\fP
|
||||
Affiche de l'aide
|
||||
|
||||
.SH ACTIONS
|
||||
.TP
|
||||
\fB--groups\fP
|
||||
liste les groupes de conteneurs
|
||||
|
||||
.TP
|
||||
\fB--list\fP
|
||||
liste l'ensemble des variables creole
|
||||
|
||||
.TP
|
||||
\fB--reload\fP
|
||||
recharge toute la configuration creole (dictionnaires et valeurs)
|
||||
|
||||
.TP
|
||||
\fB--reload-eol\fP
|
||||
recharge les valeurs de configuration creole
|
||||
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
Pour signaler un bogue, consultez l'adresse suivante \fIhttp://dev-eole.ac-dijon.fr/projects/creole\fR
|
||||
|
||||
.SH "AUTEURS"
|
||||
.PP
|
||||
.B Equipe EOLE
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
|
||||
.SH "VOIR AUSSI"
|
||||
.B creole
|
||||
(8).
|
||||
|
|
@ -0,0 +1,57 @@
|
|||
.\"
|
||||
.\" Page de manuel de la commande CreoleLint.
|
||||
.\"
|
||||
.TH CreoleLint 8 "11 octobre 2013" "Version 2.4" "Commande Creole - EOLE"
|
||||
|
||||
.SH NOM
|
||||
CreoleLint \- Outil de validation des dictionnaires et templates Creole
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B CreoleLint
|
||||
[
|
||||
.B \-t TMPL DIR
|
||||
] [
|
||||
.B \-l info|warning|error
|
||||
] [
|
||||
.B \-n LINT_NAME
|
||||
] [
|
||||
.B \-d
|
||||
] [
|
||||
.B \-h
|
||||
]
|
||||
|
||||
.SH DESCRIPTION
|
||||
.B CreoleLint
|
||||
est un utilitaire très pratique pour valider la syntaxe du dictionnaire et des templates. L'outil effectue une série de tests dans le but de détecter les erreurs les plus fréquentes.
|
||||
.SH OPTIONS
|
||||
Les options suivantes sont supportées:
|
||||
.TP
|
||||
\fB-t\fP
|
||||
répertoire des templates
|
||||
.TP
|
||||
\fB-l\fP
|
||||
niveau des messages (info, warning ou error)
|
||||
.TP
|
||||
\fB-n\fP
|
||||
n'exécuter qu'un lint
|
||||
.TP
|
||||
\fB-d\fP
|
||||
dico-only, ne lance le lint que sur les dictionnaires (et pas sur les templates, donc)
|
||||
.TP
|
||||
\fB-h\fP
|
||||
Affiche de l'aide
|
||||
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
Pour signaler un bogue, consultez l'adresse suivante \fIhttp://dev-eole.ac-dijon.fr/projects/creole\fR
|
||||
|
||||
.SH "AUTEURS"
|
||||
.PP
|
||||
.B Equipe EOLE
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
|
||||
.SH "VOIR AUSSI"
|
||||
.B creole
|
||||
(8).
|
||||
.\" Maj-Cd.8 1.0
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue