first cadoles' version

parent 841643e76e
commit 7ecf861459
@@ -2,3 +2,4 @@
*~
*#
*.swp
__pycache__
153  bin/CreoleCat
@@ -1,153 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Run templatisation on a template name or file

`CreoleCat` support two modes:

- run on a template name with option -t: the name is looked up in
  ``/usr/share/eole/creole/distrib/``. The output files are
  calculated unless you explicitely specify ``-o``.

- run on a file with options -s: this mode requires the use of
  ``-o`` option.

"""

import sys
import os
import argparse

from os.path import basename, join, split

from pyeole import scriptargs
from pyeole.log import init_logging

from creole.template import CreoleTemplateEngine
import creole.config as cfg
from creole.client import CreoleClient, CreoleClientError
from pyeole.ihm import only_root

only_root()

client = CreoleClient()

def parse_cmdline():
    """Parse commande line.
    """
    parser = argparse.ArgumentParser(description="Instancie un template creole",
                                     parents=[scriptargs.container(),
                                              scriptargs.logging()])
    parser.add_argument("-t", "--template", metavar="NAME",
                        help=u"nom du fichier template creole présent "
                        "dans /usr/share/eole/creole/distrib")
    parser.add_argument("-s", "--source", metavar="PATH",
                        help=u"chemin d’un fichier template")
    parser.add_argument("-o", "--output", metavar="OUTPUTFILE",
                        help=u"chemin du fichier généré")

    opts = parser.parse_args()

    if (opts.template is None and opts.source is None) \
            or (opts.template and opts.source):
        parser.error("Vous devez spécifier une des options"
                     "'--template' ou '--source'.")

    if opts.source is not None and not os.access(opts.source, os.F_OK):
        parser.error("Fichier source inexistant"
                     " ou illisible: {0}".format(opts.source))

    if opts.output is None:
        if opts.source is not None:
            opts.output = ""
    else:
        if opts.template is not None \
                and opts.output == join(cfg.distrib_dir, opts.template):
            parser.error("Le fichier de sortie ne peut écraser"
                         " le fichier template: {0}".format(opts.output) )
        if opts.source is not None and opts.output == opts.source:
            parser.error("Le fichier de sortie ne peut écraser"
                         " le fichier source: {0}".format(opts.output) )

    if opts.verbose:
        opts.log_level = 'info'
    if opts.debug:
        opts.log_level = 'debug'

    return opts


def _find_file(name, ctx):
    candidates = client.to_grouped_lists(ctx['files'], keyname='source')
    for source, filevar in candidates.items():
        if name != basename(source):
            continue
        elif filevar[0].get('activate', False):
            return filevar[0]


def main():
    """Setup environnment and run templatisation.
    """

    options = parse_cmdline()
    try:
        log = init_logging(level=options.log_level)

        engine = CreoleTemplateEngine()

        filevar = { 'source': options.source,
                    'name': options.output,
                    'full_name': options.output,
                    'activate' : True,
                    'del_comment': u'',
                    'mkdir' : False,
                    'rm' : False,
                    }

        if options.container is not None:
            # force container context
            groups = [client.get_container_infos(options.container)]
        elif options.output is not None:
            # Source without container, for root context
            groups = [client.get_container_infos('root')]
        else:
            groups = []
            for group in client.get_groups():
                groups.append(client.get_group_infos(group))

        instanciated_files = []
        for group in groups:
            if filevar['source'] is not None:
                instanciated_files.append(filevar)
                engine.process(filevar, group)
            elif options.template is not None:
                found_file = _find_file(options.template, group)
                if found_file:
                    instanciated_files.append(found_file)
                    if options.output is None:
                        engine._instance_file(found_file, group)
                    else:
                        # Override output
                        found_file['name'] = options.output
                        found_file['full_name'] = options.output
                        # Do not get through verify and
                        # change_properties
                        engine._copy_to_template_dir(found_file)
                        engine.process(found_file, group)

        if not instanciated_files:
            # No file get instanciated
            raise CreoleClientError("Fichier template inexistant:"
                                    " {0}".format(options.template))

    except Exception, err:
        if options.debug:
            log.debug(err, exc_info=True)
        else:
            log.error(err)
        sys.exit(1)
    sys.exit(0)

if __name__ == '__main__':
    main()
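For reference, the two modes described in the docstring above map onto invocations like the following sketch (the template name and paths are placeholders for illustration, not files shipped by the package):

# Mode 1: instantiate a template by name from /usr/share/eole/creole/distrib;
# the output location is taken from its declaration unless -o overrides it.
CreoleCat -t mon_template

# Mode 2: instantiate an arbitrary template file; -o is required in this mode.
CreoleCat -s /root/mon_template.tmpl -o /tmp/rendu.conf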
130  bin/CreoleGet
@@ -1,130 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-

"""Get a creole variable value.
"""

import sys
import argparse

from creole.client import CreoleClient

from pyeole import scriptargs
from pyeole.log import init_logging
from pyeole.encode import normalize

_RETURN_VALUES = u"""Multiple values are separated with NEWLINE character '\\n',
or SPACE character if several variables are displayed."""

parser = argparse.ArgumentParser(description=u"Get creole variable",
                                 epilog=_RETURN_VALUES,
                                 parents=[scriptargs.logging()])

parser.add_argument('variable', nargs='?',
                    help=u"Nom de variable creole")
parser.add_argument('default', nargs='?',
                    help=u"Valeur par défaut si la variable n’existe pas")

incompatible_options = parser.add_mutually_exclusive_group()

incompatible_options.add_argument('--groups', action="store_true", default=False,
                                  help=u"Liste les groupes de conteneurs")

incompatible_options.add_argument('--list', action="store_true", default=False,
                                  help=u"Liste l'ensemble des variables creole")

incompatible_options.add_argument('--reload', action="store_true", default=False,
                                  help=u"Recharge toute la configuration creole")

incompatible_options.add_argument('--reload-eol', action="store_true", default=False,
                                  help=u"Recharge les valeurs de configuration creole")

options = parser.parse_args()

if options.verbose:
    # 'info' is outputed to stdout
    options.log_level = u'warning'
if options.debug:
    options.log_level = u'debug'

def output(value, strip_master=False):
    """
    formatage de l'affichage
    """
    if isinstance(value, list):
        #FIXME: ['val1', None, 'val2']
        for val in value:
            if isinstance(val, dict):
                sys.stderr.write(u'{}\n'.format(val['err']))
            else:
                sys.stdout.write(u'{}\n'.format(val))
    elif isinstance(value, dict):
        # in case several keys/values are returned
        list_keys = value.keys()
        list_keys.sort()
        for var in list_keys:
            values = value[var]
            if isinstance(values, list):
                values_ = u''
                for val in values:
                    if val and not isinstance(val, dict):
                        values_ += u" {}".format(val)
                values = values_
            elif values is None:
                values = u''
            else:
                values = u'{}'.format(values)
            if strip_master:
                varname = var.split('.')[-1]
            else:
                varname = var
            sys.stdout.write(u'{}="{}"\n'.format(varname, values.strip()))
    elif value is None or value == u'':
        sys.stdout.write(u'\n')
    else:
        sys.stdout.write(u'{0}\n'.format(value))
    #return ret.rstrip('\n')

def main():
    """Setup environnment and run templatisation.
    """

    try:
        log = init_logging(level=options.log_level)
        client = CreoleClient()
        var = options.variable
        if options.groups:
            output(client.get_groups())
        elif options.list:
            output(client.get_creole(), True)
        elif options.reload:
            client.reload_config()
        elif options.reload_eol:
            client.reload_eol()
        elif not var:
            raise Exception(u"Veuillez spécifier un nom de variable Creole")
        else:
            if options.default is not None:
                kwargs = {'default':options.default}
            else:
                kwargs = {}
            if '.' in var:
                output(client.get(var))
            else:
                output(client.get_creole(var, **kwargs))

    except Exception, err:
        if options.debug:
            log.debug(normalize(err), exc_info=True)
        else:
            log.error(normalize(err))
        sys.exit(1)

    sys.exit(0)

if __name__ == '__main__':
    #Fix #18701
    reload(sys)
    sys.setdefaultencoding('UTF8')
    main()
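A minimal usage sketch of the interface defined above (the variable names are examples only, not variables guaranteed to exist on every module):

# Print one creole variable, optionally with a fallback value
CreoleGet nom_machine
CreoleGet ma_variable valeur_par_defaut

# Dump every variable as name="value" lines, or list the container groups
CreoleGet --list
CreoleGet --groups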
@@ -1,68 +0,0 @@
#! /usr/bin/python

import sys
from optparse import OptionParser

from creole.lint.creolelint import validate
from creole.lint.ansiwriter import AnsiWriter

def parse_cmdline():
    parser = OptionParser()

    parser.add_option("-t", "--template", dest="tmpl",
                      default=None, help="nom du template Creole")
    parser.add_option("-l", "--level", dest="writelevel", default='warning',
                      help="level d'affichage des messages")
    parser.add_option("-n", "--name", dest="name",
                      default=None, help="nom du lint a tester")
    parser.add_option("-d", "--dico-only", action="store_true",
                      dest="only_on_dico",
                      default=False, help="lint uniquement sur les dicos")
    return parser.parse_args()

def main():
    options, args = parse_cmdline()
    tmpl = options.tmpl
    writelevel = options.writelevel

    #if options.writelevel not in errorlevel.values():
    # raise Exception('Niveau %s inconnu'% options.writelevel)
    only_on_template = False
    only_on_dico = options.only_on_dico
    if tmpl is not None:
        only_on_template = True
    if options.name:
        keywords = [options.name]
        writelevel = 'info'
    else:
        keywords = []
        if not only_on_template:
            # keywords.extend(['orphans_def',
            # 'orphans_set', 'orphans_for', 'orphans_tmpl_files',
            # 'define', 'syntax_for', 'syntax_var', 'syntax_var2',
            # 'syntax_function', 'valid_client_option'])
            keywords.extend(['valid_dtd', 'wrong_dicos_name',
                             'tabs_in_dicos', 'hidden_if_in_dicos',
                             'condition_without_target',
                             'obligatoire_in_dicos',
                             'valid_slave_value',
                             'valid_var_label', 'valid_separator_label',
                             'valid_help_label',
                             'activation_var_without_help',
                             'family_without_help',
                             'family_without_icon',
                             'old_fw_file'])
        if not only_on_dico:
            keywords.extend(['valid_parse_tmpl'])
        keywords.append('builtins')
    ansi = AnsiWriter(writelevel)
    try:
        for keyword in keywords:
            validate(keyword, ansi, tmpl)
    except Exception, err:
        from traceback import print_exc
        print_exc()
        #print u"Erreur : {0}".format(err)
        sys.exit(1)

main()
@@ -1,16 +0,0 @@
#!/usr/bin/env python

from sys import argv
from os import getppid
from importlib import import_module
from pyeole.command_line import ArgumentParser
from pyeole.ihm import only_root

only_root()

allowed_functions = ('acquire', 'release', 'is_locked')
module = import_module('pyeole.lock')
module.PID = getppid()
arguments = ArgumentParser(module, allowed_functions, argv[0])
arguments.parse_args(argv[1:])
arguments.trigger_callback()
@@ -1,54 +0,0 @@
#!/bin/bash

# exécute une commande dans un conteneur

SSHCMD="ssh -q -o LogLevel=ERROR -o StrictHostKeyChecking=no"

commande=$1
container=$2
# ne lancer la commande que si dans un conteneur (ssh)
onlyifcontainer=$3
silent=$4
CMD='eval'

ExecContainer()
{
    ip="$1"
    cmd="$2"
    tcpcheck 2 $ip:22 &>/dev/null || return 1
    $SSHCMD root@$ip "$cmd"
}

if [[ ${container} == "all" ]]
then
    if [[ $(CreoleGet mode_conteneur_actif) == "oui" ]]
    then
        for grp in $(CreoleGet --groups)
        do
            if [[ ${grp} != 'root' ]] && [[ ${grp} != 'all' ]]
            then
                container_ip=$(CreoleGet "container_ip_${grp}")
                if [ ! "$silent" = "yes" ]; then
                    echo "Exécution de la commande [${commande}] dans le conteneur ${grp}"
                    echo
                fi
                ExecContainer "$container_ip" "$commande"
                if [ ! "$silent" = "yes" ]; then
                    echo
                fi
            fi
        done
    fi
else
    if [ -n "$container" ]
    then
        container_ip=$(CreoleGet "container_ip_$container")
    fi
    if [ -n "$container_ip" ] && [ ! "$container_ip" = "127.0.0.1" ]
    then
        ExecContainer "$container_ip" "$commande"
    elif [ "$onlyifcontainer" != "yes" ]
    then
        eval "$commande"
    fi
fi
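For orientation, the positional arguments above translate into calls such as this sketch (the group name is a placeholder):

# Run a command in one container group; falls back to local execution
# when the group does not resolve to a separate container.
CreoleRun "apt-get update" mongroupe

# Run it in every container group (only when container mode is enabled)
CreoleRun "apt-get update" all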
@@ -1,71 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-

import sys
import argparse

from pyeole import scriptargs
from pyeole.log import init_logging
from pyeole.service import manage_services
from creole.reconfigure import services
from pyeole.ihm import only_root

only_root()

def parse_cmdline():

    service_actions=['apply', 'configure', 'enable', 'disable', 'status',
                     'start', 'stop', 'restart', 'reload']

    parser = argparse.ArgumentParser(description="Action sur les services",
                                     parents=[scriptargs.container(),
                                              scriptargs.logging('info')])
    parser.add_argument('service', help="Nom du service")
    parser.add_argument('action', choices=service_actions,
                        help="Action à effectuer")
    parser.add_argument("-f", "--force", action="store_true", default=False,
                        help="Ne pas valider l'état de service")
    parser.add_argument("-s", "--silent", action="store_true", default=False,
                        help="Ne pas affichier sur la console")

    opts = parser.parse_args()

    if opts.verbose:
        opts.log_level = 'info'
    if opts.debug:
        opts.log_level = 'debug'
    if opts.silent:
        opts.log_level = 'error'

    return opts

def main():
    options = parse_cmdline()
    log = init_logging(level=options.log_level)
    try:
        display = 'console'
        if options.silent:
            display = 'log'
        if options.service == 'all':
            if options.action == 'restart':
                services('stop', display_title=False, try_restart_lxc=False)
                services('start', display_title=False, try_restart_lxc=False)
            else:
                services(options.action, display_title=False, try_restart_lxc=False)
            ret = True
        else:
            ret = manage_services(options.action, options.service,
                                  container=options.container, force=options.force,
                                  display=display)
    except Exception, err:
        if options.debug:
            log.debug(err, exc_info=True)
        else:
            log.error(err)
        sys.exit(1)
    sys.exit(ret)

if __name__ == '__main__':
    main()
@@ -1,92 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-

import argparse
from sys import exit

from pyeole import scriptargs
from pyeole.ansiprint import print_red
from pyeole.log import init_logging
from creole.var_loader import convert_value
from creole.loader import creole_loader, config_save_values
from tiramisu.error import PropertiesOptionError
from pyeole.ihm import only_root

only_root()

parser = argparse.ArgumentParser(description=u"Set Creole variable",
                                 parents=[scriptargs.logging()])
parser.add_argument("--default", action="store_true", default=False,
                    help=u"remettre à la valeur par défaut")
parser.add_argument('variable', nargs=1,
                    help=u"Nom de variable Creole")
parser.add_argument('value', nargs='?',
                    help=u"Valeur de la variable Creole")

options = parser.parse_args()

if options.verbose:
    # 'info' is outputed to stdout
    options.log_level = u'warning'
if options.debug:
    options.log_level = u'debug'

if options.default and options.value:
    print_red("En cas de remise à la valeur par défaut, il ne faut pas spécifier de valeur")
    exit(1)

if not options.default and options.value is None:
    print_red("Veuiller spécifier la valeur")
    exit(1)

def main():
    log = init_logging(level=options.log_level)
    try:
        config = creole_loader(rw=True, owner='creoleset', load_extra=True)
        var = options.variable[0]
        if '.' in var:
            if var.startswith('.'):
                var = var[1:]
            namespace = var.split('.')[0]
        else:
            namespace = 'creole'
            var = config.find_first(byname=var, type_='path',
                                    force_permissive=True)
        if options.default:
            homeconfig, name = config.cfgimpl_get_home_by_path(var)
            homeconfig.__delattr__(name)
        else:
            option = config.unwrap_from_path(var)
            value = options.value
            if option.impl_is_multi():
                values = []
                for val in value.split('\n'):
                    values.append(convert_value(option, val))
                value = values
            else:
                value = convert_value(option, value)
            setattr(config, var, value)
        config_save_values(config, namespace)
    except PropertiesOptionError, err:
        if options.debug:
            log.debug(err, exc_info=True)
        print_red(u"Erreur de propriété : {0}".format(err))
        exit(1)
    except ValueError, err:
        if options.debug:
            log.debug(err, exc_info=True)
        print_red("Valeur invalide : {0}".format(err))
        exit(1)
    except AttributeError:
        if options.debug:
            log.debug("AttributeError", exc_info=True)
        print_red("Nom de variable inconnue : {0}".format(options.variable[0]))
        exit(1)
    except Exception, err:
        if options.debug:
            log.debug(err, exc_info=True)
        print_red("Erreur inconnue : {0}".format(err))
        exit(1)

if __name__ == '__main__':
    main()
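Illustrative calls matching the parser above (the variable names are examples; for multi-valued variables the value is newline-separated within a single argument):

# Assign a value to a creole variable
CreoleSet ma_variable nouvelle_valeur

# Reset a variable to its default value
CreoleSet --default ma_variable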
454  bin/Maj-Auto
@@ -1,454 +0,0 @@
#! /usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
##########################################################################
|
||||
# Maj-Auto - Manage automatique update of EOLE server
|
||||
# Copyright © 2013 Pôle de compétences EOLE <eole@ac-dijon.fr>
|
||||
#
|
||||
# License CeCILL:
|
||||
# * in french: http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# * in english http://www.cecill.info/licences/Licence_CeCILL_V2-en.html
|
||||
##########################################################################
|
||||
|
||||
import sys
|
||||
import argparse
|
||||
import atexit
|
||||
import time
|
||||
import locale
|
||||
|
||||
from os import unlink, environ, system
|
||||
from subprocess import Popen, PIPE
|
||||
from os.path import basename, isfile
|
||||
|
||||
from creole import reconfigure, fonctionseole
|
||||
from creole.client import CreoleClient, TimeoutCreoleClientError, NotFoundError, CreoleClientError
|
||||
from creole.error import UserExit, UserExitError
|
||||
|
||||
from creole.eoleversion import EOLE_RELEASE, LAST_RELEASE, EOLE_VERSION
|
||||
|
||||
from pyeole.lock import acquire, release, is_locked
|
||||
from pyeole.log import init_logging, set_formatter
|
||||
from pyeole.ihm import question_ouinon, only_root, catch_signal
|
||||
from pyeole.encode import normalize
|
||||
|
||||
from pyeole.pkg import EolePkg, _configure_sources_mirror, report
|
||||
|
||||
from pyeole.diagnose import test_tcp
|
||||
from pyeole import scriptargs
|
||||
|
||||
from pyeole.i18n import i18n
|
||||
|
||||
_ = i18n('creole')
|
||||
|
||||
#import logging
|
||||
|
||||
log = None
|
||||
|
||||
only_root()
|
||||
|
||||
try:
|
||||
# FIXME : refactorer le système de lock de zephir-client (ref #6660)
|
||||
from zephir.lib_zephir import lock, unlock
|
||||
zephir_libs = True
|
||||
except Exception:
|
||||
zephir_libs = False
|
||||
|
||||
def release_lock():
|
||||
if zephir_libs:
|
||||
unlock('maj')
|
||||
if is_locked('majauto', level='system'):
|
||||
release('majauto', level='system')
|
||||
|
||||
def user_exit(*args, **kwargs):
|
||||
"""
|
||||
sortie utilisateur "propre"
|
||||
"""
|
||||
log.warn(_(u'! Abandoning configuration !'))
|
||||
log.warn(_(u'System may be in an incoherent state.\n\n'))
|
||||
raise UserExitError()
|
||||
|
||||
|
||||
def parse_cmdline():
|
||||
"""Parse commande line.
|
||||
"""
|
||||
parser = argparse.ArgumentParser(prog='Maj-Auto|Query-Auto',
|
||||
description=_(u"Manage EOLE server automatic update"),
|
||||
parents=[scriptargs.logging('info')],
|
||||
add_help=False)
|
||||
|
||||
parser.add_argument('-h', '--help',
|
||||
action='help',
|
||||
help=_(u"show this help message and exit"))
|
||||
parser.add_argument('-n', '--dry-run',
|
||||
action='store_true',
|
||||
help=_(u"run in dry-run mode (force to True when using Query-Auto)."))
|
||||
parser.add_argument('-f', '--force',
|
||||
action='store_true',
|
||||
help=_(u"bypass Zephir authorizations."))
|
||||
parser.add_argument('-F', '--force-update',
|
||||
action='store_true',
|
||||
help=_(u"update your server without any confirmation."))
|
||||
|
||||
parser.add_argument('-s', '--simulate',
|
||||
action='store_true',
|
||||
help=_(u"ask apt-get to simulate packages installation"))
|
||||
|
||||
# Level of upgrade
|
||||
maj_level = parser.add_mutually_exclusive_group()
|
||||
maj_level.add_argument('-C', '--candidat', default=False,
|
||||
action='store', nargs='*',
|
||||
choices=['eole', 'envole'],
|
||||
help=_(u"use testing packages."))
|
||||
maj_level.add_argument('-D', '--devel', default=False,
|
||||
action='store', nargs='*',
|
||||
choices=['eole', 'envole'],
|
||||
help=_(u"use development packages."))
|
||||
|
||||
parser.add_argument('--release',
|
||||
help=argparse.SUPPRESS)
|
||||
|
||||
# Action when upgrade is OK
|
||||
parser.add_argument('-r', '--reconfigure',
|
||||
action='store_true',
|
||||
help=_(u"run reconfigure on successful upgrade."))
|
||||
|
||||
parser.add_argument('-R', '--reboot',
|
||||
action='store_true',
|
||||
help=_(u"run reconfigure on successful upgrade and reboot if necessary (implies -r)."))
|
||||
parser.add_argument('--download', action='store_true',
|
||||
help=_(u'only download packages in cache.'))
|
||||
# Mirror selection
|
||||
parser.add_argument('-S', '--eole-mirror',
|
||||
help=_(u"EOLE repository server."))
|
||||
parser.add_argument('-U', '--ubuntu-mirror',
|
||||
help=_(u"Ubuntu repository server."))
|
||||
parser.add_argument('-V', '--envole-mirror',
|
||||
help=_(u"Envole repository server."))
|
||||
parser.add_argument('-c', '--cdrom', action="store_true",
|
||||
help=_(u"use CDROM as source."))
|
||||
|
||||
# sortie EAD
|
||||
parser.add_argument('-W', action='store_true',
|
||||
help=_(u"specific output for EAD."))
|
||||
# mode sans creoled
|
||||
parser.add_argument('-i', '--ignore', action='store_true',
|
||||
help=_(u"ignore local configuration if creoled not responding."))
|
||||
|
||||
|
||||
opts = parser.parse_args()
|
||||
|
||||
if getattr(opts, 'level', None) is None:
|
||||
opts.level = u'updates'
|
||||
if opts.verbose:
|
||||
opts.log_level = 'info'
|
||||
if opts.debug:
|
||||
opts.log_level = 'debug'
|
||||
|
||||
if opts.reboot:
|
||||
opts.reconfigure = True
|
||||
|
||||
return opts
|
||||
|
||||
|
||||
def main():
|
||||
global log
|
||||
opts = parse_cmdline()
|
||||
if opts.W:
|
||||
# variable set for pyeole.ansiprint
|
||||
environ['ModeTxt'] = 'yes'
|
||||
reporting = not (opts.dry_run or opts.simulate or opts.download)
|
||||
if not reporting:
|
||||
z_proc = 'QUERY-MAJ'
|
||||
log = init_logging(name=basename(sys.argv[0]), level=opts.log_level)
|
||||
pkg_log = init_logging(name='pyeole.pkg', level=opts.log_level)
|
||||
diag_log = init_logging(name='pyeole.diagnose', level=opts.log_level)
|
||||
else:
|
||||
z_proc = 'MAJ'
|
||||
report_file = '/var/lib/eole/reports/rapport-maj.log'
|
||||
if isfile(report_file):
|
||||
unlink(report_file)
|
||||
log = init_logging(name=basename(sys.argv[0]), level=opts.log_level,
|
||||
filename=report_file)
|
||||
pkg_log = init_logging(name='pyeole.pkg', level=opts.log_level,
|
||||
filename=report_file)
|
||||
diag_log = init_logging(name='pyeole.diagnose', level=opts.log_level,
|
||||
filename=report_file)
|
||||
set_formatter(log, u'file', u'brief')
|
||||
set_formatter(log, u'file', u'with-levelname-date')
|
||||
set_formatter(pkg_log, u'file', u'with-levelname-date')
|
||||
set_formatter(diag_log, u'file', u'with-levelname-date')
|
||||
report(2)
|
||||
locale.setlocale(locale.LC_TIME, "fr_FR.utf8")
|
||||
log.info(_(u'Update at {0}').format(time.strftime("%A %d %B %Y %H:%M:%S")))
|
||||
raised_err = None
|
||||
error_msg = None
|
||||
try:
|
||||
# gestion du ctrl+c
|
||||
catch_signal(user_exit)
|
||||
acquire('majauto', level='system')
|
||||
atexit.register(release_lock)
|
||||
client = CreoleClient()
|
||||
eole_level = 'stable'
|
||||
envole_level = 'stable'
|
||||
try:
|
||||
version = client.get_creole('eole_release')
|
||||
except (TimeoutCreoleClientError, NotFoundError, CreoleClientError) as err:
|
||||
if opts.ignore:
|
||||
version = EOLE_RELEASE
|
||||
else:
|
||||
raise err
|
||||
if opts.candidat is not False:
|
||||
z_level = " en candidate"
|
||||
# Gestion du niveau par dépôt (16110)
|
||||
if len(opts.candidat) == 0:
|
||||
# Si on ne précise aucun dépôt tout le monde va en candidat
|
||||
eole_level = 'proposed'
|
||||
envole_level = 'proposed'
|
||||
else:
|
||||
# Sinon on vérifie dépôt par dépôt, les dépôts non précisés restent en stable
|
||||
if 'eole' in opts.candidat:
|
||||
eole_level = 'proposed'
|
||||
if 'envole' in opts.candidat:
|
||||
envole_level = 'proposed'
|
||||
elif opts.devel is not False:
|
||||
z_level = " en devel"
|
||||
# Gestion du niveau par dépôt (16110)
|
||||
if len(opts.devel) == 0:
|
||||
# Si on ne précise aucun dépôt tout le monde vas en candidat
|
||||
eole_level = 'unstable'
|
||||
envole_level = 'unstable'
|
||||
else:
|
||||
# Sinon on vérifie dépôt par dépôt, les dépôts non précisés restent en stable
|
||||
if 'eole' in opts.devel:
|
||||
eole_level = 'unstable'
|
||||
if 'envole' in opts.devel:
|
||||
envole_level = 'unstable'
|
||||
else:
|
||||
z_level = ""
|
||||
if opts.release:
|
||||
current_release = int(EOLE_RELEASE.split('.')[-1])
|
||||
new_release = opts.release.split('.')
|
||||
if len(new_release) != 3 or \
|
||||
u'.'.join(new_release[0:2]) != EOLE_VERSION or \
|
||||
int(new_release[2]) not in range(current_release+1, int(LAST_RELEASE) + 1):
|
||||
raise Exception(_('Unknown release number'))
|
||||
z_level += " en {0}".format(opts.release)
|
||||
version = opts.release
|
||||
if opts.cdrom:
|
||||
z_level += " via le CDROM"
|
||||
#distro = 'stable'
|
||||
fonctionseole.zephir("INIT", "Début{0}".format(z_level), z_proc)
|
||||
if zephir_libs and not fonctionseole.init_proc('MAJ'):
|
||||
if opts.force:
|
||||
fonctionseole.zephir("MSG",
|
||||
"Mise à jour forcée par l'utilisateur",
|
||||
z_proc)
|
||||
else:
|
||||
log.warn(_(u"Update is locked, please contact Zéphir administrator"))
|
||||
log.warn(_(u"Use -f option if you want to force execution"))
|
||||
raise UserExitError()
|
||||
lock('maj')
|
||||
PKGMGR = EolePkg('apt', ignore=opts.ignore)
|
||||
if opts.dry_run:
|
||||
PKGMGR.set_option('APT::Get::Simulate', 'true')
|
||||
|
||||
try:
|
||||
module = client.get_creole('eole_module')
|
||||
except (TimeoutCreoleClientError, NotFoundError, CreoleClientError) as err:
|
||||
if opts.ignore:
|
||||
module = 'module'
|
||||
else:
|
||||
raise err
|
||||
try:
|
||||
uai = client.get_creole('numero_etab')
|
||||
except (TimeoutCreoleClientError, NotFoundError, CreoleClientError) as err:
|
||||
if opts.ignore:
|
||||
uai = None
|
||||
else:
|
||||
raise err
|
||||
|
||||
head = "*** {0} {1}"
|
||||
if uai:
|
||||
head += " ({2})"
|
||||
head += " ***\n"
|
||||
|
||||
log.info(head.format(module, version, uai))
|
||||
|
||||
if not opts.force_update:
|
||||
raising_level = u''
|
||||
if opts.release:
|
||||
raising_level = _(u"(CHANGE RELEASE LEVEL)")
|
||||
elif u'unstable' in [eole_level, envole_level]:
|
||||
raising_level = _(u"(UNSTABLE VERSION)")
|
||||
elif u'proposed' in [eole_level, envole_level]:
|
||||
raising_level = _(u"(TESTING VERSION)")
|
||||
|
||||
if raising_level != u'':
|
||||
log.warn(_(u"{0} - Raising update level may prevent "
|
||||
u"lowering back to stable version.").format(raising_level))
|
||||
try:
|
||||
assert question_ouinon(_(u"Do you wish to proceed?")) == 'oui'
|
||||
fonctionseole.zephir("MSG",
|
||||
"Mise à jour{0} forcée par l'utilisateur".format(z_level),
|
||||
z_proc)
|
||||
except (AssertionError, EOFError) as err:
|
||||
log.warn(_(u"Cancelling!"))
|
||||
raise UserExit()
|
||||
|
||||
PKGMGR.check()
|
||||
|
||||
#serveurs à utiliser pour les dépôts Ubuntu et EOLE
|
||||
_configure_sources_mirror(PKGMGR.pkgmgr, ubuntu=opts.ubuntu_mirror,
|
||||
eole=opts.eole_mirror, envole=opts.envole_mirror,
|
||||
ignore=opts.ignore, cdrom=opts.cdrom,
|
||||
release=version, eole_level=eole_level,
|
||||
envole_level=envole_level)
|
||||
|
||||
|
||||
PKGMGR.update(silent=True)
|
||||
upgrades = PKGMGR.get_upgradable_list()
|
||||
|
||||
install = 0
|
||||
upgrade = 0
|
||||
delete = 0
|
||||
for container, packages in upgrades.items():
|
||||
if not packages:
|
||||
continue
|
||||
for name, isInstalled, candidateVersion in packages:
|
||||
if isInstalled:
|
||||
if candidateVersion is None:
|
||||
delete += 1
|
||||
else:
|
||||
upgrade += 1
|
||||
else:
|
||||
install += 1
|
||||
|
||||
total_pkg = install+upgrade
|
||||
|
||||
headers = []
|
||||
if total_pkg == 0:
|
||||
log.info(_(u"Update successful."))
|
||||
log.info(_(u"Nothing to install."))
|
||||
fonctionseole.zephir("FIN",
|
||||
"Aucun paquet à installer{0}".format(z_level),
|
||||
z_proc)
|
||||
if reporting:
|
||||
report(3)
|
||||
sys.exit(0)
|
||||
|
||||
headers.append(_(u"{0} new,", u"{0} news,", install).format(install))
|
||||
headers.append(_(u"{0} upgrade,", u"{0} upgrades,", upgrade).format(upgrade))
|
||||
headers.append(_(u"{0} delete", u"{0} deletes", delete).format(delete))
|
||||
log.info(' '.join(headers))
|
||||
|
||||
for line in PKGMGR.list_upgrade(upgrades=upgrades):
|
||||
log.info(line)
|
||||
|
||||
if opts.dry_run:
|
||||
fonctionseole.zephir("FIN",
|
||||
"{0} paquets à mettre à jour{1}".format(total_pkg, z_level),
|
||||
z_proc)
|
||||
sys.exit(0)
|
||||
|
||||
if opts.download:
|
||||
for container, packages in upgrades.items():
|
||||
if not packages:
|
||||
continue
|
||||
pkgs = []
|
||||
for name, isInstalled, candidateVersion in packages:
|
||||
pkgs.append(name)
|
||||
PKGMGR.fetch_archives(container=container, packages=pkgs)
|
||||
fonctionseole.zephir("FIN",
|
||||
"{0} paquets téléchargés{1}".format(total_pkg, z_level),
|
||||
z_proc)
|
||||
|
||||
elif opts.simulate:
|
||||
PKGMGR.dist_upgrade(simulate=opts.simulate)
|
||||
fonctionseole.zephir("FIN",
|
||||
"{0} paquets mis à jour (simulation){1}".format(total_pkg, z_level),
|
||||
z_proc)
|
||||
|
||||
else:
|
||||
PKGMGR.download_upgrade()
|
||||
PKGMGR.dist_upgrade(simulate=opts.simulate)
|
||||
log.info(_(u"Update successful."))
|
||||
fonctionseole.zephir("FIN",
|
||||
"{0} paquets mis à jour{1}".format(total_pkg, z_level),
|
||||
z_proc)
|
||||
if opts.release:
|
||||
ret_code = system('/usr/share/zephir/scripts/upgrade_distrib.py --auto')
|
||||
if ret_code != 0:
|
||||
error_msg = str('erreur à la mise à jour vers la release {0}'.format(opts.release))
|
||||
else:
|
||||
log.info(_('Upgrade post Maj-Release, please wait'))
|
||||
release('majauto', level='system')
|
||||
cmd = ['/usr/bin/Maj-Auto', '-F']
|
||||
process = Popen(cmd, stdin=PIPE, stderr=PIPE, stdout=PIPE, shell=False)
|
||||
ret_code = process.wait()
|
||||
if ret_code != 0:
|
||||
error_msg = str(_('error in post maj release'))
|
||||
if opts.reconfigure:
|
||||
# rechargement des modules python (#7832)
|
||||
# cf. http://code.activestate.com/recipes/81731-reloading-all-modules/
|
||||
if globals().has_key('init_modules'):
|
||||
for m in [x for x in sys.modules.keys() if x not in init_modules]:
|
||||
del(sys.modules[m])
|
||||
else:
|
||||
init_modules = sys.modules.keys()
|
||||
fonctionseole.zephir("MSG",
|
||||
"Reconfiguration automatique",
|
||||
z_proc)
|
||||
elif not opts.release:
|
||||
log.warn(_(u"At least one packages has been updated,"
|
||||
u" use command [reconfigure] to apply modifications."))
|
||||
fonctionseole.zephir("MSG",
|
||||
"Reconfiguration du serveur à planifier",
|
||||
z_proc)
|
||||
|
||||
except (UserExit, UserExitError) as err:
|
||||
if reporting:
|
||||
report(1, 'Stopped by user')
|
||||
fonctionseole.zephir("FIN", "Abandon par l'utilisateur", z_proc)
|
||||
sys.exit(1)
|
||||
|
||||
except (TimeoutCreoleClientError, NotFoundError, CreoleClientError) as err:
|
||||
clue = _(". If restarting creoled service does not help, try {} command with '-i' option.")
|
||||
error_msg = str(err) + clue.format('Query-Auto' if opts.dry_run else 'Maj-Auto')
|
||||
raised_err = err
|
||||
|
||||
except Exception as err:
|
||||
error_msg = str(err)
|
||||
raised_err = err
|
||||
else:
|
||||
if reporting:
|
||||
report(0, reconf=opts.reconfigure)
|
||||
|
||||
if error_msg is not None:
|
||||
fonctionseole.zephir("ERR", error_msg, z_proc, console=False)
|
||||
if reporting:
|
||||
if raised_err is not None:
|
||||
report(1, normalize(err))
|
||||
else:
|
||||
report(1, error_msg)
|
||||
if log is None:
|
||||
# No logger defined, error in argument parsing
|
||||
raise
|
||||
if opts.log_level == 'debug' and raised_err is not None:
|
||||
log.error(err, exc_info=True)
|
||||
else:
|
||||
log.error(error_msg)
|
||||
sys.exit(1)
|
||||
|
||||
if opts.reconfigure:
|
||||
try:
|
||||
reconfigure.main(force_options={'auto': opts.reboot, 'log_level': opts.log_level},
|
||||
force_args=[], need_lock=False)
|
||||
except Exception as err:
|
||||
fonctionseole.zephir("ERR", str(err), z_proc, console=False)
|
||||
if reporting:
|
||||
report(1, normalize(err))
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@@ -1,3 +0,0 @@
#!/bin/bash

Maj-Auto --cdrom $@
116  bin/Maj-Release
@@ -1,116 +0,0 @@
#! /usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
##########################################################################
|
||||
# Maj-Auto - Manage automatique update of EOLE server
|
||||
# Copyright © 2015 Pôle de compétences EOLE <eole@ac-dijon.fr>
|
||||
#
|
||||
# License CeCILL:
|
||||
# * in french: http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# * in english http://www.cecill.info/licences/Licence_CeCILL_V2-en.html
|
||||
##########################################################################
|
||||
|
||||
from os import system
|
||||
from sys import exit
|
||||
import re
|
||||
from creole.eoleversion import EOLE_RELEASE, LAST_RELEASE, EOLE_VERSION
|
||||
from pyeole.i18n import i18n
|
||||
from pyeole.ihm import print_red
|
||||
|
||||
import argparse
|
||||
from pyeole import scriptargs
|
||||
|
||||
_ = i18n('creole')
|
||||
|
||||
def parse_cmdline():
|
||||
"""Parse commande line.
|
||||
"""
|
||||
description = _(u"This script will upgrade to a new release of this distribution")
|
||||
parser = argparse.ArgumentParser(prog='Maj-Release',
|
||||
description=description,
|
||||
add_help=False)
|
||||
|
||||
parser.add_argument('-h', '--help',
|
||||
action='help',
|
||||
help=_(u"show this help message and exit"))
|
||||
|
||||
parser.add_argument('--release', help=_(u"Target release number"))
|
||||
|
||||
parser.add_argument('-f', '--force', action='store_true',
|
||||
help=_(u"Do not ask confirmation"))
|
||||
|
||||
opts = parser.parse_args()
|
||||
|
||||
return opts
|
||||
|
||||
|
||||
def main():
|
||||
opts = parse_cmdline()
|
||||
|
||||
print(_(u"This script will upgrade to a new release of this distribution"))
|
||||
all_releases = []
|
||||
current_release = int(EOLE_RELEASE.split('.')[-1])
|
||||
choices = range(current_release+1, int(LAST_RELEASE)+1)
|
||||
# Last is firt displayed
|
||||
if choices == []:
|
||||
print_red(_(u"No stable new release available"))
|
||||
exit(1)
|
||||
choices.reverse()
|
||||
for release_suffix in choices:
|
||||
all_releases.append(EOLE_VERSION + '.' + str(release_suffix))
|
||||
|
||||
while True:
|
||||
if opts.release is not None:
|
||||
choice = opts.release
|
||||
else:
|
||||
for idx, release in enumerate(all_releases):
|
||||
print("{0}: {1}".format(idx+1, release))
|
||||
print(_(u"q|quit: abort"))
|
||||
|
||||
try:
|
||||
choice = raw_input("[1] : ")
|
||||
except (KeyboardInterrupt, EOFError):
|
||||
print_red(_("\nUpgrade aborted by user"))
|
||||
exit(0)
|
||||
|
||||
if choice == '':
|
||||
# User hit enter
|
||||
choice = 1
|
||||
elif choice in all_releases:
|
||||
# User entrer release number
|
||||
choice = all_releases.index(choice) + 1
|
||||
else:
|
||||
try:
|
||||
choice = int(choice)
|
||||
except ValueError:
|
||||
if re.match(r'^q(uit)?', choice):
|
||||
print_red(_(u"Voluntary stay of proceedings"))
|
||||
exit(0)
|
||||
else:
|
||||
print_red(_(u"Invalid response: {0}").format(choice))
|
||||
if opts.release is not None:
|
||||
exit(1)
|
||||
else:
|
||||
continue
|
||||
|
||||
if not 1 <= choice <= len(choices):
|
||||
print_red(_(u"Invalid response: {0}").format(choice))
|
||||
if opts.release is not None:
|
||||
exit(1)
|
||||
else:
|
||||
continue
|
||||
else:
|
||||
break
|
||||
|
||||
release = all_releases[choice - 1]
|
||||
if opts.force:
|
||||
force = '--force-update'
|
||||
else:
|
||||
force = ''
|
||||
|
||||
majrel = system('/usr/bin/Maj-Auto --release {0} {1}'.format(release, force))
|
||||
|
||||
exit(majrel)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@@ -1,3 +0,0 @@
#!/bin/bash

Maj-Auto --dry-run $@
@@ -1,3 +0,0 @@
#!/bin/bash

Maj-Cd --dry-run $@
@@ -1,7 +0,0 @@
#!/bin/sh

# Stop all services
CreoleService all stop

# Start only enabled ones
CreoleService all start
@@ -1,5 +0,0 @@
#!/bin/bash

echo "La commande Upgrade-Auto ne permet plus de changer de sous-version du serveur EOLE."
echo "Merci d'utiliser la commande Maj-Release à la place."
exit 1
65  bin/diagnose
@@ -1,65 +0,0 @@
#!/bin/bash
|
||||
###########################################################################
|
||||
# Eole NG - 2007
|
||||
# Copyright Pole de Competence Eole (Ministere Education - Academie Dijon)
|
||||
# Licence CeCill cf /root/LicenceEole.txt
|
||||
# eole@ac-dijon.fr
|
||||
#
|
||||
# diagnose
|
||||
#
|
||||
# Verifie l'instanciation d'un serveur
|
||||
#
|
||||
###########################################################################
|
||||
. /usr/lib/eole/ihm.sh
|
||||
. /usr/lib/eole/utils.sh
|
||||
|
||||
only_root
|
||||
|
||||
CREOLE_FILE="/etc/eole/config.eol"
|
||||
RELEASE_FILE="/etc/eole/release"
|
||||
DIAG_DIR="/usr/share/eole/diagnose"
|
||||
err_prefix="Diagnose impossible"
|
||||
|
||||
TestFile $CREOLE_FILE
|
||||
if [ ${?} -eq 1 ]
|
||||
then
|
||||
EchoRouge "${err_prefix} : le serveur n'est pas instancié"
|
||||
exit 1
|
||||
fi
|
||||
TestFile $RELEASE_FILE
|
||||
if [ ${?} -eq 1 ]
|
||||
then
|
||||
EchoRouge "${err_prefix} : le serveur n'est pas instancié"
|
||||
exit
|
||||
fi
|
||||
TestDir $DIAG_DIR
|
||||
if [ ${?} -eq 1 ]
|
||||
then
|
||||
EchoRouge "${err_prefix} : pas script diagnose disponible"
|
||||
exit 1
|
||||
fi
|
||||
TestCreoled
|
||||
if [ ${?} -eq 1 ]
|
||||
then
|
||||
EchoRouge "${err_prefix} : creoled est arrêté"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
Opt=""
|
||||
while getopts "LWT" Option
|
||||
do
|
||||
case $Option in
|
||||
L ) export Verbose="yes";;
|
||||
W ) export ModeEad="yes";;
|
||||
T ) export ModeTxt="yes";;
|
||||
* ) exit 1;;
|
||||
esac
|
||||
done
|
||||
|
||||
. $RELEASE_FILE
|
||||
DETAILS="$(CreoleGet nom_machine) $(CreoleGet numero_etab)"
|
||||
[ -z "$EOLE_RELEASE" ] && EOLE_RELEASE=$EOLE_VERSION
|
||||
EchoGras "*** Test du module $EOLE_MODULE version $EOLE_RELEASE ($DETAILS) ***"
|
||||
echo
|
||||
run-parts $DIAG_DIR
|
||||
EchoGras "*** FIN DU DIAGNOSTIC ***"
|
|
@@ -1,27 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 -*-

from os import listdir, system, chdir
from os.path import isfile, join, basename
from creole import config

modif_dir = basename(config.modif_dir)
distrib_dir = basename(config.distrib_dir)
patch_dir = basename(config.patch_dir)

system('clear')

# on travaille dans le répertoire eole
chdir(config.eoleroot)

print "** Génération des patches à partir de %s **\n" % modif_dir
for modfile in listdir(modif_dir):
    if modfile.endswith('~'):
        continue
    if not isfile(join(distrib_dir, modfile)):
        print "ATTENTION : le fichier original %s n'existe pas !" % join(distrib_dir, modfile)
        continue
    print "Génération du patch %s.patch" % modfile
    system("diff -uNr %s %s > %s.patch" % (join(distrib_dir,modfile), join(modif_dir,modfile), join(patch_dir,modfile)))

print "\n** Fin de la génération des patch **\n"
137  bin/gen_rpt
@@ -1,137 +0,0 @@
#!/bin/bash
|
||||
###########################################################################
|
||||
# EOLE - 2010
|
||||
# Copyright Pole de Competence Eole (Ministere Education - Academie Dijon)
|
||||
# Licence CeCill cf /root/LicenceEole.txt
|
||||
# eole@ac-dijon.fr
|
||||
#
|
||||
# gen_rpt
|
||||
#
|
||||
# Génère un rapport d'anomalie
|
||||
#
|
||||
###########################################################################
|
||||
|
||||
TestConf()
|
||||
{
|
||||
[ -e "$1" ] && return 0
|
||||
tput setaf 3
|
||||
echo "* Erreur $0 : le fichier de configuration $1 absent"
|
||||
echo "* Instanciez votre serveur"
|
||||
tput sgr0
|
||||
exit 1
|
||||
}
|
||||
|
||||
clear
|
||||
|
||||
. /usr/lib/eole/ihm.sh
|
||||
. /usr/lib/eole/utils.sh
|
||||
|
||||
only_root
|
||||
|
||||
numero_etab=$(CreoleGet numero_etab)
|
||||
CONFIGEOL='/etc/eole/config.eol'
|
||||
EOLEDIRS="/usr/share/eole/creole/dicos"
|
||||
PATCHDIR="/usr/share/eole/creole/patch"
|
||||
TestConf $CONFIGEOL
|
||||
EOLERELEASE="/etc/eole/release"
|
||||
if [ ! -e $EOLERELEASE ]; then
|
||||
EchoRouge "Fichier $EOLERELEASE est introuvable"
|
||||
exit 1
|
||||
fi
|
||||
. $EOLERELEASE
|
||||
Module="${EOLE_MODULE}-${EOLE_VERSION}"
|
||||
echo "Récupération des informations ..."
|
||||
RepRpt="/tmp/GenRpt"
|
||||
rm -fr $RepRpt 2> /dev/null
|
||||
mkdir $RepRpt
|
||||
mkdir $RepRpt/log
|
||||
mkdir $RepRpt/eole
|
||||
mkdir $RepRpt/system
|
||||
Rpt=$RepRpt"/Rpt-"$Module"-"$numero_etab
|
||||
Mel="eole@ac-dijon.fr"
|
||||
|
||||
# les fichiers texte
|
||||
echo "Config.eol"
|
||||
/bin/cp -f $CONFIGEOL $RepRpt/eole
|
||||
echo "Diagnose"
|
||||
/usr/bin/diagnose -LT >> $RepRpt/diagnose.txt 2>&1
|
||||
echo Pstree
|
||||
pstree >> $RepRpt/system/pstree.txt 2>&1
|
||||
echo Lshw
|
||||
lshw >> $RepRpt/system/lshw.txt 2>&1
|
||||
echo Lsusb
|
||||
lsusb >> $RepRpt/system/lsusb.txt 2>&1
|
||||
echo Lspci
|
||||
lspci >> $RepRpt/system/lspci.txt 2>&1
|
||||
echo Iptables
|
||||
iptables -nvL > $RepRpt/system/iptables.txt 2>&1
|
||||
iptables -nvL -t nat >> $RepRpt/system/iptables.txt 2>&1
|
||||
echo History
|
||||
grep -v "^#" /root/.bash_history > $RepRpt/system/history.txt
|
||||
echo Paquets
|
||||
dpkg-query -W > $RepRpt/system/packages.txt 2>&1
|
||||
# les gz
|
||||
echo Syslog
|
||||
for log in rsyslog su sudo kernel cron auth chpasswd exim ; do
|
||||
[ -d /var/log/rsyslog/local/$log ] && gzip -rc /var/log/rsyslog/local/$log > $RepRpt/log/$log.gz
|
||||
done
|
||||
echo Dmesg
|
||||
dmesg > $RepRpt/log/dmesg.log 2>&1
|
||||
gzip $RepRpt/log/dmesg.log
|
||||
echo Creole.log
|
||||
gzip -c /var/log/reconfigure.log > $RepRpt/log/reconfigure.log.gz
|
||||
echo Dicos
|
||||
gzip -rc $EOLEDIRS > $RepRpt/eole/dicos.gz
|
||||
echo Patch
|
||||
gzip -rc $PATCHDIR > $RepRpt/eole/patch.gz
|
||||
echo Stats
|
||||
gzip -rc /usr/share/zephir/monitor/stats > $RepRpt/stats.gz
|
||||
|
||||
# spécifique Scribe
|
||||
if [ -f /var/www/ead/extraction/tmp/rapport.txt ];then
|
||||
echo "Rapport d'extraction"
|
||||
gzip -rc /var/www/ead/extraction/tmp/rapport.txt > $RepRpt/log/extraction.log.gz
|
||||
fi
|
||||
if [ -f /var/log/controle-vnc/main.log ];then
|
||||
echo 'Log client scribe'
|
||||
gzip -rc /var/log/controle-vnc/main.log > $RepRpt/log/controle-vnc.log.gz
|
||||
fi
|
||||
|
||||
# spécifique Scribe/Horus/Eclair
|
||||
if [ -d /var/lib/eole/reports ];then
|
||||
echo "Rapport (sauvegarde/maj/...)"
|
||||
gzip -rc /var/lib/eole/reports > $RepRpt/log/rapport.log.gz
|
||||
fi
|
||||
|
||||
# spécifique Amon
|
||||
if [ -f '/usr/share/eole/test-rvp' ];then
|
||||
echo 'Rvp'
|
||||
/usr/sbin/ipsec status &> $RepRpt/ipsec.status 2>&1
|
||||
fi
|
||||
|
||||
# Rapport debsums
|
||||
if [ -x '/usr/share/eole/debsums/show-reports.py' ]; then
|
||||
echo "Rapport debsums"
|
||||
/usr/share/eole/debsums/show-reports.py > ${RepRpt}/log/rapport-debsums.log 2>&1
|
||||
fi
|
||||
|
||||
echo
|
||||
Archive=$Module-$numero_etab".tar.gz"
|
||||
echo "Création de l'archive locale $Archive"
|
||||
tar -C /tmp -czf $Archive GenRpt
|
||||
echo
|
||||
|
||||
Question_ouinon "Envoyer l'archive par email ?"
|
||||
if [ $? -eq 1 ];then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "Destinataire du message : "
|
||||
echo -n "[$Mel] : "
|
||||
read mail
|
||||
if [ "$mail" == "" ];then
|
||||
mail=$Mel
|
||||
fi
|
||||
echo -n "Commentaire : "
|
||||
read comment
|
||||
echo "$comment"|mutt -a $Archive -s "Rapport $Module de $numero_etab" -c $mail -e "set copy=no"
|
24  bin/instance
@@ -1,24 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-

"""Application de la configuration EOLE
"""


import sys
from creole.reconfigure import main
from creole.error import UserExitError, LockError, UnlockError, UserExit
from pyeole.ihm import only_root

only_root()

try:
    # Force interactive mode
    main(force_options={'interactive': True})
except (UserExitError, LockError, UnlockError):
    sys.exit(1)
except UserExit:
    sys.exit(0)
except:
    #FIXME: log & affichage géré au raise ?
    sys.exit(1)
173  bin/manage-eole
@@ -1,173 +0,0 @@
#!/bin/bash
|
||||
##########################################################
|
||||
#
|
||||
# Eole NG - 2010
|
||||
# Copyright Pole de Competence Eole (Ministere Education - Academie Dijon)
|
||||
# Licence CeCill http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# eole@ac-dijon.fr
|
||||
#
|
||||
# Gestion des modules en mode dialogue
|
||||
# avec les comptes système eole, eole2
|
||||
#
|
||||
##########################################################
|
||||
|
||||
# un seul manage ?
|
||||
pmanage=`pgrep manage-eole`
|
||||
nbmanage=`echo $pmanage | wc -w`
|
||||
|
||||
# fichiers temporaires
|
||||
temp="/tmp/InBox-Eol-"
|
||||
tempfile="$temp$$"
|
||||
|
||||
TitreGen="Eole - Gestion du Serveur"
|
||||
|
||||
##########################################################
|
||||
# Fonctions reprises de FonctionsEole
|
||||
##########################################################
|
||||
|
||||
MenuBox()
|
||||
{
|
||||
#${1="Votre Saisie"}
|
||||
#${2="Saisie"}
|
||||
NBlignes=${NBlignes=5}
|
||||
Menu="$3"
|
||||
dialog $NOMOUSE1 --backtitle "$TitreGen" \
|
||||
--aspect 45 --clear \
|
||||
--menu "$1" 16 50 $NBlignes \
|
||||
$Menu 2> $tempfile
|
||||
retval=$?
|
||||
case $retval in
|
||||
0)
|
||||
eval $2="`cat $tempfile`";;
|
||||
1) # Cancel
|
||||
eval $2="CANCEL";;
|
||||
255) # ESC
|
||||
if test -s $tempfile ;
|
||||
then
|
||||
eval $2=`cat $tempfile`
|
||||
else
|
||||
eval $2="ESC"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
OkBox()
|
||||
{
|
||||
dialog $NOMOUSE1 --backtitle "$TitreGen" \
|
||||
--aspect 45 --cancel-label Abandon\
|
||||
--msgbox "$1" 0 0
|
||||
}
|
||||
|
||||
QuestionBox()
|
||||
{
|
||||
#${1=Votre Saisie"}
|
||||
#${2="Saisie"}
|
||||
dialog $NOMOUSE1 --backtitle "$TitreGen" \
|
||||
--aspect 45 --clear \
|
||||
--yesno "$1" 16 50
|
||||
retval=$?
|
||||
case $retval in
|
||||
0)
|
||||
eval $2="OUI";;
|
||||
1) # Cancel
|
||||
eval $2="NON";;
|
||||
255) # ESC
|
||||
eval $2="ESC" ;;
|
||||
esac
|
||||
}
|
||||
|
||||
Entree(){
|
||||
echo
|
||||
echo "Tapez <Entrée>"
|
||||
read Bidon
|
||||
}
|
||||
|
||||
CleanExit(){
|
||||
echo "Au revoir !"
|
||||
rm -f $tempfile
|
||||
exit $1
|
||||
}
|
||||
|
||||
##########################################################
|
||||
# Programme principal
|
||||
##########################################################
|
||||
|
||||
if [ $nbmanage -gt 1 ]
|
||||
then
|
||||
MenuBox "D'autres instances de manage-eole ont été détectées" Rep "1 Quitter_sans_tuer 2 Quitter_et_tuer"
|
||||
rm -f "$temp*"
|
||||
if [ "$Rep" == "2" ]
|
||||
then
|
||||
for pid in $pmanage
|
||||
do
|
||||
kill -9 $pid
|
||||
done
|
||||
fi
|
||||
CleanExit 0
|
||||
fi
|
||||
|
||||
OkBox "Administration EOLE\n\nPour Vous Deplacer sur l'Ecran\nUtiliser votre Souris\nOu la touche tabulation.\n\n"
|
||||
|
||||
Rep=""
|
||||
while [ 1 ]
|
||||
do
|
||||
# FIXME/TODO : ajouter des entrées de menu !
|
||||
MenuBox "Votre Choix" Rep "1 Diagnostic 2 Reconfiguration 3 Paquets_en_Maj 4 Mise_A_Jour 8 Redemarrer_Serveur 9 Arret_Serveur ! Shell_Linux Q Quitter"
|
||||
|
||||
if [ "$Rep" == "CANCEL" ]
|
||||
then
|
||||
CleanExit 1
|
||||
fi
|
||||
|
||||
case $Rep in
|
||||
1)
|
||||
echo "En cours ..."
|
||||
sudo /usr/bin/diagnose
|
||||
Entree
|
||||
;;
|
||||
2)
|
||||
sudo /usr/bin/reconfigure
|
||||
Entree
|
||||
;;
|
||||
3)
|
||||
sudo /usr/bin/Query-Auto
|
||||
Entree
|
||||
;;
|
||||
4)
|
||||
sudo /usr/bin/Maj-Auto
|
||||
Entree
|
||||
;;
|
||||
# TODO : pouvoir inclure des entrées venant d'ailleurs ;)
|
||||
#5)
|
||||
#sudo /usr/share/eole/Maj-blacklist.sh
|
||||
#Entree
|
||||
#;;
|
||||
8)
|
||||
QuestionBox "Vous avez demandé le redémarrage du serveur\nEtes vous sur ?" Rep
|
||||
if [ "$Rep" == "OUI" ]
|
||||
then
|
||||
sudo /sbin/reboot
|
||||
sleep 1
|
||||
CleanExit 0
|
||||
fi
|
||||
;;
|
||||
9)
|
||||
QuestionBox "Vous avez demandé un arret total du serveur\nEtes vous sur ?" Rep
|
||||
if [ "$Rep" == "OUI" ]
|
||||
then
|
||||
sudo /sbin/halt -p
|
||||
sleep 1
|
||||
CleanExit 0
|
||||
fi
|
||||
;;
|
||||
!)
|
||||
echo "\"exit\" ou \"Ctrl + d\" pour revenir au Menu"
|
||||
/bin/bash
|
||||
;;
|
||||
Q)
|
||||
CleanExit 0
|
||||
;;
|
||||
|
||||
esac
|
||||
done
|
|
@@ -1,22 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-

"""Application de la configuration EOLE
"""

import sys
from creole.reconfigure import main
from creole.error import UserExitError, LockError, UnlockError, UserExit
from pyeole.ihm import only_root

only_root()

try:
    main()
except (UserExitError, LockError, UnlockError):
    sys.exit(1)
except UserExit:
    sys.exit(0)
except:
    #FIXME: log & affichage géré au raise ?
    sys.exit(1)
1147  creole/annotator.py
@@ -18,18 +18,6 @@ from .xmlreflector import HIGH_COMPATIBILITY
modes_level = ('basic', 'normal', 'expert')
|
||||
|
||||
|
||||
class secure_eosfunc:
|
||||
def __init__(self, eosfunc):
|
||||
self.eosfunc = eosfunc
|
||||
|
||||
def __getattribute__(self, func_name):
|
||||
if func_name == 'eosfunc':
|
||||
return super().__getattribute__('eosfunc')
|
||||
if func_name in self.eosfunc.func_on_zephir_context:
|
||||
return getattr(self.eosfunc)
|
||||
raise Exception(_('unknown or unauthorized function: {}'.format(func_name)))
|
||||
|
||||
|
||||
class Mode(object):
|
||||
def __init__(self, name, level):
|
||||
self.name = name
|
||||
|
@@ -66,9 +54,10 @@ modes = mode_factory()
|
|||
# a CreoleObjSpace's attribute has some annotations
|
||||
# that shall not be present in the exported (flatened) XML
|
||||
ERASED_ATTRIBUTES = ('redefine', 'exists', 'fallback', 'optional', 'remove_check', 'namespace',
|
||||
'remove_condition', 'path', 'instance_mode', 'index', 'is_in_master', '_real_container')
|
||||
'remove_condition', 'path', 'instance_mode', 'index', 'is_in_leadership') # , '_real_container')
|
||||
ERASED_CONTAINER_ATTRIBUTES = ('id', 'container', 'group_id', 'group', 'container_group')
|
||||
|
||||
NOT_NEED_ACTIVATE = ('package', 'disknod')
|
||||
NOT_NEED_ACTIVATE = ('disknod',)
|
||||
|
||||
FORCE_CHOICE = {'oui/non': ['oui', 'non'],
|
||||
'on/off': ['on', 'off'],
|
||||
|
@@ -86,85 +75,59 @@ KEY_TYPE = {'SymLinkOption': 'symlink',
|
|||
|
||||
TYPE_PARAM_CHECK = ('string', 'python', 'eole')
|
||||
TYPE_PARAM_CONDITION = ('string', 'python', 'number', 'eole')
|
||||
TYPE_PARAM_FILL = ('string', 'eole', 'number', 'container', 'context')
|
||||
TYPE_PARAM_FILL = ('string', 'eole', 'number', 'context')
|
||||
|
||||
DISKNOD_KEY_TYPE = {'major': 'number',
|
||||
'minor': 'number'}
|
||||
|
||||
ERASED_FAMILY_ACTION_ATTRIBUTES = ('index', 'action')
|
||||
|
||||
FREEZE_AUTOFREEZE_VARIABLE = 'module_instancie'
|
||||
|
||||
class ContainerAnnotator(object):
|
||||
|
||||
class ContainerAnnotator:
|
||||
"""Manage container's object
|
||||
"""
|
||||
def __init__(self, space, paths, objectspace):
|
||||
self.space = space
|
||||
self.paths = paths
|
||||
def __init__(self, objectspace):
|
||||
self.space = objectspace.space
|
||||
self.paths = objectspace.paths
|
||||
self.objectspace = objectspace
|
||||
self.extra_conditions = {}
|
||||
var_name = 'mode_conteneur_actif'
|
||||
self.containers_enabled = False
|
||||
try:
|
||||
family_name = self.paths.get_variable_family_name(var_name)
|
||||
if (hasattr(space, 'variables') and
|
||||
'creole' in space.variables and
|
||||
hasattr(space.variables['creole'], 'family') and
|
||||
family_name in space.variables['creole'].family and
|
||||
var_name in space.variables['creole'].family[family_name].variable and
|
||||
hasattr(space.variables['creole'].family[family_name].variable[var_name], 'value')):
|
||||
# assume that mode_conteneur_actif is not a multi
|
||||
value = space.variables['creole'].family[family_name].variable[var_name].value[0].name
|
||||
self.containers_enabled = value == 'oui'
|
||||
except CreoleDictConsistencyError:
|
||||
pass
|
||||
"""for example::
|
||||
<service_access service='ntp'>
|
||||
<port protocol='udp' service_accesslist='ntp_udp'>123</port>
|
||||
<tcpwrapper>ntpd</tcpwrapper>
|
||||
</service_access>
|
||||
"""
|
||||
self.grouplist_conditions = {}
|
||||
self.convert_containers()
|
||||
|
||||
root_container = self.objectspace.container()
|
||||
root_container.name = 'root'
|
||||
root_container.container = 'root'
|
||||
root_container.real_container = 'root'
|
||||
root_container.container_group = 'root'
|
||||
root_container.id = '1'
|
||||
if not hasattr(self.space, 'containers'):
|
||||
self.space.containers = self.objectspace.containers()
|
||||
def convert_containers(self):
|
||||
if hasattr(self.space, 'containers'):
|
||||
if hasattr(self.space.containers, 'container'):
|
||||
old_container = list(self.space.containers.container.items())
|
||||
old_container.insert(0, ('root', root_container))
|
||||
self.space.containers.container = OrderedDict(old_container)
|
||||
#self.space.containers.container['root'] = root_container
|
||||
self.convert_all()
|
||||
subelts = dict()
|
||||
# self.space.containers.containers = self.objectspace.containers()
|
||||
for idx, container in enumerate(self.space.containers.container.values()):
|
||||
family = self.objectspace.family()
|
||||
family.name = 'container{}'.format(idx)
|
||||
family.doc = container.name
|
||||
family.family = OrderedDict()
|
||||
self.convert_container_to_family(family.family, container)
|
||||
setattr(self.space.containers, family.name, family)
|
||||
del self.space.containers.container
|
||||
else:
|
||||
self.space.containers.container = OrderedDict({'root': root_container})
|
||||
if hasattr(space, 'containers') and hasattr(space.containers, 'all'):
|
||||
all_container = self.objectspace.container()
|
||||
all_container.name = 'all'
|
||||
all_container.container = 'all'
|
||||
if self.containers_enabled:
|
||||
all_container.real_container = 'all'
|
||||
else:
|
||||
all_container.real_container = VIRTMASTER
|
||||
all_container.container_group = 'all'
|
||||
old_container = list(self.space.containers.container.items())
|
||||
old_container.insert(1, ('all', all_container))
|
||||
self.space.containers.container = OrderedDict(old_container)
|
||||
#self.space.containers.container['all'] = all_container
|
||||
if hasattr(space, 'variables') and 'creole' in space.variables:
|
||||
flattened_elts = dict()
|
||||
if hasattr(space, 'files'):
|
||||
for key, values in vars(self.space.files).items():
|
||||
if not isinstance(values, str) and not isinstance(values, int):
|
||||
if isinstance(values, dict):
|
||||
values = values.values()
|
||||
for value in values:
|
||||
value.container = root_container
|
||||
flattened_elts.setdefault(key, []).append(value)
|
||||
del self.space.containers
|
||||
|
||||
def convert_all(self):
|
||||
if hasattr(self.space.containers, 'all'):
|
||||
# Remove "all" and dispatch informations in all containers
|
||||
if hasattr(space, 'containers') and hasattr(space.containers, 'all') and hasattr(space.containers, 'container'):
|
||||
for type_, containers in vars(space.containers.all).items():
|
||||
for type_, containers in vars(self.space.containers.all).items():
|
||||
if type_ == 'index':
|
||||
continue
|
||||
if isinstance(containers, list):
|
||||
for elt in containers:
|
||||
for container in space.containers.container.values():
|
||||
if container.name not in ['root', 'all']:
|
||||
for container in self.space.containers.container.values():
|
||||
if container.name != 'all':
|
||||
if not hasattr(container, type_):
|
||||
setattr(container, type_, [])
|
||||
new_elt = copy(elt)
|
||||
|
@ -172,8 +135,8 @@ class ContainerAnnotator(object):
|
|||
getattr(container, type_).append(new_elt)
|
||||
else:
|
||||
for name, elt in containers.items():
|
||||
for container in space.containers.container.values():
|
||||
if container.name not in ['root', 'all']:
|
||||
for container in self.space.containers.container.values():
|
||||
if container.name != 'all':
|
||||
if not hasattr(container, type_):
|
||||
setattr(container, type_, OrderedDict())
|
||||
old_container = getattr(container, type_)
|
||||
|
@ -182,362 +145,33 @@ class ContainerAnnotator(object):
|
|||
new_elt = copy(elt)
|
||||
new_elt.container = container
|
||||
old_container[name] = new_elt
|
||||
del space.containers.all
|
||||
if hasattr(space, 'containers') and hasattr(space.containers, 'container'):
|
||||
self.generate_interfaces()
|
||||
groups = {}
|
||||
containers = space.containers.container.values()
|
||||
container_groups = {}
|
||||
update_values = True
|
||||
while update_values:
|
||||
update_values = False
|
||||
for container in containers:
|
||||
if not hasattr(container, 'group'):
|
||||
container.group = container.name
|
||||
if not hasattr(container, 'container_group'):
|
||||
container.container_group = container.group
|
||||
if HIGH_COMPATIBILITY:
|
||||
if self.containers_enabled:
|
||||
real_container = container.group
|
||||
else:
|
||||
real_container = VIRTMASTER
|
||||
container.real_container = real_container
|
||||
if container.group in container_groups:
|
||||
group = container_groups[container.group]
|
||||
else:
|
||||
group = container.group
|
||||
if container_groups.get(container.name) != group:
|
||||
container_groups[container.name] = group
|
||||
container._real_container = group
|
||||
if not HIGH_COMPATIBILITY and self.containers_enabled:
|
||||
container.real_container = group
|
||||
update_values = True
|
||||
del self.space.containers.all
|
||||
|
||||
for container in space.containers.container.values():
|
||||
if not hasattr(container, 'group'):
|
||||
container.group = container.name
|
||||
groupname = container.group
|
||||
groups.setdefault(groupname, []).append(container)
|
||||
for groupname, containers in groups.items():
|
||||
for container in containers:
|
||||
if container.name == 'all':
|
||||
continue
|
||||
#container.container_group = groupname
|
||||
if HIGH_COMPATIBILITY and hasattr(container, 'id'):
|
||||
container.group_id = container.id
|
||||
container.id = space.containers.container[container._real_container].id
|
||||
container.container = container.name
|
||||
for container in space.containers.container.values():
|
||||
container_info = self.objectspace.container()
|
||||
for key, value in vars(container).items():
|
||||
if isinstance(value, str):
|
||||
setattr(container_info, key, value)
|
||||
for key, values in vars(container).items():
|
||||
if not isinstance(values, str) and not isinstance(values, int):
|
||||
if isinstance(values, dict):
|
||||
values = values.values()
|
||||
for value in values:
|
||||
value.container = container_info
|
||||
flattened_elts.setdefault(key, []).append(value)
|
||||
self.generate_containers()
|
||||
if hasattr(self.space, 'files'):
|
||||
del self.space.files
|
||||
self.convert_containers()
|
||||
|
||||
if hasattr(self.space.containers, 'family'):
|
||||
raise Exception('hu?')
|
||||
self.space.containers.family = OrderedDict()
|
||||
self.generate_network_container()
|
||||
def convert_container_to_family(self, container_family, container):
|
||||
# tranform container object to family object
|
||||
# add services, service_accesses, ...
|
||||
for elttype in self.objectspace.container_elt_attr_list:
|
||||
if hasattr(container, elttype):
|
||||
family = self.objectspace.family()
|
||||
key_type_name = elttype.upper() + '_KEY_TYPE'
|
||||
if key_type_name in globals():
|
||||
key_type = globals()[key_type_name]
|
||||
else:
|
||||
key_type = {}
|
||||
elt = flattened_elts.get(elttype, {})
|
||||
families = self.make_group_from_elts(elttype, elt, key_type,
|
||||
'containers.{}s'.format(elttype), True)
|
||||
if families == [] and not HIGH_COMPATIBILITY:
|
||||
continue
|
||||
family = self.objectspace.family()
|
||||
if elttype.endswith('s'):
|
||||
family.name = elttype + 'es'
|
||||
else:
|
||||
family.name = elttype + 's'
|
||||
if HIGH_COMPATIBILITY:
|
||||
family.doc = ''
|
||||
family.family = families
|
||||
if HIGH_COMPATIBILITY:
|
||||
values = getattr(container, elttype)
|
||||
if isinstance(values, dict):
|
||||
values = list(values.values())
|
||||
family.family = self.make_group_from_elts(elttype,
|
||||
values,
|
||||
key_type,
|
||||
'containers.{}'.format(family.name),
|
||||
True)
|
||||
family.mode = None
|
||||
self.space.containers.family[elttype + 's'] = family
|
||||
|
||||
def _generate_container_variable(self, name, description, value, family_name, frozen=False):
|
||||
var_data = {'hidden': True, 'mode': 'expert', 'name': name,
|
||||
'doc': description, 'value': value,
|
||||
'type': 'string'}
|
||||
variable = self.objectspace.variable()
|
||||
if HIGH_COMPATIBILITY:
|
||||
if frozen:
|
||||
var_data['frozen'] = True
|
||||
var_data['force_default_on_freeze'] = True
|
||||
var_data['hidden'] = False
|
||||
del var_data['mode']
|
||||
variable.mode = None
|
||||
for key, value in var_data.items():
|
||||
if key == 'value':
|
||||
# Value is a list of objects
|
||||
val = self.objectspace.value()
|
||||
val.name = value
|
||||
value = [val]
|
||||
setattr(variable, key, value)
|
||||
self.paths.append('variable', variable.name, 'creole', family_name, variable)
|
||||
return variable
|
||||
|
||||
def _generate_root_container(self, family_name):
|
||||
if self.containers_enabled:
|
||||
ip_br0 = u'192.0.2.1'
|
||||
mask_br0 = u'255.255.255.0'
|
||||
network_br0 = u'192.0.2.0'
|
||||
bcast_br0 = u'192.0.2.255'
|
||||
else:
|
||||
ip_br0 = u'127.0.0.1'
|
||||
mask_br0 = u'255.0.0.0'
|
||||
network_br0 = u'127.0.0.0'
|
||||
bcast_br0 = u'127.255.255.255'
|
||||
variables = OrderedDict()
|
||||
variable = self._generate_container_variable('adresse_ip_br0',
|
||||
_(u"Bridge IP address"),
|
||||
ip_br0,
|
||||
family_name)
|
||||
variables[variable.name] = variable
|
||||
variable = self._generate_container_variable('adresse_netmask_br0',
|
||||
_(u"Bridge IP subnet mask"),
|
||||
mask_br0,
|
||||
family_name)
|
||||
variables[variable.name] = variable
|
||||
if HIGH_COMPATIBILITY:
|
||||
msg = u"Bridge IP network_br0 address"
|
||||
else:
|
||||
msg = u"Bridge IP network address"
|
||||
variable = self._generate_container_variable('adresse_network_br0',
|
||||
_(msg),
|
||||
network_br0,
|
||||
family_name)
|
||||
variables[variable.name] = variable
|
||||
variable = self._generate_container_variable('adresse_broadcast_br0',
|
||||
_(u"Bridge broadcast IP address"),
|
||||
bcast_br0,
|
||||
family_name)
|
||||
variables[variable.name] = variable
|
||||
return variables
|
||||
|
||||
def _get_containers(self):
|
||||
return self.space.containers.container
|
||||
|
||||
def convert_containers(self):
|
||||
idx = 0
|
||||
self.space.containers.containers = self.objectspace.containers()
|
||||
for name, container in self.space.containers.container.items():
|
||||
variables = []
|
||||
for key, value in vars(container).items():
|
||||
if key in ['container', 'group_id'] or key in ERASED_ATTRIBUTES:
|
||||
continue
|
||||
if not isinstance(value, list) and not isinstance(value, OrderedDict):
|
||||
variable = self.objectspace.variable()
|
||||
variable.name = key
|
||||
variable.mode = None
|
||||
if key == 'id':
|
||||
variable.type = 'number'
|
||||
else:
|
||||
variable.type = 'string'
|
||||
if HIGH_COMPATIBILITY:
|
||||
variable.doc = ''
|
||||
val = self.objectspace.value()
|
||||
val.name = value
|
||||
variable.value = [val]
|
||||
variables.append(variable)
|
||||
for key in ['ip', 'path']:
|
||||
var_path = self.paths.get_variable_path('container_{}_{}'.format(key, name), 'creole')
|
||||
variable = self.objectspace.variable()
|
||||
variable.name = key
|
||||
variable.mode = None
|
||||
variable.opt = var_path
|
||||
variable.type = 'symlink'
|
||||
variables.append(variable)
|
||||
|
||||
family = self.objectspace.family()
|
||||
family.name = 'container{}'.format(idx)
|
||||
if HIGH_COMPATIBILITY:
|
||||
family.doc = ''
|
||||
family.variable = variables
|
||||
if HIGH_COMPATIBILITY:
|
||||
family.mode = None
|
||||
setattr(self.space.containers.containers, 'container{}'.format(idx), family)
|
||||
idx += 1
|
||||
del self.space.containers.container
|
||||
|
||||
def generate_network_container(self):
|
||||
family = self.objectspace.family()
|
||||
family.name = 'network'
|
||||
if HIGH_COMPATIBILITY:
|
||||
family.doc = ''
|
||||
family.mode = None
|
||||
variables = []
|
||||
for name in ['adresse_ip_br0', 'adresse_netmask_br0',
|
||||
'adresse_network_br0', 'adresse_broadcast_br0']:
|
||||
var_path = self.paths.get_variable_path(name, 'creole')
|
||||
variable = self.objectspace.variable()
|
||||
variable.name = name
|
||||
variable.mode = 'expert'
|
||||
variable.opt = var_path
|
||||
variable.type = 'symlink'
|
||||
variables.append(variable)
|
||||
family.variable = variables
|
||||
self.space.containers.family['network'] = family
|
||||
|
||||
def generate_interfaces(self):
|
||||
if self.containers_enabled:
|
||||
for name, container in self._get_containers().items():
|
||||
if name in ['all', 'root']:
|
||||
continue
|
||||
interface = self.objectspace.interface()
|
||||
interface.name = 'containers'
|
||||
interface.container = name
|
||||
interface.linkto = 'br0'
|
||||
interface.method = 'bridge'
|
||||
interface.ip = 'container_ip_{0}'.format(name)
|
||||
interface.mask = 'adresse_netmask_br0'
|
||||
interface.bcast = 'adresse_broadcast_br0'
|
||||
interface.gateway = 'adresse_ip_br0'
|
||||
if not hasattr(container, 'interface'):
|
||||
container.interface = OrderedDict()
|
||||
container.interface['containers'] = interface
|
||||
else:
|
||||
old = list(container.interface.items())
|
||||
old.insert(0, ('containers', interface))
|
||||
container.interface = OrderedDict(old)
|
||||
|
||||
def generate_containers(self):
|
||||
"""generate the root's container informations
|
||||
"""
|
||||
family_description = 'Containers'
|
||||
family_name = family_description.lower()
|
||||
if family_name in self.space.variables:
|
||||
raise CreoleDictConsistencyError(_('{} family already exists').format(family_name))
|
||||
variables = self._generate_root_container(family_name)
|
||||
self._generate_containers(variables)
|
||||
self.paths.append('family', family_name, 'creole')
|
||||
family = self.objectspace.family()
|
||||
family.name = family_description
|
||||
family.doc = _(u'Containers informations')
|
||||
family.hidden = True
|
||||
if HIGH_COMPATIBILITY:
|
||||
family.mode = 'normal'
|
||||
family.icon = 'puzzle-piece'
|
||||
family.variable = variables
|
||||
# this family must be at the beginning
|
||||
if hasattr(self.space.variables['creole'], 'family'):
|
||||
old_families = list(self.space.variables['creole'].family.items())
|
||||
old_families.insert(0, (family_name, family))
|
||||
self.space.variables['creole'].family = OrderedDict(old_families)
|
||||
|
||||
def _generate_container_path(self, container):
|
||||
if container.name == 'all':
|
||||
path = None
|
||||
elif not self.containers_enabled or container.name == VIRTMASTER:
|
||||
path = u''
|
||||
else:
|
||||
group_name = container._real_container
|
||||
path = join(VIRTROOT, group_name, VIRTBASE)
|
||||
if sys.version_info[0] < 3:
|
||||
group_name = unicode(group_name)
|
||||
path = unicode(path)
|
||||
return path
|
||||
|
||||
def _generate_containers(self, variables):
|
||||
"""generate containers informations
|
||||
"""
|
||||
containers = self._get_containers()
|
||||
family_name = 'containers'
|
||||
ids = {}
|
||||
for name, container in containers.items():
|
||||
if not hasattr(container, 'group'):
|
||||
groupname = container.name
|
||||
else:
|
||||
groupname = container.group
|
||||
if name == 'all':
|
||||
ipaddr = None
|
||||
group_name = u'all'
|
||||
else:
|
||||
group_name = container._real_container
|
||||
if sys.version_info[0] < 3:
|
||||
group_name = unicode(group_name)
|
||||
if group_name not in containers:
|
||||
raise CreoleDictConsistencyError(_('the container "{}" does not exist').format(group_name))
|
||||
if not hasattr(containers[group_name], 'id'):
|
||||
raise CreoleDictConsistencyError(_('mandatory attribute "id" missing for container '
|
||||
'"{}"').format(group_name))
|
||||
id_value = containers[group_name].id
|
||||
if id_value in ids and ids[id_value] != group_name:
|
||||
raise CreoleDictConsistencyError(_('attribute "id" must be unique, but "{}" is used for containers "{}" and "{}"'
|
||||
).format(id_value, group_name, ids[id_value]))
|
||||
ids[id_value] = group_name
|
||||
if not self.containers_enabled or name == VIRTMASTER:
|
||||
ipaddr = u'127.0.0.1'
|
||||
group_name = VIRTMASTER
|
||||
else:
|
||||
group_id = id_value
|
||||
ipaddr = u"192.0.2.{}".format(group_id)
|
||||
|
||||
path = self._generate_container_path(container)
|
||||
# Variable : container_path_<conteneur>
|
||||
path_name = 'container_path_{0}'.format(name)
|
||||
variable = self._generate_container_variable(path_name,
|
||||
_(u'Path of container {0}').format(name),
|
||||
path,
|
||||
family_name)
|
||||
variables[variable.name] = variable
|
||||
# Variable : container_ip_<conteneur>
|
||||
ip_name = 'container_ip_{0}'.format(name)
|
||||
msg = u'IP address of container {0}'
|
||||
variable = self._generate_container_variable(ip_name,
|
||||
_(msg).format(
|
||||
name),
|
||||
ipaddr,
|
||||
family_name)
|
||||
variables[variable.name] = variable
|
||||
# Variable : container_name_<conteneur>
|
||||
name_name = 'container_name_{0}'.format(name)
|
||||
variable = self._generate_container_variable(name_name,
|
||||
_(u'Group name of container {0}').format(
|
||||
name),
|
||||
group_name,
|
||||
family_name)
|
||||
variables[variable.name] = variable
|
||||
# Variable : adresse_ip_<conteneur>
|
||||
# adresse_ip_<container> added for compat 2.3 (#5701, #5868)
|
||||
address_name = 'adresse_ip_{0}'.format(name)
|
||||
if HIGH_COMPATIBILITY:
|
||||
msg = u'Path of container {0}'
|
||||
else:
|
||||
msg = u'IP address of container {0}'
|
||||
if not self.paths.path_is_defined(address_name):
|
||||
if not self.containers_enabled:
|
||||
# hack to have "localhost" in non container mode #7183
|
||||
variable = self._generate_container_variable(address_name,
|
||||
_(msg).format(
|
||||
name),
|
||||
'localhost',
|
||||
family_name,
|
||||
frozen=True)
|
||||
else:
|
||||
self.paths.append('variable', address_name, 'creole', family_name, variable)
|
||||
path = self.paths.get_variable_path(address_name, 'creole')
|
||||
var_path = self.paths.get_variable_path(ip_name, 'creole')
|
||||
variable = self.objectspace.variable()
|
||||
variable.name = address_name
|
||||
variable.path = path
|
||||
variable.mode = 'expert'
|
||||
variable.opt = var_path
|
||||
variable.type = 'symlink'
|
||||
variables[variable.name] = variable
|
||||
container_family[family.name] = family
|
||||
|
||||
def _generate_element(self, eltname, name, value, type_, subpath, multi=False):
|
||||
var_data = {'name': name, 'doc': '', 'value': value,
|
||||
|
@ -578,9 +212,6 @@ class ContainerAnnotator(object):
|
|||
choices = []
|
||||
for value in values:
|
||||
choice = self.objectspace.choice()
|
||||
if sys.version_info[0] < 3:
|
||||
choice.name = unicode(value, 'utf8')
|
||||
else:
|
||||
choice.name = value
|
||||
choices.append(choice)
|
||||
variable.choice = choices
|
||||
|
@ -636,26 +267,8 @@ class ContainerAnnotator(object):
|
|||
disknod.permission = 'allow'
|
||||
|
||||
def _update_file(self, file_, index):
|
||||
# take care of os.path.join and absolute part after first
|
||||
# argument.
|
||||
filename = file_.name
|
||||
if filename[0] == '/':
|
||||
filename = filename[1:]
|
||||
|
||||
full_name = file_.name
|
||||
container_path = self._generate_container_path(file_.container)
|
||||
if container_path:
|
||||
if full_name.startswith('/'):
|
||||
full_name = full_name[1:]
|
||||
full_name = join(container_path, full_name)
|
||||
file_.full_name = full_name
|
||||
|
||||
if not hasattr(file_, 'source'):
|
||||
source = basename(filename)
|
||||
else:
|
||||
source = file_.source
|
||||
source = join(templatedir, source)
|
||||
file_.source = source
|
||||
file_.source = basename(file_.name)
|
||||
|
||||
def _split_elts(self, name, key, value, elt):
|
||||
"""for example::
|
||||
|
@ -693,7 +306,7 @@ class ContainerAnnotator(object):
|
|||
if not hasattr(new_elt, name + 'list'):
|
||||
setattr(new_elt, name + 'list', '___auto_{}'.format(elt.service))
|
||||
else:
|
||||
self.extra_conditions[new_elt] = '___auto_{}'.format(elt.service)
|
||||
self.grouplist_conditions[new_elt] = '___auto_{}'.format(elt.service)
|
||||
yield new_elt
|
||||
|
||||
def _reorder_elts(self, name, elts, duplicate_list):
|
||||
|
@ -738,36 +351,28 @@ class ContainerAnnotator(object):
|
|||
result_elts.extend(elt)
|
||||
return result_elts
|
||||
|
||||
|
||||
def make_group_from_elts(self, name, elts, key_type, path, duplicate_list):
|
||||
"""Splits each objects into a group (and `OptionDescription`, in tiramisu terms)
|
||||
and build elements and its attributes (the `Options` in tiramisu terms)
|
||||
"""
|
||||
index = 0
|
||||
families = []
|
||||
new_elts = self._reorder_elts(name, elts, duplicate_list)
|
||||
for elt_info in new_elts:
|
||||
for index, elt_info in enumerate(new_elts):
|
||||
elt = elt_info['elt']
|
||||
elt_name = elt_info['elt_name']
|
||||
|
||||
# try to launch _update_xxxx() function
|
||||
update_elt = '_update_' + elt_name
|
||||
if hasattr(self, update_elt):
|
||||
getattr(self, update_elt)(elt, index)
|
||||
if hasattr(elt, 'instance_mode'):
|
||||
instance_mode = elt.instance_mode
|
||||
else:
|
||||
instance_mode = 'always'
|
||||
if ((instance_mode == 'when_container' and not self.containers_enabled) or
|
||||
(instance_mode == 'when_no_container' and self.containers_enabled)):
|
||||
continue
|
||||
variables = []
|
||||
subpath = '{}.{}{}'.format(path, name, index)
|
||||
listname = '{}list'.format(name)
|
||||
if name not in NOT_NEED_ACTIVATE:
|
||||
activate_path = '.'.join([subpath, 'activate'])
|
||||
if elt in self.extra_conditions:
|
||||
if name not in NOT_NEED_ACTIVATE and elt in self.grouplist_conditions:
|
||||
# FIXME transformer le activate qui disparait en boolean
|
||||
self.objectspace.list_conditions.setdefault(listname,
|
||||
{}).setdefault(
|
||||
self.extra_conditions[elt],
|
||||
{}).setdefault(self.grouplist_conditions[elt],
|
||||
[]).append(activate_path)
|
||||
for key in dir(elt):
|
||||
if key.startswith('_') or key.endswith('_type') or key in ERASED_ATTRIBUTES:
|
||||
|
@ -775,29 +380,13 @@ class ContainerAnnotator(object):
|
|||
value = getattr(elt, key)
|
||||
if isinstance(value, list) and duplicate_list:
|
||||
continue
|
||||
if key == listname:
|
||||
if name not in NOT_NEED_ACTIVATE:
|
||||
if key == 'container':
|
||||
value = value.name
|
||||
if name not in NOT_NEED_ACTIVATE and key == listname:
|
||||
self.objectspace.list_conditions.setdefault(listname,
|
||||
{}).setdefault(
|
||||
value,
|
||||
[]).append(activate_path)
|
||||
if not HIGH_COMPATIBILITY:
|
||||
continue
|
||||
if key == 'container':
|
||||
variables.append(self._generate_element(elt_name, key, value.container,
|
||||
'string', subpath))
|
||||
variables.append(self._generate_element(elt_name, 'container_group',
|
||||
value.container_group,
|
||||
'string', subpath))
|
||||
if HIGH_COMPATIBILITY:
|
||||
if not self.containers_enabled:
|
||||
real_container = value.real_container
|
||||
else:
|
||||
real_container = value._real_container
|
||||
variables.append(self._generate_element(elt_name, 'real_container',
|
||||
real_container,
|
||||
'string', subpath))
|
||||
else:
|
||||
default_type = 'string'
|
||||
if key in self.objectspace.booleans_attributs:
|
||||
default_type = 'boolean'
|
||||
|
@ -805,43 +394,42 @@ class ContainerAnnotator(object):
|
|||
dtd_key_type = key + '_type'
|
||||
if hasattr(elt, dtd_key_type):
|
||||
type_ = KEY_TYPE[getattr(elt, dtd_key_type)]
|
||||
if isinstance(value, list):
|
||||
variables.append(self._generate_element(elt_name, key, value, type_,
|
||||
subpath, True))
|
||||
else:
|
||||
variables.append(self._generate_element(elt_name, key, value, type_,
|
||||
subpath, False))
|
||||
multi = isinstance(value, list)
|
||||
variables.append(self._generate_element(elt_name,
|
||||
key,
|
||||
value,
|
||||
type_,
|
||||
subpath,
|
||||
multi))
|
||||
if name not in NOT_NEED_ACTIVATE:
|
||||
# FIXME ne devrait pas etre True par défaut
|
||||
variables.append(self._generate_element(name, 'activate', True, 'boolean', subpath))
|
||||
family = self.objectspace.family()
|
||||
family.name = '{}{}'.format(name, index)
|
||||
if HIGH_COMPATIBILITY:
|
||||
family.doc = ''
|
||||
family.variable = variables
|
||||
if HIGH_COMPATIBILITY:
|
||||
family.mode = None
|
||||
self.paths.append('family', subpath, 'containers', creoleobj=family)
|
||||
families.append(family)
|
||||
index += 1
|
||||
return families
|
||||
|
||||
|
||||
class ActionAnnotator(ContainerAnnotator):
|
||||
|
||||
def __init__(self, space, paths, objectspace):
|
||||
self.space = space
|
||||
self.paths = paths
|
||||
def __init__(self, objectspace):
|
||||
self.space = objectspace.space
|
||||
self.paths = objectspace.paths
|
||||
self.objectspace = objectspace
|
||||
self.extra_conditions = []
|
||||
if hasattr(space, 'family_action'):
|
||||
self.grouplist_conditions = {}
|
||||
self.convert_family_action()
|
||||
|
||||
def convert_family_action(self):
|
||||
if hasattr(self.space, 'family_action'):
|
||||
actions = self.objectspace.family()
|
||||
actions.name = 'actions'
|
||||
if HIGH_COMPATIBILITY:
|
||||
actions.mode = None
|
||||
actions.family = []
|
||||
self.space.actions = actions
|
||||
namespaces = []
|
||||
for name, actions in space.family_action.items():
|
||||
for name, actions in self.space.family_action.items():
|
||||
subpath = 'actions.{}'.format(normalize_family(name))
|
||||
for action in actions.action:
|
||||
namespace = action.namespace
|
||||
|
@ -854,7 +442,6 @@ class ActionAnnotator(ContainerAnnotator):
|
|||
family = self.objectspace.family()
|
||||
family.name = actions.name
|
||||
family.family = new_actions
|
||||
if HIGH_COMPATIBILITY:
|
||||
family.mode = None
|
||||
variables = []
|
||||
for key, value in vars(actions).items():
|
||||
|
@ -863,59 +450,46 @@ class ActionAnnotator(ContainerAnnotator):
|
|||
subpath))
|
||||
family.variable = variables
|
||||
self.space.actions.family.append(family)
|
||||
del space.family_action
|
||||
del self.space.family_action
|
||||
|
||||
|
||||
class SpaceAnnotator(object):
|
||||
"""Transformations applied on a CreoleObjSpace instance
|
||||
"""
|
||||
def __init__(self, space, paths, objectspace, eosfunc_file):
|
||||
self.paths = paths
|
||||
self.space = space
|
||||
def __init__(self, objectspace, eosfunc_file):
|
||||
self.paths = objectspace.paths
|
||||
self.space = objectspace.space
|
||||
self.objectspace = objectspace
|
||||
self.valid_enums = {}
|
||||
self.force_value = {}
|
||||
self.has_calc = []
|
||||
self.force_no_value = []
|
||||
self.force_not_mandatory = []
|
||||
if eosfunc_file is not None:
|
||||
self.eosfunc = imp.load_source('eosfunc', eosfunc_file)
|
||||
else:
|
||||
self.eosfunc = None
|
||||
if HIGH_COMPATIBILITY:
|
||||
self.default_has_no_value = []
|
||||
self.has_frozen_if_in_condition = []
|
||||
try:
|
||||
self.default_variable_options(space.variables)
|
||||
except AttributeError:
|
||||
raise CreoleDictConsistencyError(_('No configuration variables available in the configuration set'))
|
||||
self.default_variable_options()
|
||||
self.convert_auto_freeze()
|
||||
self.convert_groups()
|
||||
self.filter_check()
|
||||
self.filter_condition()
|
||||
self.convert_valid_enums()
|
||||
self.convert_autofill()
|
||||
self.remove_empty_families()
|
||||
self.change_variable_mode()
|
||||
self.change_family_mode()
|
||||
self.filter_separators()
|
||||
self.absolute_path_for_symlink_in_containers()
|
||||
self.convert_helps()
|
||||
|
||||
for family in space.variables.values():
|
||||
if hasattr(family, 'family'):
|
||||
self.change_variable_auto_freeze(family.family, family.name)
|
||||
if 'group' in vars(space.constraints):
|
||||
self.transform_groups(space.constraints.group, space)
|
||||
if hasattr(space.constraints, 'check'):
|
||||
self.filter_check(space.constraints.check)
|
||||
if 'condition' in vars(space.constraints):
|
||||
self.filter_condition(space.constraints.condition)
|
||||
self._parse_object_space(space, None)
|
||||
# valid_enums must be empty now (all information are store in objects)
|
||||
if self.valid_enums:
|
||||
raise CreoleDictConsistencyError(_('valid_enum sets for unknown variables {}').format(self.valid_enums.keys()))
|
||||
self.filter_autofill(space)
|
||||
for family in space.variables.values():
|
||||
if not HIGH_COMPATIBILITY:
|
||||
self.remove_empty_family(family.family)
|
||||
if hasattr(family, 'family'):
|
||||
self.change_variable_mode(family.family)
|
||||
if not HIGH_COMPATIBILITY:
|
||||
self.change_family_mode(family.family)
|
||||
if (hasattr(family, 'separators') and
|
||||
hasattr(family.separators, 'separator')):
|
||||
self.filter_separator(family.separators.separator)
|
||||
self.absolute_path_for_symlink_in_containers(space.containers.family.values())
|
||||
if 'help' in vars(space):
|
||||
self.transform_helps(space.help)
|
||||
|
||||
def absolute_path_for_symlink_in_containers(self, families):
|
||||
def absolute_path_for_symlink_in_containers(self):
|
||||
if not hasattr(self.space, 'containers') or not hasattr(self.space.containers, 'family'):
|
||||
return
|
||||
families = self.space.containers.family.values()
|
||||
for family in families:
|
||||
if hasattr(family, 'family'):
|
||||
for fam in family.family:
|
||||
|
@ -923,109 +497,78 @@ class SpaceAnnotator(object):
|
|||
if variable.type == 'symlink' and '.' not in variable.name:
|
||||
variable.opt = self.paths.get_variable_path(variable.opt, 'creole')
|
||||
|
||||
def transform_helps(self, helps):
|
||||
def convert_helps(self):
|
||||
# FIXME l'aide doit etre dans la variable!
|
||||
if not hasattr(self.space, 'help'):
|
||||
return
|
||||
helps = self.space.help
|
||||
if hasattr(helps, 'variable'):
|
||||
for hlp in helps.variable.values():
|
||||
hlp.name = hlp.path
|
||||
variable = self.paths.get_variable_obj(hlp.name)
|
||||
variable.help = hlp.text
|
||||
if hasattr(helps, 'family'):
|
||||
for hlp in helps.family.values():
|
||||
hlp.name = hlp.path
|
||||
variable = self.paths.get_family_obj(hlp.name)
|
||||
variable.help = hlp.text
|
||||
del self.space.help
|
||||
|
||||
def transform_groups(self, groups, space): # pylint: disable=C0111
|
||||
for group in groups:
|
||||
master_fullname = group.master
|
||||
slave_names = list(group.slave.keys())
|
||||
try:
|
||||
master_family_name = self.paths.get_variable_family_name(master_fullname)
|
||||
except CreoleDictConsistencyError as err:
|
||||
if HIGH_COMPATIBILITY:
|
||||
continue
|
||||
raise err
|
||||
namespace = self.paths.get_variable_namespace(master_fullname)
|
||||
master_name = self.paths.get_variable_name(master_fullname)
|
||||
master_family = space.variables[namespace].family[master_family_name]
|
||||
master_path = namespace + '.' + master_family_name
|
||||
is_master = False
|
||||
for variable_name, variable in list(master_family.variable.items()):
|
||||
if isinstance(variable, self.objectspace.Master):
|
||||
if variable.name == master_name:
|
||||
master_space = variable
|
||||
is_master = True
|
||||
def convert_groups(self): # pylint: disable=C0111
|
||||
if hasattr(self.space, 'constraints'):
|
||||
if hasattr(self.space.constraints, 'group'):
|
||||
for group in self.space.constraints.group:
|
||||
leader_fullname = group.master
|
||||
follower_names = list(group.slave.keys())
|
||||
leader_family_name = self.paths.get_variable_family_name(leader_fullname)
|
||||
namespace = self.paths.get_variable_namespace(leader_fullname)
|
||||
leader_name = self.paths.get_variable_name(leader_fullname)
|
||||
leader_family = self.space.variables[namespace].family[leader_family_name]
|
||||
leader_path = namespace + '.' + leader_family_name
|
||||
is_leader = False
|
||||
for variable in list(leader_family.variable.values()):
|
||||
if isinstance(variable, self.objectspace.Leadership):
|
||||
# append follower to an existed leadership
|
||||
if variable.name == leader_name:
|
||||
leader_space = variable
|
||||
is_leader = True
|
||||
else:
|
||||
if is_master:
|
||||
# slaves are multi
|
||||
if variable_name in slave_names:
|
||||
variable.multi = True
|
||||
slave_names.remove(variable_name)
|
||||
master_family.variable.pop(variable_name)
|
||||
master_space.variable.append(variable) # pylint: disable=E1101
|
||||
if is_leader:
|
||||
if variable.name == follower_names[0]:
|
||||
# followers are multi
|
||||
if not variable.multi is True:
|
||||
raise CreoleDictConsistencyError(_('the variable {} in a group must be multi').format(variable.name))
|
||||
follower_names.remove(variable.name)
|
||||
leader_family.variable.pop(variable.name)
|
||||
leader_space.variable.append(variable) # pylint: disable=E1101
|
||||
if namespace == 'creole':
|
||||
variable_fullpath = variable_name
|
||||
variable_fullpath = variable.name
|
||||
else:
|
||||
variable_fullpath = master_path + '.' + variable_name
|
||||
self.paths.set_master(variable_fullpath, master_name)
|
||||
if slave_names == []:
|
||||
variable_fullpath = leader_path + '.' + variable.name
|
||||
self.paths.set_leader(variable_fullpath, leader_name)
|
||||
if follower_names == []:
|
||||
break
|
||||
if is_master is False and variable_name == master_name:
|
||||
master_space = self.objectspace.Master()
|
||||
master_space.variable = []
|
||||
master_space.name = master_name
|
||||
# manage master's variable
|
||||
else:
|
||||
raise CreoleDictConsistencyError(_('cannot found this follower {}').format(follower_names[0]))
|
||||
if is_leader is False and variable.name == leader_name:
|
||||
leader_space = self.objectspace.Leadership()
|
||||
leader_space.variable = []
|
||||
leader_space.name = leader_name
|
||||
# manage leader's variable
|
||||
if variable.multi is not True:
|
||||
raise CreoleDictConsistencyError(_('the variable {} in a group must be multi').format(variable.name))
|
||||
master_family.variable[master_name] = master_space
|
||||
master_space.variable.append(variable) # pylint: disable=E1101
|
||||
self.paths.set_master(master_fullname, master_name)
|
||||
master_space.path = master_fullname
|
||||
is_master = True
|
||||
else: # pragma: no cover
|
||||
raise CreoleDictConsistencyError(_('cannot found a master {} '
|
||||
'nor a slave {}').format(master_name,
|
||||
slave_names))
|
||||
del space.constraints.group
|
||||
|
||||
def _parse_object_space(self, space, namespace, node_name='creole', parent_path=None, ishelp=False):
|
||||
space_is_help = ishelp
|
||||
vars_space = dict(vars(space))
|
||||
for name, subspace in vars_space.items():
|
||||
if namespace is None and name in ['containers', 'actions']:
|
||||
continue
|
||||
if space_is_help is False:
|
||||
ishelp = name == 'help'
|
||||
self._parse_subobject_space(name, node_name, space, subspace, parent_path, namespace, ishelp)
|
||||
|
||||
def _parse_subobject_space(self, name, node_name, parent, space, parent_path, namespace, ishelp): # pylint: disable=R0913
|
||||
keys = None
|
||||
if isinstance(space, dict):
|
||||
if namespace is None:
|
||||
keys = list(space.keys())
|
||||
space = list(space.values())
|
||||
|
||||
if isinstance(space, list):
|
||||
for idx, subspace in enumerate(space):
|
||||
if keys is not None and namespace is None:
|
||||
if subspace.__class__.__name__ == 'Variable':
|
||||
current_namespace = self.paths.get_variable_namespace(keys[idx])
|
||||
elif subspace.__class__.__name__ == 'Variables':
|
||||
current_namespace = keys[idx]
|
||||
leader_family.variable[leader_name] = leader_space
|
||||
leader_space.variable.append(variable) # pylint: disable=E1101
|
||||
self.paths.set_leader(leader_fullname, leader_name)
|
||||
leader_space.path = leader_fullname
|
||||
is_leader = True
|
||||
else:
|
||||
current_namespace = self.paths.get_family_namespace(normalize_family(keys[idx],
|
||||
check_name=False))
|
||||
else:
|
||||
current_namespace = namespace
|
||||
if hasattr(parent, 'path'):
|
||||
parent_path = parent.path
|
||||
else:
|
||||
parent_path = None
|
||||
self._parse_object_space(subspace, current_namespace, name, parent_path, ishelp)
|
||||
elif isinstance(space, self.objectspace.Atom):
|
||||
for subname, subspace in vars(space).items():
|
||||
self._parse_subobject_space(subname, name, space, subspace, None, namespace, ishelp)
|
||||
else:
|
||||
self.absolute_paths_annotation(name, node_name, parent, space, parent_path, namespace, ishelp)
|
||||
self.uppercase_family_name(name, node_name, parent, space)
|
||||
raise CreoleDictConsistencyError(_('cannot found followers {}').format(follower_names))
|
||||
del self.space.constraints.group
|
||||
|
||||
def remove_empty_family(self, space): # pylint: disable=C0111,R0201
|
||||
def remove_empty_families(self): # pylint: disable=C0111,R0201
|
||||
if hasattr(self.space, 'variables'):
|
||||
for family in self.space.variables.values():
|
||||
if hasattr(family, 'family'):
|
||||
space = family.family
|
||||
removed_families = []
|
||||
for family_name, family in space.items():
|
||||
if not hasattr(family, 'variable') or len(family.variable) == 0:
|
||||
|
@ -1037,47 +580,40 @@ class SpaceAnnotator(object):
|
|||
if family in removed_families:
|
||||
del self.space.help.family[family]
|
||||
|
||||
def uppercase_family_name(self, name, node_name, parent, value): # pylint: disable=C0111,R0201
|
||||
if name == 'name' and node_name == 'family':
|
||||
# let's preserve uppercase letters
|
||||
# just in case where some acronyms are present,
|
||||
# example : 'ARV'
|
||||
if not value[0].isupper():
|
||||
if HIGH_COMPATIBILITY:
|
||||
parent.name = value
|
||||
else:
|
||||
parent.name = value.capitalize()
|
||||
|
||||
def change_family_mode(self, families): # pylint: disable=C0111
|
||||
for family in families.values():
|
||||
def change_family_mode(self): # pylint: disable=C0111
|
||||
if not hasattr(self.space, 'variables'):
|
||||
return
|
||||
for family in self.space.variables.values():
|
||||
if hasattr(family, 'family'):
|
||||
for family in family.family.values():
|
||||
mode = modes_level[-1]
|
||||
for variable in family.variable.values():
|
||||
if isinstance(variable, self.objectspace.Master):
|
||||
if isinstance(variable, self.objectspace.Leadership):
|
||||
variable_mode = variable.variable[0].mode
|
||||
variable.mode = variable_mode
|
||||
else:
|
||||
variable_mode = variable.mode
|
||||
if variable_mode is not None and modes[mode] > modes[variable_mode]:
|
||||
mode = variable_mode
|
||||
if HIGH_COMPATIBILITY and family.name == 'Containers':
|
||||
if family.name == 'Containers':
|
||||
family.mode = 'normal'
|
||||
else:
|
||||
family.mode = mode
|
||||
|
||||
def _annotate_variable(self, variable, family_mode, is_slave=False):
|
||||
if (HIGH_COMPATIBILITY and variable.type == 'choice' and variable.mode != modes_level[-1] and variable.mandatory is True and variable.path in self.default_has_no_value):
|
||||
def _annotate_variable(self, variable, family_mode, path, is_follower=False):
|
||||
if (HIGH_COMPATIBILITY and variable.type == 'choice' and variable.mode != modes_level[-1] and variable.mandatory is True and path in self.default_has_no_value):
|
||||
variable.mode = modes_level[0]
|
||||
if variable.type == 'choice' and is_slave and family_mode == modes_level[0] and variable.mandatory is True:
|
||||
if variable.type == 'choice' and is_follower and family_mode == modes_level[0] and variable.mandatory is True:
|
||||
variable.mode = modes_level[0]
|
||||
# if the variable is mandatory and doesn't have any value
|
||||
# then the variable's mode is set to 'basic'
|
||||
has_value = hasattr(variable, 'value')
|
||||
if (variable.path not in self.has_calc and variable.mandatory is True and
|
||||
(not has_value or is_slave) and variable.type != 'choice'):
|
||||
if (path not in self.has_calc and variable.mandatory is True and
|
||||
(not has_value or is_follower) and variable.type != 'choice'):
|
||||
variable.mode = modes_level[0]
|
||||
if has_value:
|
||||
if not HIGH_COMPATIBILITY or (not variable.path.startswith('creole.containers.') \
|
||||
and variable.path not in self.force_no_value and variable.path not in self.force_not_mandatory):
|
||||
if not HIGH_COMPATIBILITY or (not path.startswith('creole.containers.') \
|
||||
and path not in self.force_no_value and path not in self.force_not_mandatory):
|
||||
variable.mandatory = True
|
||||
if variable.hidden is True:
|
||||
variable.frozen = True
|
||||
|
@ -1085,15 +621,16 @@ class SpaceAnnotator(object):
|
|||
variable.force_default_on_freeze = True
|
||||
if variable.name == 'frozen' and not variable.auto_save is True:
|
||||
variable.force_default_on_freeze = True
|
||||
if variable.mode != None and not is_slave and modes[variable.mode] < modes[family_mode]:
|
||||
if variable.mode != None and not is_follower and modes[variable.mode] < modes[family_mode]:
|
||||
variable.mode = family_mode
|
||||
if variable.mode != None and variable.mode != modes_level[0] and modes[variable.mode] < modes[family_mode]:
|
||||
variable.mode = family_mode
|
||||
if variable.name == "available_probes":
|
||||
variable.force_default_on_freeze = False
|
||||
|
||||
def default_variable_options(self, variables):
|
||||
for families in variables.values():
|
||||
def default_variable_options(self):
|
||||
if hasattr(self.space, 'variables'):
|
||||
for families in self.space.variables.values():
|
||||
if hasattr(families, 'family'):
|
||||
for family in families.family.values():
|
||||
if hasattr(family, 'variable'):
|
||||
|
@ -1103,62 +640,134 @@ class SpaceAnnotator(object):
|
|||
if variable.type != 'symlink' and not hasattr(variable, 'description'):
|
||||
variable.description = variable.name
|
||||
|
||||
def change_variable_auto_freeze(self, families, namespace): # pylint: disable=C0111
|
||||
for family in families.values():
|
||||
def convert_auto_freeze(self): # pylint: disable=C0111
|
||||
if hasattr(self.space, 'variables'):
|
||||
for variables in self.space.variables.values():
|
||||
if hasattr(variables, 'family'):
|
||||
for family in variables.family.values():
|
||||
if hasattr(family, 'variable'):
|
||||
for variable in family.variable.values():
|
||||
if variable.auto_freeze:
|
||||
new_condition = self.objectspace.condition()
|
||||
new_condition.name = 'auto_frozen_if_in'
|
||||
new_condition.namespace = namespace
|
||||
new_condition.source = 'module_instancie'
|
||||
new_condition.namespace = variables.name
|
||||
new_condition.source = FREEZE_AUTOFREEZE_VARIABLE
|
||||
new_param = self.objectspace.param()
|
||||
new_param.text = 'oui'
|
||||
new_condition.param = [new_param]
|
||||
new_target = self.objectspace.target()
|
||||
new_target.type = 'variable'
|
||||
if namespace == 'creole':
|
||||
if variables.name == 'creole':
|
||||
path = variable.name
|
||||
else:
|
||||
path = namespace + '.' + family.name + '.' + variable.name
|
||||
path = variable.namespace + '.' + family.name + '.' + variable.name
|
||||
new_target.name = path
|
||||
new_condition.target = [new_target]
|
||||
if not hasattr(self.space.constraints, 'condition'):
|
||||
self.space.constraints.condition = []
|
||||
self.space.constraints.condition.append(new_condition)
|
||||
|
||||
def change_variable_mode(self, families): # pylint: disable=C0111
|
||||
for family in families.values():
|
||||
def _set_valid_enum(self, variable, values, type_):
|
||||
if isinstance(values, list):
|
||||
variable.mandatory = True
|
||||
variable.choice = []
|
||||
choices = []
|
||||
for value in values:
|
||||
choice = self.objectspace.choice()
|
||||
choice.name = str(value)
|
||||
choices.append(choice.name)
|
||||
choice.type = type_
|
||||
variable.choice.append(choice)
|
||||
if not variable.choice:
|
||||
raise CreoleDictConsistencyError(_('empty valid enum is not allowed for variable {}').format(variable.name))
|
||||
if hasattr(variable, 'value'):
|
||||
for value in variable.value:
|
||||
value.type = type_
|
||||
if value.name not in choices:
|
||||
raise CreoleDictConsistencyError(_('value "{}" of variable "{}" is not in list of all expected values ({})').format(value.name, variable.name, choices))
|
||||
else:
|
||||
new_value = self.objectspace.value()
|
||||
new_value.name = values[0]
|
||||
new_value.type = type_
|
||||
variable.value = [new_value]
|
||||
else:
|
||||
# probe choice
|
||||
variable.choice = values
|
||||
variable.type = 'choice'
|
||||
|
||||
def _convert_valid_enum(self, variable, path):
|
||||
if variable.type in FORCE_CHOICE:
|
||||
if path in self.valid_enums:
|
||||
raise CreoleDictConsistencyError(_('cannot set valid enum for variable with type {}').format(variable.type))
|
||||
self._set_valid_enum(variable, FORCE_CHOICE[variable.type], 'string')
|
||||
if path in self.valid_enums:
|
||||
values = self.valid_enums[path]['values']
|
||||
self._set_valid_enum(variable, values, variable.type)
|
||||
del self.valid_enums[path]
|
||||
if path in self.force_value:
|
||||
new_value = self.objectspace.value()
|
||||
new_value.name = self.force_value[path]
|
||||
variable.value = [new_value]
|
||||
del self.force_value[path]
|
||||
|
||||
def convert_valid_enums(self): # pylint: disable=C0111
|
||||
if hasattr(self.space, 'variables'):
|
||||
for variables in self.space.variables.values():
|
||||
namespace = variables.name
|
||||
if hasattr(variables, 'family'):
|
||||
for family in variables.family.values():
|
||||
if hasattr(family, 'variable'):
|
||||
for variable in family.variable.values():
|
||||
if isinstance(variable, self.objectspace.Leadership):
|
||||
for follower in variable.variable:
|
||||
path = '{}.{}.{}.{}'.format(namespace, family.name, variable.name, follower.name)
|
||||
self._convert_valid_enum(follower, path)
|
||||
else:
|
||||
path = '{}.{}.{}'.format(namespace, family.name, variable.name)
|
||||
self._convert_valid_enum(variable, path)
|
||||
# valid_enums must be empty now (all information are store in objects)
|
||||
if self.valid_enums:
|
||||
raise CreoleDictConsistencyError(_('valid_enum sets for unknown variables {}').format(self.valid_enums.keys()))
|
||||
|
||||
def change_variable_mode(self): # pylint: disable=C0111
|
||||
if not hasattr(self.space, 'variables'):
|
||||
return
|
||||
for variables in self.space.variables.values():
|
||||
if hasattr(variables, 'family'):
|
||||
for family in variables.family.values():
|
||||
family_mode = family.mode
|
||||
if hasattr(family, 'variable'):
|
||||
for variable in family.variable.values():
|
||||
|
||||
if isinstance(variable, self.objectspace.Master):
|
||||
if isinstance(variable, self.objectspace.Leadership):
|
||||
mode = modes_level[-1]
|
||||
for slave in variable.variable:
|
||||
if slave.auto_save is True:
|
||||
raise CreoleDictConsistencyError(_('master/slaves {} '
|
||||
for follower in variable.variable:
|
||||
if follower.auto_save is True:
|
||||
raise CreoleDictConsistencyError(_('leader/followers {} '
|
||||
'could not be '
|
||||
'auto_save').format(slave.name))
|
||||
if slave.auto_freeze is True:
|
||||
raise CreoleDictConsistencyError(_('master/slaves {} '
|
||||
'auto_save').format(follower.name))
|
||||
if follower.auto_freeze is True:
|
||||
raise CreoleDictConsistencyError(_('leader/followers {} '
|
||||
'could not be '
|
||||
'auto_freeze').format(slave.name))
|
||||
if HIGH_COMPATIBILITY and variable.name != slave.name: # and variable.variable[0].mode != modes_level[0]:
|
||||
is_slave = True
|
||||
'auto_freeze').format(follower.name))
|
||||
if HIGH_COMPATIBILITY and variable.name != follower.name: # and variable.variable[0].mode != modes_level[0]:
|
||||
is_follower = True
|
||||
else:
|
||||
is_slave = False
|
||||
self._annotate_variable(slave, family_mode, is_slave)
|
||||
is_follower = False
|
||||
path = '{}.{}.{}.{}'.format(variables.name, family.name, variable.name, follower.name)
|
||||
self._annotate_variable(follower, family_mode, path, is_follower)
|
||||
if HIGH_COMPATIBILITY:
|
||||
# master's variable are right
|
||||
if modes[variable.variable[0].mode] > modes[slave.mode]:
|
||||
slave.mode = variable.variable[0].mode
|
||||
# leader's variable are right
|
||||
if modes[variable.variable[0].mode] > modes[follower.mode]:
|
||||
follower.mode = variable.variable[0].mode
|
||||
else:
|
||||
# auto_save's variable is set in 'basic' mode if its mode is 'normal'
|
||||
if slave.auto_save is True and slave.mode != modes_level[-1]:
|
||||
slave.mode = modes_level[0]
|
||||
if modes[mode] > modes[slave.mode]:
|
||||
mode = slave.mode
|
||||
if follower.auto_save is True and follower.mode != modes_level[-1]:
|
||||
follower.mode = modes_level[0]
|
||||
if modes[mode] > modes[follower.mode]:
|
||||
mode = follower.mode
|
||||
if not HIGH_COMPATIBILITY:
|
||||
# the master's mode is the lowest
|
||||
# the leader's mode is the lowest
|
||||
variable.variable[0].mode = mode
|
||||
variable.mode = variable.variable[0].mode
|
||||
else:
|
||||
|
@ -1168,105 +777,21 @@ class SpaceAnnotator(object):
|
|||
# auto_freeze's variable is set in 'basic' mode if its mode is 'normal'
|
||||
if variable.auto_freeze is True and variable.mode != modes_level[-1]:
|
||||
variable.mode = modes_level[0]
|
||||
self._annotate_variable(variable, family_mode)
|
||||
|
||||
def absolute_paths_annotation(self, name, node_name, parent, value, parent_path, namespace, ishelp): # pylint: disable=C0111,R0913
|
||||
if hasattr(parent, 'path'):
|
||||
return
|
||||
if name == 'name' and node_name in ['variable', 'family']:
|
||||
if node_name == 'family':
|
||||
family_name = normalize_family(value, check_name=False)
|
||||
subpath = self.paths.get_family_path(family_name, namespace)
|
||||
namespace = self.paths.get_family_namespace(family_name)
|
||||
else:
|
||||
if self.paths.path_is_defined(value):
|
||||
value_name = value
|
||||
else:
|
||||
value_name = parent_path + '.' + value
|
||||
if namespace is None:
|
||||
namespace = self.paths.get_variable_namespace(value)
|
||||
subpath = self.paths.get_variable_path(value_name, namespace)
|
||||
if not ishelp and hasattr(parent, 'type') and parent.type in FORCE_CHOICE:
|
||||
if subpath in self.valid_enums:
|
||||
raise CreoleDictConsistencyError(_('cannot set valid enum for variable with type {}').format(parent.type))
|
||||
parent.choice = []
|
||||
for value in FORCE_CHOICE[parent.type]:
|
||||
choice = self.objectspace.choice()
|
||||
if sys.version_info[0] < 3:
|
||||
choice.name = unicode(value, 'utf8')
|
||||
else:
|
||||
choice.name = str(value)
|
||||
parent.choice.append(choice)
|
||||
parent.type = 'choice'
|
||||
if not HIGH_COMPATIBILITY:
|
||||
parent.mandatory = True
|
||||
if parent.choice == []:
|
||||
raise CreoleDictConsistencyError(_('empty valid enum is not allowed for variable {}').format(value_name))
|
||||
if hasattr(parent, 'type') and parent.type != 'choice':
|
||||
orig_type = parent.type
|
||||
else:
|
||||
orig_type = None
|
||||
if not ishelp and subpath in self.valid_enums:
|
||||
values = self.valid_enums[subpath]['values']
|
||||
if isinstance(values, list):
|
||||
parent.choice = []
|
||||
choices = []
|
||||
for value in values:
|
||||
choice = self.objectspace.choice()
|
||||
if sys.version_info[0] < 3:
|
||||
choice.name = unicode(value)
|
||||
else:
|
||||
choice.name = str(value)
|
||||
choices.append(choice.name)
|
||||
choice.type = parent.type
|
||||
parent.choice.append(choice)
|
||||
if hasattr(parent, 'value'):
|
||||
for value in parent.value:
|
||||
value.type = parent.type
|
||||
if value.name not in choices:
|
||||
raise CreoleDictConsistencyError(_('value "{}" of variable "{}" is not in list of all expected values ({})').format(value.name, parent.name, choices))
|
||||
if parent.choice == []:
|
||||
raise CreoleDictConsistencyError(_('empty valid enum is not allowed for variable {}').format(value_name))
|
||||
else:
|
||||
# probe choice
|
||||
parent.choice = values
|
||||
parent.type = 'choice'
|
||||
del(self.valid_enums[subpath])
|
||||
if not ishelp and subpath in self.force_value:
|
||||
if not hasattr(parent, 'value'):
|
||||
new_value = self.objectspace.value()
|
||||
new_value.name = self.force_value[subpath]
|
||||
parent.value = [new_value]
|
||||
self.force_no_value.append(subpath)
|
||||
if not ishelp and hasattr(parent, 'type') and parent.type == 'choice':
|
||||
# if choice with no value, set value with the first choice
|
||||
if not hasattr(parent, 'value'):
|
||||
no_value = False
|
||||
if HIGH_COMPATIBILITY and parent.multi:
|
||||
no_value = True
|
||||
if not no_value:
|
||||
new_value = self.objectspace.value()
|
||||
new_value.name = parent.choice[0].name
|
||||
new_value.type = orig_type
|
||||
if HIGH_COMPATIBILITY:
|
||||
self.default_has_no_value.append(subpath)
|
||||
parent.value = [new_value]
|
||||
self.force_no_value.append(subpath)
|
||||
parent.path = subpath
|
||||
if name == 'name' and node_name == 'separator':
|
||||
pass
|
||||
path = '{}.{}.{}'.format(variables.name, family.name, variable.name)
|
||||
self._annotate_variable(variable, family_mode, path)
|
||||
|
||||
def get_variable(self, name): # pylint: disable=C0111
|
||||
return self.paths.get_variable_obj(name)
|
||||
|
||||
def filter_autofill(self, space): # pylint: disable=C0111
|
||||
self.filter_duplicate_autofill(space.constraints)
|
||||
if 'auto' in vars(space.constraints):
|
||||
self.filter_auto(space.constraints.auto, space)
|
||||
if 'fill' in vars(space.constraints):
|
||||
self.filter_fill(space.constraints.fill, space)
|
||||
def convert_autofill(self): # pylint: disable=C0111
|
||||
if hasattr(self.space, 'constraints'):
|
||||
self.convert_duplicate_autofill(self.space.constraints)
|
||||
if 'auto' in vars(self.space.constraints):
|
||||
self.convert_auto(self.space.constraints.auto, self.space)
|
||||
if 'fill' in vars(self.space.constraints):
|
||||
self.convert_fill(self.space.constraints.fill, self.space)
|
||||
|
||||
def filter_duplicate_autofill(self, constraints):
|
||||
def convert_duplicate_autofill(self, constraints):
|
||||
""" Remove duplicate auto or fill for a variable
|
||||
This variable must be redefined
|
||||
"""
|
||||
|
@ -1311,7 +836,7 @@ class SpaceAnnotator(object):
|
|||
for idx in remove_fills:
|
||||
constraints.fill.pop(idx)
|
||||
|
||||
def filter_auto(self, auto_space, space): # pylint: disable=C0111
|
||||
def convert_auto(self, auto_space, space): # pylint: disable=C0111
|
||||
for auto in auto_space:
|
||||
if HIGH_COMPATIBILITY and auto.target in self.has_frozen_if_in_condition:
|
||||
# if a variable has a 'frozen_if_in' condition
|
||||
|
@ -1333,26 +858,21 @@ class SpaceAnnotator(object):
|
|||
space.constraints.fill.extend(auto_space)
|
||||
del space.constraints.auto
|
||||
|
||||
def filter_separator(self, space): # pylint: disable=C0111,R0201
|
||||
def filter_separators(self): # pylint: disable=C0111,R0201
|
||||
# FIXME devrait etre dans la variable
|
||||
if not hasattr(self.space, 'variables'):
|
||||
return
|
||||
for family in self.space.variables.values():
|
||||
if (hasattr(family, 'separators') and hasattr(family.separators, 'separator')):
|
||||
space = family.separators.separator
|
||||
names = []
|
||||
remove_separators = []
|
||||
for idx, separator in enumerate(space):
|
||||
try:
|
||||
namespace = self.paths.get_variable_namespace(separator.name)
|
||||
subpath = self.paths.get_variable_path(separator.name, namespace)
|
||||
separator.name = subpath
|
||||
except CreoleDictConsistencyError as err:
|
||||
if HIGH_COMPATIBILITY:
|
||||
remove_separators.append(idx)
|
||||
continue
|
||||
else:
|
||||
raise err
|
||||
if separator.name in names:
|
||||
raise CreoleDictConsistencyError(_('{} already has a separator').format(separator.name))
|
||||
names.append(separator.name)
|
||||
remove_separators.sort(reverse=True)
|
||||
for idx in remove_separators:
|
||||
del space[idx]
|
||||
|
||||
|
||||
def load_params_in_validenum(self, param, probe):
|
||||
|
@ -1366,7 +886,7 @@ class SpaceAnnotator(object):
|
|||
raise CreoleDictConsistencyError(_('Cannot load {}').format(param.text))
|
||||
elif param.type == 'python':
|
||||
try:
|
||||
values = eval(param.text, {'eosfunc': secure_eosfunc(self.eosfunc), '__builtins__': {'range': range, 'str': str}})
|
||||
values = eval(param.text, {'eosfunc': self.eosfunc, '__builtins__': {'range': range, 'str': str}})
|
||||
#FIXME : eval('[str(i) for i in range(3, 13)]', {'eosfunc': eosfunc, '__builtins__': {'range': range, 'str': str}})
|
||||
except NameError:
|
||||
raise CreoleDictConsistencyError(_('The function {} is unknown').format(param.text))
|
||||
|
@ -1382,8 +902,11 @@ class SpaceAnnotator(object):
|
|||
values = param.text
|
||||
return values
|
||||
|
||||
def filter_check(self, space): # pylint: disable=C0111
|
||||
def filter_check(self): # pylint: disable=C0111
|
||||
# valid param in check
|
||||
if not hasattr(self.space, 'constraints') or not hasattr(self.space.constraints, 'check'):
|
||||
return
|
||||
space = self.space.constraints.check
|
||||
remove_indexes = []
|
||||
for check_idx, check in enumerate(space):
|
||||
namespace = check.namespace
|
||||
|
@ -1418,7 +941,7 @@ class SpaceAnnotator(object):
|
|||
if HIGH_COMPATIBILITY:
|
||||
if not self.paths.path_is_defined(check.target):
|
||||
continue
|
||||
check.is_in_master = self.paths.get_master(check.target) != None
|
||||
check.is_in_leadership = self.paths.get_leader(check.target) != None
|
||||
# let's replace the target by the path
|
||||
check.target = self.paths.get_variable_path(check.target, namespace)
|
||||
if check.target not in variables:
|
||||
|
@ -1446,9 +969,9 @@ class SpaceAnnotator(object):
|
|||
for idx, check in enumerate(space):
|
||||
if not check.name in dir(self.eosfunc):
|
||||
raise CreoleDictConsistencyError(_('cannot find check function {}').format(check.name))
|
||||
is_probe = not check.name in self.eosfunc.func_on_zephir_context
|
||||
if is_probe:
|
||||
raise CreoleDictConsistencyError(_('cannot have a check with probe function ({})').format(check.name))
|
||||
#is_probe = not check.name in self.eosfunc.func_on_zephir_context
|
||||
#if is_probe:
|
||||
# raise CreoleDictConsistencyError(_('cannot have a check with probe function ({})').format(check.name))
|
||||
if check.name == 'valid_enum':
|
||||
proposed_value_type = False
|
||||
remove_params = []
|
||||
|
@ -1474,7 +997,7 @@ class SpaceAnnotator(object):
|
|||
except NameError as err:
|
||||
raise CreoleDictConsistencyError(_('cannot load value for variable {}: {}').format(check.target, err))
|
||||
add_value = True
|
||||
if HIGH_COMPATIBILITY and check.is_in_master:
|
||||
if HIGH_COMPATIBILITY and check.is_in_leadership:
|
||||
add_value = False
|
||||
if add_value and values:
|
||||
self.force_value[check.target] = values[0]
|
||||
|
@ -1505,7 +1028,7 @@ class SpaceAnnotator(object):
|
|||
param.hidden = None
|
||||
check.transitive = transitive
|
||||
|
||||
def filter_fill(self, fill_space, space): # pylint: disable=C0111,R0912
|
||||
def convert_fill(self, fill_space, space): # pylint: disable=C0111,R0912
|
||||
fills = {}
|
||||
# sort fill/auto by index
|
||||
for idx, fill in enumerate(fill_space):
|
||||
|
@ -1524,7 +1047,7 @@ class SpaceAnnotator(object):
|
|||
fill.target = self.paths.get_variable_path(fill.target, namespace)
|
||||
if not fill.name in dir(self.eosfunc):
|
||||
raise CreoleDictConsistencyError(_('cannot find fill function {}').format(fill.name))
|
||||
is_probe = not fill.name in self.eosfunc.func_on_zephir_context
|
||||
#is_probe = not fill.name in self.eosfunc.func_on_zephir_context
|
||||
if hasattr(fill, 'param'):
|
||||
for param in fill.param:
|
||||
if param.type not in TYPE_PARAM_FILL:
|
||||
|
@ -1534,21 +1057,22 @@ class SpaceAnnotator(object):
|
|||
for fill_idx, param in enumerate(fill.param):
|
||||
if not hasattr(param, 'text') and \
|
||||
(param.type == 'eole' or param.type == 'number' or \
|
||||
param.type == 'container' or param.type == 'python'):
|
||||
#param.type == 'container' or param.type == 'python'):
|
||||
param.type == 'python'):
|
||||
raise CreoleDictConsistencyError(_("All '{}' variables shall be set in "
|
||||
"order to calculate {}").format(
|
||||
param.type,
|
||||
fill.target))
|
||||
if param.type == 'container':
|
||||
param.type = 'eole'
|
||||
param.text = 'container_ip_{}'.format(param.text)
|
||||
# if param.type == 'container':
|
||||
# param.type = 'eole'
|
||||
# param.text = 'container_ip_{}'.format(param.text)
|
||||
if param.type == 'eole':
|
||||
if is_probe:
|
||||
raise CreoleDictConsistencyError(_('Function {0} used to calculate {1} '
|
||||
'is executed on remote server, '
|
||||
'so cannot depends to an '
|
||||
'other variable'
|
||||
).format(fill.name, fill.target))
|
||||
#if is_probe:
|
||||
# raise CreoleDictConsistencyError(_('Function {0} used to calculate {1} '
|
||||
# 'is executed on remote server, '
|
||||
# 'so cannot depends to an '
|
||||
# 'other variable'
|
||||
# ).format(fill.name, fill.target))
|
||||
if HIGH_COMPATIBILITY and param.text.startswith('container_ip'):
|
||||
if param.optional is True:
|
||||
param_option_indexes.append(fill_idx)
|
||||
|
@ -1565,10 +1089,10 @@ class SpaceAnnotator(object):
|
|||
fill.param.pop(param_idx)
|
||||
self.has_calc.append(fill.target)
|
||||
|
||||
if is_probe:
|
||||
variable.force_default_on_freeze = False
|
||||
self.objectspace.probe_variables.append(fill)
|
||||
del_idx.append(fills[idx]['idx'])
|
||||
#if is_probe:
|
||||
# variable.force_default_on_freeze = False
|
||||
# self.objectspace.probe_variables.append(fill)
|
||||
# del_idx.append(fills[idx]['idx'])
|
||||
del_idx.sort(reverse=True)
|
||||
for idx in del_idx:
|
||||
space.constraints.fill.pop(idx)
|
||||
|
@ -1594,7 +1118,10 @@ class SpaceAnnotator(object):
|
|||
for idx in del_idx:
|
||||
space.target.pop(idx)
|
||||
|
||||
def filter_condition(self, space): # pylint: disable=C0111
|
||||
def filter_condition(self): # pylint: disable=C0111
|
||||
if not hasattr(self.space, 'constraints') or not hasattr(self.space.constraints, 'condition'):
|
||||
return
|
||||
space = self.space.constraints.condition
|
||||
remove_conditions = []
|
||||
fallback_variables = []
|
||||
fallback_lists = []
|
||||
|
|
637  creole/cert.py
|
@ -1,637 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
###########################################################################
|
||||
#
|
||||
# Eole NG - 2007
|
||||
# Copyright Pole de Competence Eole (Ministere Education - Academie Dijon)
|
||||
# Licence CeCill cf /root/LicenceEole.txt
|
||||
# eole@ac-dijon.fr
|
||||
#
|
||||
# libsecure.py
|
||||
#
|
||||
# classes utilitaires pour lancement des services en https
|
||||
#
|
||||
###########################################################################
|
||||
"""
|
||||
points d'entrée de l'api
|
||||
|
||||
- gen_certif -> génère **un** certif
|
||||
- gen_certs -> génère tous les certifs
|
||||
|
||||
cf creole/doc/certifs.txt
|
||||
|
||||
"""
|
||||
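A minimal usage sketch of those two entry points (the service certificate paths below are hypothetical; the configuration files fall back to the defaults set by load_default_conf_if_needed):

from creole import cert

# Regenerate the CA if needed, then every certificate declared in the
# /usr/share/eole/certs/*.gen_cert fragments.
cert.gen_certs(regen=False)

# Generate (and sign with the local CA) one extra service certificate.
cert.gen_certif('/etc/ssl/certs/myservice.crt',           # hypothetical paths
                keyfile='/etc/ssl/private/myservice.key',
                cert_chmod='644', key_chmod='600')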
# certains imports sont utilisés dans les fragments de code installés
|
||||
# dans /usr/share/eole/certs
|
||||
from os.path import join, splitext, basename, dirname, isdir, isfile, islink, exists, realpath
|
||||
from os import unlink, symlink, stat
|
||||
import os, glob, time
|
||||
from shutil import copy
|
||||
from subprocess import Popen, PIPE
|
||||
from OpenSSL import SSL
|
||||
import re
|
||||
|
||||
from .i18n import _
|
||||
|
||||
# chemin du certificat eole par défaut
|
||||
from .config import cert_file, key_file, SSL_LAST_FILE
|
||||
from .client import CreoleClient
|
||||
from pyeole.process import system_out, system_code
|
||||
|
||||
client = CreoleClient()
|
||||
|
||||
global regexp_get_subject
|
||||
regexp_get_subject = None
|
||||
|
||||
def prep_dir() :
|
||||
"""
|
||||
Création de l'arborescence pour openssl
|
||||
"""
|
||||
#on génère le random
|
||||
load_default_conf_if_needed()
|
||||
rand_file = os.path.join(ssl_dir, ".rand")
|
||||
if not os.path.isfile(rand_file) :
|
||||
cmd_random = "/bin/dd if=/dev/urandom of=%s bs=1k count=16 >/dev/null 2>&1" % (rand_file)
|
||||
cmd = Popen(cmd_random, shell=True)
|
||||
res = cmd.wait()
|
||||
if res != 0:
|
||||
raise Exception(_(u"! Error while generating entropy file !"))
|
||||
#on crée les fichiers pour gerer la pki
|
||||
file_serial = os.path.join(ssl_dir, "serial")
|
||||
if not os.path.isfile(file_serial) :
|
||||
f = file(file_serial, "w")
|
||||
f.write(str(start_index))
|
||||
f.close()
|
||||
file_index = os.path.join(ssl_dir, "index.txt")
|
||||
if not os.path.isfile(file_index) :
|
||||
f = file(file_index, "w")
|
||||
f.close()
|
||||
newcerts = os.path.join(ssl_dir, "newcerts")
|
||||
if not os.path.isdir(newcerts):
|
||||
os.makedirs(newcerts)
|
||||
if not os.path.isdir(key_dir):
|
||||
os.makedirs(key_dir)
|
||||
if not os.path.isdir(cert_dir):
|
||||
os.makedirs(cert_dir)
|
||||
if not os.path.isdir(req_dir):
|
||||
os.makedirs(req_dir)
|
||||
if not os.path.isdir(local_ca_dir):
|
||||
os.makedirs(local_ca_dir)
|
||||
##cmd = Popen("chmod 611 %s" % (key_dir), shell=True)
|
||||
dhfile = os.path.join(ssl_dir, "dh")
|
||||
if not os.path.isfile(dhfile):
|
||||
gen_dh = '/usr/bin/openssl dhparam -out "%s" 1024 >/dev/null 2>&1' % (dhfile)
|
||||
Popen(gen_dh, shell=True)
|
||||
|
||||
def sup_passwd(tmp_keyfile, keyfile) :
|
||||
"""
|
||||
Supression de la passphrase sur la clef privée
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
key_cmd = '/usr/bin/openssl rsa -in "%s" -passin pass:secret -out "%s" >/dev/null 2>&1' % (tmp_keyfile, keyfile)
|
||||
cmd = Popen(key_cmd, shell=True)
|
||||
res = cmd.wait()
|
||||
if res != 0:
|
||||
raise Exception(_(u'! Error while generating ssl key in {0} !').format(keyfile))
|
||||
|
||||
def finalise_cert (certfile, keyfile, key_user='', key_grp='', key_chmod='',
|
||||
cert_user='', cert_grp='', cert_chmod=''):
|
||||
"""
|
||||
Finalisation du certif
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
if key_user != '':
|
||||
try:
|
||||
res = Popen("chown %s %s" % (key_user, keyfile), shell=True).wait()
|
||||
assert res == 0
|
||||
except:
|
||||
print _(u"\n! Rights on {0} can't be modified").format(keyfile)
|
||||
return False
|
||||
if key_grp != '':
|
||||
try:
|
||||
res=Popen("/bin/chgrp %s %s" % (key_grp, keyfile), shell=True).wait()
|
||||
assert res == 0
|
||||
except:
|
||||
print _(u"\n! Rights on {0} can't be modified").format(keyfile)
|
||||
return False
|
||||
if key_chmod != '':
|
||||
try:
|
||||
res = Popen("/bin/chmod %s %s" % (key_chmod, keyfile), shell=True).wait()
|
||||
assert res == 0
|
||||
except:
|
||||
print _(u"\n! Rights on {0} can't be modified").format(keyfile)
|
||||
return False
|
||||
if cert_user != '':
|
||||
try:
|
||||
res = Popen("/bin/chown %s %s" % (cert_user, certfile), shell=True).wait()
|
||||
assert res == 0
|
||||
except:
|
||||
print _(u"\n! Rights on {0} can't be modified").format(certfile)
|
||||
return False
|
||||
if cert_grp != '':
|
||||
try:
|
||||
res = Popen("/bin/chgrp %s %s" % (cert_grp, certfile), shell=True).wait()
|
||||
assert res == 0
|
||||
except:
|
||||
print _(u"\n! Rights on {0} can't be modified").format(certfile)
|
||||
return False
|
||||
if cert_chmod != '':
|
||||
try:
|
||||
res = Popen("/bin/chmod %s %s" % (cert_chmod, certfile), shell=True).wait()
|
||||
assert res == 0
|
||||
except:
|
||||
print _(u"\n! Rights on {0} can't be modified").format(certfile)
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def is_simple_cert(cert_file):
|
||||
"""
|
||||
Teste si le fichier contient un simple certificat ou une chaîne.
|
||||
:param cert_file: chemin du fichier à tester
|
||||
:type cert_file: str
|
||||
"""
|
||||
with open(cert_file, 'r') as pem:
|
||||
cert_num = len(re.findall(r'-+BEGIN CERTIFICATE-+', pem.read()))
|
||||
return cert_num == 1
|
||||
|
||||
def get_certs_catalog(simple=True):
|
||||
"""
|
||||
Créer un dictionnaire des certificats présents
|
||||
pour accélérer la reconstitution de la chaîne
|
||||
de certificats intermédiaires.
|
||||
:param simple: filtre sur les certificats à référencer
|
||||
:type simple: booléen
|
||||
"""
|
||||
global certs_catalog
|
||||
certs_catalog = {}
|
||||
for cert_file in glob.glob(os.path.join(ssl_dir, 'certs/*')):
|
||||
try:
|
||||
if simple and is_simple_cert(cert_file):
|
||||
certs_catalog[get_subject(certfile=cert_file)] = cert_file
|
||||
elif not simple:
|
||||
certs_catalog[get_subject(certfile=cert_file)] = cert_file
|
||||
except:
|
||||
continue
|
||||
return certs_catalog
|
||||
|
||||
|
||||
def get_certs_chain(certs):
|
||||
"""
|
||||
Récupération de la chaîne de certificats
|
||||
:param certs: liste des certificats dans l'ordre de la chaîne.
|
||||
:type certs: liste de chemins
|
||||
"""
|
||||
global certs_catalog, ca_issuer
|
||||
load_default_conf_if_needed()
|
||||
subject = get_subject(certfile=certs[-1])
|
||||
issuer = get_issuer_subject(certfile=certs[-1])
|
||||
if ca_issuer is None:
|
||||
ca_issuer = get_issuer_subject(certfile=ca_file)
|
||||
if subject == issuer:
|
||||
pass
|
||||
elif issuer == ca_issuer:
|
||||
certs.append(ca_file)
|
||||
else:
|
||||
try:
|
||||
if certs_catalog is None:
|
||||
certs_catalog = get_certs_catalog()
|
||||
certs.append(certs_catalog[issuer])
|
||||
get_certs_chain(certs)
|
||||
except KeyError as e:
|
||||
print _(u"Certificate chain incomplete.")
|
||||
return certs
|
||||
|
||||
|
||||
def get_intermediate_certs(cert):
|
||||
"""
|
||||
Récupération de la liste des certificats intermédiaires.
|
||||
:param cert: chemin du certificat pour lequel on reconstitue la chaîne
|
||||
:type cert:
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
try:
|
||||
chain = get_certs_chain([cert,])[1:-1]
|
||||
except:
|
||||
chain = []
|
||||
return chain
|
||||
|
||||
|
||||
def concat_fic(dst_fic, in_fics, overwrite=False, need_link=True):
|
||||
"""
|
||||
Concaténation d'une liste de fichiers dans un fichier de destination
|
||||
(le contenu d'origine est conservé)
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
if need_link:
|
||||
remove_link(dst_fic)
|
||||
if type(in_fics) != list:
|
||||
in_fics = [in_fics]
|
||||
for fic in in_fics:
|
||||
if not os.path.isfile(fic):
|
||||
print _(u"Error: file {0} does not exist").format(fic)
|
||||
data = ""
|
||||
for fic_src in in_fics:
|
||||
f_src = file(fic_src)
|
||||
data += f_src.read().rstrip() + '\n'
|
||||
f_src.close()
|
||||
if overwrite:
|
||||
f_dst = file(dst_fic, "w")
|
||||
else:
|
||||
f_dst = file(dst_fic, "a+")
|
||||
f_dst.write(data)
|
||||
f_dst.close()
|
||||
if need_link:
|
||||
build_link(dst_fic, in_fics)
|
||||
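A hedged sketch tying the helpers above together: rebuild the intermediate chain of a served certificate and append it to the PEM file (the certificate path is illustrative):

srv_cert = '/etc/ssl/certs/web.crt'          # hypothetical certificate
intermediates = get_intermediate_certs(srv_cert)
if intermediates:
    # Append the intermediate CAs after the certificate itself,
    # without touching the hash links in the certs directory.
    concat_fic(srv_cert, intermediates, need_link=False)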
|
||||
def gen_certs(regen=False, merge=True):
|
||||
"""
|
||||
Génère la ca puis les certificats
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
verif_ca()
|
||||
ca_generated = gen_ca(regen)
|
||||
if merge:
|
||||
merge_ca()
|
||||
if ca_generated:
|
||||
regen = True
|
||||
certif_loader(regen=regen)
|
||||
|
||||
def verif_ca():
|
||||
"""
|
||||
vérifie que la ca est générée correctement (serial > 0xstart_index) et cn valide
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
# gestion des anciennes version de ca.crt
|
||||
if os.path.isfile(ca_dest_file) and not os.path.isfile(ca_file):
|
||||
# on reprend le premier certificat présent dans ca.crt dans ca_local.crt
|
||||
ca_certs = open(ca_dest_file).read().strip()
|
||||
tag_begin = '-----BEGIN CERTIFICATE-----'
|
||||
try:
|
||||
ca_data = tag_begin + ca_certs.split(tag_begin)[1]
|
||||
local_ca = open(ca_file, 'w')
|
||||
local_ca.write(ca_data)
|
||||
local_ca.close()
|
||||
except IndexError:
|
||||
# impossible de reprendre la ca actuelle, elle sera regénérée
|
||||
pass
|
||||
serial = int(eval('0x%s'%start_index))
|
||||
# vérification de la valeur actuelle du ca
|
||||
# vérification du cn de la ca
|
||||
if os.path.isfile(ca_file):
|
||||
cmd = Popen(['/usr/bin/openssl', 'x509', '-in', ca_file, '-subject', '-noout'], stdout=PIPE)
|
||||
if cmd.wait() != 0:
|
||||
unlink(ca_file)
|
||||
prep_dir()
|
||||
if os.path.isfile(file_serial):
|
||||
serial = open(file_serial).read().strip()
|
||||
# conversion en hexa
|
||||
serial = int(serial, 16)
|
||||
if serial < min_serial:
|
||||
if os.path.isfile(ca_file):
|
||||
unlink(ca_file)
|
||||
unlink(file_serial)
|
||||
for f_index in glob.glob(os.path.join(ssl_dir, 'index*')):
|
||||
unlink(f_index)
|
||||
for f_cert in glob.glob(os.path.join(newcerts_dir, '*.pem')):
|
||||
unlink(f_cert)
|
||||
prep_dir()
|
||||
|
||||
def gen_ca(regen=False, del_passwd=True, extensions="SERVEUR"):
|
||||
"""
|
||||
Generation ca
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
generated = False
|
||||
prep_dir()
|
||||
if not os.path.isfile(ca_conf_file):
|
||||
raise Exception(_(u"Certificate configuration template can not be found:\n\t{0}\n").format(ca_conf_file))
|
||||
if regen or (not os.path.isfile(ca_keyfile)) or (not os.path.isfile(ca_file)):
|
||||
print("* " + _(u"Generating CA certificate"))
|
||||
remove_link(ca_file)
|
||||
## On genère le certif de l'ac
|
||||
ca_gen = '/usr/bin/openssl req -x509 -config %s -newkey rsa:%s -days %s -keyout "%s" -out "%s" -extensions %s >/dev/null 2>&1' % (ca_conf_file, ssl_default_key_bits, ssl_default_cert_time, tmp_keyfile, ca_file, extensions)
|
||||
cmd = Popen(ca_gen, shell=True)
|
||||
if cmd.wait() != 0:
|
||||
raise Exception(_(u"Error while generating CA"))
|
||||
if del_passwd:
|
||||
sup_passwd(tmp_keyfile, ca_keyfile)
|
||||
if os.path.isfile(tmp_keyfile):
|
||||
unlink(tmp_keyfile)
|
||||
generated = True
|
||||
## application des droits
|
||||
finalise_cert(ca_file, ca_keyfile, key_chmod='600')
|
||||
build_link(ca_file)
|
||||
## génération d'une crl
|
||||
if not os.path.isfile(os.path.join(ssl_dir, 'eole.crl')):
|
||||
print(_(u"Generating certificate revocation list (CRL)"))
|
||||
crl_gen = '/usr/bin/openssl ca -gencrl -config %s -crldays %s -out %s/eole.crl >/dev/null 2>&1' % (ca_conf_file, ssl_default_cert_time, ssl_dir)
|
||||
cmd = Popen(crl_gen, shell=True)
|
||||
if cmd.wait() != 0:
|
||||
raise Exception(_(u"Error while generating CRL ({0}/eole.crl)").format(ssl_dir))
|
||||
return generated
|
||||
|
||||
def merge_ca():
|
||||
"""
|
||||
concatène toutes les ca utiles dans ca.crt
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
## concaténation des certificats education
|
||||
ca_list = [ca_file, os.path.join(cert_dir, 'ACInfraEducation.pem')]
|
||||
## concaténation de certificats supplémentaires si définis
|
||||
for ca_perso in glob.glob(os.path.join(local_ca_dir,'*.*')):
|
||||
if os.path.isfile(ca_perso):
|
||||
ca_list.append(ca_perso)
|
||||
concat_fic(ca_dest_file, ca_list, True, False)
|
||||
|
||||
def gen_certif(certfile, keyfile=None, key_user='', key_grp='', key_chmod='',
|
||||
cert_user='', cert_grp='', cert_chmod='', regen=False, copy_key=False,
|
||||
del_passwd=True, signe_req=True, container=None, client_cert=False,
|
||||
cert_conf_file=None):
|
||||
"""
|
||||
Génération des requêtes de certificats et signature par la CA
|
||||
"""
|
||||
if not cert_conf_file:
|
||||
if client_cert:
|
||||
cert_conf_file = client_conf_file
|
||||
else:
|
||||
cert_conf_file = conf_file
|
||||
load_default_conf_if_needed()
|
||||
if not os.path.isfile(cert_conf_file):
|
||||
raise Exception(_(u"Certificate configuration template can not be found:\n\t{0}\n").format(cert_conf_file))
|
||||
|
||||
basefile = os.path.splitext(certfile)[0]
|
||||
if keyfile is None:
|
||||
keyfile = "%s.key" % (basefile)
|
||||
|
||||
if container != None:
|
||||
cpath = client.get_container(name=container)['path']
|
||||
certfile = cpath + certfile
|
||||
keyfile = cpath + keyfile
|
||||
|
||||
if regen or not os.path.isfile(certfile) or not os.path.isfile(keyfile):
|
||||
|
||||
remove_link(certfile)
|
||||
if not isdir(dirname(certfile)):
|
||||
raise Exception(_(u"Folder {0} does not exist.").format(dirname(certfile)))
|
||||
if not isdir(dirname(keyfile)):
|
||||
raise Exception(_(u"Folder {0} does not exist.").format(dirname(keyfile)))
|
||||
|
||||
# certificat absent ou regénération demandée
|
||||
fic_p10 = os.path.join(req_dir, "%s.p10" % (os.path.basename(basefile)))
|
||||
# génération de la requête de certificat x509 et d'un simili certificat auto-signé
|
||||
if exists(keyfile):
|
||||
gen_req = '/usr/bin/openssl req -new -key "%s" -days %s -config %s -out "%s" >/dev/null 2>&1' % (
|
||||
keyfile, ssl_default_cert_time, cert_conf_file, fic_p10)
|
||||
new_key = False
|
||||
else:
|
||||
gen_req = '/usr/bin/openssl req -new -newkey rsa:%s -days %s -config %s -keyout "%s" -out "%s" >/dev/null 2>&1' % (
|
||||
ssl_default_key_bits, ssl_default_cert_time, cert_conf_file, tmp_keyfile, fic_p10)
|
||||
new_key = True
|
||||
cmd = Popen(gen_req, shell=True)
|
||||
if cmd.wait() != 0:
|
||||
raise Exception(_(u'! Error while generating certificate request {0} !').format(fic_p10))
|
||||
if new_key:
|
||||
if del_passwd:
|
||||
sup_passwd(tmp_keyfile, keyfile)
|
||||
else:
|
||||
copy(tmp_keyfile, keyfile)
|
||||
if os.path.isfile(tmp_keyfile):
|
||||
unlink(tmp_keyfile)
|
||||
if signe_req:
|
||||
# on signe la requête
|
||||
ca_signe = '/usr/bin/openssl ca -in "%s" -config %s -out "%s" -batch -notext >/dev/null 2>&1' % (fic_p10, cert_conf_file, certfile)
|
||||
cmd = Popen(ca_signe, shell=True)
|
||||
if cmd.wait() != 0:
|
||||
raise Exception(_(u'! Error while signing certificate request {0} !') % fic_p10)
|
||||
print(_(u"* Certificate {0} successfully generated").format(certfile))
|
||||
if copy_key:
|
||||
concat_fic(certfile, [keyfile], need_link=False)
|
||||
finalise_cert(certfile, keyfile, key_user=key_user,
|
||||
key_grp=key_grp, key_chmod=key_chmod,
|
||||
cert_user=cert_user, cert_grp=cert_grp,
|
||||
cert_chmod=cert_chmod)
|
||||
build_link(certfile)
|
||||
|
||||
|
||||
def remove_link(name, remove_broken_link=True):
|
||||
load_default_conf_if_needed()
|
||||
if not name.startswith(join(ssl_dir, 'certs')):
|
||||
return
|
||||
for cert_link in glob.glob(os.path.join(ssl_dir, 'certs/*')):
|
||||
if islink(cert_link):
|
||||
if remove_broken_link and not exists(cert_link):
|
||||
#print 'ok lien cassé pour {} donc supprimé'.format(cert_link)
|
||||
unlink(cert_link)
|
||||
elif str(name) == realpath(cert_link):
|
||||
#print 'ok suppression lien {} comme demandé ({})'.format(cert_link, name)
|
||||
unlink(cert_link)
|
||||
|
||||
|
||||
def build_link(name, concats=[]):
|
||||
load_default_conf_if_needed()
|
||||
if not name.startswith(join(ssl_dir, 'certs')):
|
||||
return
|
||||
def _check_contats_link(link):
|
||||
# supprimer tous les liens vers les fichiers utilises pour la concatenation
|
||||
if islink(link):
|
||||
if realpath(link) in concats:
|
||||
#print 'ok suppression du link {} ({} est dans {})'.format(link, realpath(link), concats)
|
||||
unlink(link)
|
||||
|
||||
def _check_link(fp, suffix):
|
||||
# calcul du bon suffix utilise dans le nom
|
||||
# si le fichier existe avec le suffix courant, ajoute 1 au numero de suffix
|
||||
new_name = join(dir_name, fp) + '.' + str(suffix)
|
||||
if islink(new_name):
|
||||
#print 'pas de suppression du link {} ({} n\'est pas dans {})'.format(new_name, realpath(new_name), concats)
|
||||
return _check_link(fp, suffix + 1)
|
||||
#else:
|
||||
# print "ok ce n'est pas un link {}".format(new_name)
|
||||
return new_name
|
||||
|
||||
def _build_link(ret):
|
||||
# creer un lien a partir du hash du subject
|
||||
if ret != '':
|
||||
fp = ret.split('\n')[0]
|
||||
if fp.isalnum():
|
||||
if concats != []:
|
||||
for link in glob.glob(join(dir_name, fp) + '.*'):
|
||||
_check_contats_link(link)
|
||||
|
||||
new_name = _check_link(fp, 0)
|
||||
#print 'ok creation du link {} vers {}'.format(new_name, name)
|
||||
symlink(name, new_name)
|
||||
return stat(new_name).st_mtime
|
||||
return 0
|
||||
|
||||
dir_name = dirname(name)
|
||||
subject_fp = ["/usr/bin/openssl", "x509", "-subject_hash", "-fingerprint", "-noout", "-in", name]
|
||||
subject_fp_old = ["/usr/bin/openssl", "x509", "-subject_hash_old", "-fingerprint", "-noout", "-in", name]
|
||||
new_timestamp = _build_link(system_out(subject_fp)[1])
|
||||
new_timestamp = max(_build_link(system_out(subject_fp_old)[1]), new_timestamp)
|
||||
if isfile(SSL_LAST_FILE):
|
||||
try:
|
||||
fh = open(SSL_LAST_FILE, 'r')
|
||||
timestamp = float(fh.read().strip())
|
||||
except ValueError:
|
||||
timestamp = 0
|
||||
if new_timestamp > timestamp:
|
||||
fh = open(SSL_LAST_FILE, 'w')
|
||||
fh.write(str(new_timestamp))
|
||||
fh.close()
|
||||
|
||||
|
||||
def rehash_if_needed():
|
||||
load_default_conf_if_needed()
|
||||
need_rehash = False
|
||||
if isfile(SSL_LAST_FILE):
|
||||
try:
|
||||
fh = open(SSL_LAST_FILE, 'r')
|
||||
timestamp = int(float(fh.read().strip()))
|
||||
for cert_link in glob.glob(os.path.join(ssl_dir, 'certs/*')):
|
||||
try:
|
||||
if timestamp < int(stat(cert_link).st_mtime):
|
||||
need_rehash = True
|
||||
break
|
||||
except:
|
||||
pass
|
||||
except ValueError:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
need_rehash = True
|
||||
else:
|
||||
need_rehash = True
|
||||
|
||||
if need_rehash:
|
||||
system_code(['/usr/bin/c_rehash'])
|
||||
new_timestamp = 0
|
||||
for cert_link in glob.glob(os.path.join(ssl_dir, 'certs/*')):
|
||||
if isfile(cert_link):
|
||||
timestamp = stat(cert_link).st_mtime
|
||||
if timestamp > new_timestamp:
|
||||
new_timestamp = timestamp
|
||||
fh = open(SSL_LAST_FILE, 'w')
|
||||
fh.write(str(new_timestamp))
|
||||
fh.close()
|
||||
|
||||
|
||||
# gen_certif utils reader
|
||||
|
||||
def certif_loader(regen=None):
|
||||
"""charge les fichiers permettant de générer les certificats
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
# XXX FIXME : changer le path de data vers les paquets container,
|
||||
# XXX FIXME et déplacer les .gen_cert
|
||||
files = glob.glob(join('/usr/share/eole/certs', '*_*.gen_cert'))
|
||||
files.sort()
|
||||
for fname in files:
|
||||
# puts name in global namespace because we need it in execfile's
|
||||
# namespace in rules_loader
|
||||
name = splitext(basename(fname))[0].split('_')[1]
|
||||
# exec gen_certs
|
||||
execfile(fname, globals(),locals())
|
||||
|
||||
def get_subject(cert=None, certfile=None):
|
||||
"""
|
||||
récupère le subject d'un certificat.
|
||||
spécifier obligatoirement un des deux paramètres :
|
||||
- cert : contenu du certificat
|
||||
- certfile : nom du fichier du certificat
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
global regexp_get_subject
|
||||
if None not in (cert, certfile):
|
||||
raise Exception(_(u'cert or certfile must be None'))
|
||||
if cert == certfile:
|
||||
raise Exception(_(u'cert or certfile must be set'))
|
||||
if certfile != None:
|
||||
cmd = ['openssl', 'x509', '-in', certfile, '-subject', '-noout']
|
||||
stdin = None
|
||||
else:
|
||||
cmd = ['openssl', 'x509', '-subject', '-noout']
|
||||
stdin = cert
|
||||
ret = system_out(cmd=cmd, stdin=stdin)
|
||||
if ret[0] != 0:
|
||||
raise Exception(_(u'error in {0}: {1}').format(' '.join(cmd), str(ret[2])))
|
||||
ret = ret[1].rstrip()
|
||||
if not ret.startswith("subject= "):
|
||||
raise Exception(_(u'Invalid certificate subject: {0} ').format(ret))
|
||||
if regexp_get_subject is None:
|
||||
regexp_get_subject = re.compile('^subject= (.*)/CN=(.*)')
|
||||
return regexp_get_subject.findall(ret)[0]
|
||||
|
||||
def get_issuer_subject(cert=None, certfile=None):
|
||||
"""
|
||||
récupère le subject de la CA d'un certificat.
|
||||
spécifier obligatoirement un des deux paramètres :
|
||||
- cert : contenu du certificat
|
||||
- certfile : nom du fichier du certificat
|
||||
"""
|
||||
load_default_conf_if_needed()
|
||||
if None not in (cert, certfile):
|
||||
raise Exception(_(u'cert or certfile must be None'))
|
||||
if cert == certfile:
|
||||
raise Exception(_(u'cert or certfile must be set'))
|
||||
if certfile != None:
|
||||
cmd = ['openssl', 'x509', '-in', certfile, '-issuer', '-noout']
|
||||
stdin = None
|
||||
else:
|
||||
cmd = ['openssl', 'x509', '-issuer', '-noout']
|
||||
stdin = cert
|
||||
ret = system_out(cmd=cmd, stdin=stdin)
|
||||
if ret[0] != 0:
|
||||
raise Exception(_(u'error in {0}: {1}').format(' '.join(cmd), str(ret[2])))
|
||||
ret = ret[1].rstrip()
|
||||
if not ret.startswith("issuer= "):
|
||||
raise Exception(_(u'Invalid certificate issuer: {0} ').format(ret))
|
||||
regexp = '^issuer= (.*)/CN=(.*)'
|
||||
return re.findall(regexp, ret)[0]
|
||||
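Illustrative calls on the default local CA (exactly one of cert/certfile must be passed; the path is the one computed by load_conf with the default ssl_dir):

ca = '/etc/ssl/certs/ca_local.crt'
print(get_subject(certfile=ca))         # (subject prefix, CN) tuple
print(get_issuer_subject(certfile=ca))  # issuer subject; same as above if self-signed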
|
||||
def load_conf(ssl_dico):
|
||||
global ssl_dir, cert_dir, key_dir, tmp_keyfile, file_serial, req_dir
|
||||
global local_ca_dir, newcerts_dir, ca_conf_file, conf_file, client_conf_file
|
||||
global ca_file, ca_dest_file, ca_keyfile, start_index, min_serial
|
||||
global ssl_default_key_bits, ssl_default_cert_time
|
||||
global certs_catalog
|
||||
|
||||
ssl_dir = ssl_dico.get('ssl_dir', ssl_dir)
|
||||
cert_dir = ssl_dico.get('cert_dir', os.path.join(ssl_dir, "certs"))
|
||||
key_dir = ssl_dico.get('key_dir', os.path.join(ssl_dir, "private"))
|
||||
tmp_keyfile = ssl_dico.get('tmp_keyfile', os.path.join(key_dir, "tmpkey.key"))
|
||||
file_serial = ssl_dico.get('file_serial', os.path.join(ssl_dir, "serial"))
|
||||
req_dir = ssl_dico.get('req_dir', os.path.join(ssl_dir, "req"))
|
||||
local_ca_dir = ssl_dico.get('local_ca_dir', os.path.join(ssl_dir, "local_ca"))
|
||||
newcerts_dir = ssl_dico.get('newcerts_dir', os.path.join(ssl_dir, "newcerts"))
|
||||
ca_conf_file = ssl_dico.get('ca_conf_file', ca_conf_file)
|
||||
conf_file = ssl_dico.get('conf_file', conf_file)
|
||||
client_conf_file = ssl_dico.get('client_conf_file', conf_file)
|
||||
# chemin de la CA
|
||||
ca_file = ssl_dico.get('ca_file', os.path.join(cert_dir, "ca_local.crt"))
|
||||
ca_dest_file = ssl_dico.get('ca_dest_file', os.path.join(cert_dir, "ca.crt"))
|
||||
ca_keyfile = ssl_dico.get('ca_keyfile', os.path.join(key_dir, "ca.key"))
|
||||
# index
|
||||
start_index = ssl_dico.get('start_index', hex(int(time.time()))[2:])
|
||||
min_serial = int(eval('0x30'))
|
||||
ssl_default_key_bits = ssl_dico.get('ssl_default_key_bits', client.get_creole('ssl_default_key_bits', 2048))
|
||||
ssl_default_cert_time = ssl_dico.get('ssl_default_cert_time', client.get_creole('ssl_default_cert_time', 1096))
|
||||
|
||||
def load_default_conf_if_needed():
|
||||
"""creoled n'est pas forcement démarré à ce moment là
|
||||
ne charger la configuration par défaut qu'à l'utilisation de la lib
|
||||
et non a l'importantion
|
||||
#8448
|
||||
"""
|
||||
global ssl_dir
|
||||
if ssl_dir == None:
|
||||
load_conf({'ssl_dir': '/etc/ssl',
|
||||
'ca_conf_file': '/etc/eole/ssl/ca-eole.conf',
|
||||
'conf_file': '/etc/eole/ssl/certif-eole.conf',
|
||||
'client_conf_file': '/etc/eole/ssl/client-eole.conf'})
|
||||
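A sketch of overriding the default layout (the directory name is invented; any key left out keeps the value computed inside load_conf):

load_conf({'ssl_dir': '/var/lib/eole/ssl',                 # hypothetical tree
           'ca_conf_file': '/etc/eole/ssl/ca-eole.conf',
           'conf_file': '/etc/eole/ssl/certif-eole.conf',
           'client_conf_file': '/etc/eole/ssl/client-eole.conf'})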
|
||||
ssl_dir=None
|
||||
ca_conf_file=None
|
||||
client_conf_file=None
|
||||
conf_file=None
|
||||
certs_catalog = None
|
||||
ca_issuer = None
|
838  creole/client.py
|
@ -1,838 +0,0 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
##########################################################################
|
||||
# creole.client - client to request creole.server through REST API
|
||||
# Copyright © 2012,2013 Pôle de compétences EOLE <eole@ac-dijon.fr>
|
||||
#
|
||||
# License CeCILL:
|
||||
# * in french: http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# * in english http://www.cecill.info/licences/Licence_CeCILL_V2-en.html
|
||||
##########################################################################
|
||||
|
||||
"""Request informations from :class:`creole.CreoleServer`
|
||||
|
||||
Simple http :mod:`restkit.request` client to request and manipulate
|
||||
informations from :class:`creole.CreoleServer`.
|
||||
|
||||
"""
|
||||
|
||||
from http_parser.http import NoMoreData
|
||||
import restkit
|
||||
import eventlet
|
||||
from restkit.errors import ResourceError, RequestError, ParseException, RequestTimeout
|
||||
from eventlet.timeout import Timeout as EventletTimeout
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
import json
|
||||
import logging
|
||||
from time import sleep
|
||||
|
||||
from .dtd_parser import parse_dtd
|
||||
from .config import dtdfilename
|
||||
|
||||
from .i18n import _
|
||||
from pyeole.encode import normalize
|
||||
|
||||
import re
|
||||
|
||||
# Stat filesystem
|
||||
import os
|
||||
|
||||
# Create instance method on the fly
|
||||
import types
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
_CONTAINER_COMPONENTS = ['container'] + parse_dtd(dtdfilename)['container']['options']
|
||||
"""List of components used to define an LXC container.
|
||||
|
||||
They are extracted from the ``creole.dtd``.
|
||||
|
||||
Each of them are use to fabric two accessor methods bound to
|
||||
:class:`CreoleClient`.
|
||||
|
||||
"""
|
||||
LOCAL_URL = 'http://127.0.0.1:8000'
|
||||
#Si on veut garder les threads, on peut désactiver les reap_connections pour éviter les tracebacks
|
||||
#restkit.session.get_session('thread', reap_connections=False)
|
||||
|
||||
|
||||
def _merge_entries(old, new):
|
||||
"""Merge component informations
|
||||
|
||||
This merge keep information from :data:`old` when the :data:`new`
|
||||
is ``None``.
|
||||
|
||||
The boolean information are ored between :data:`old` and
|
||||
:data:`new`.
|
||||
|
||||
:param old: previous component informations
|
||||
:type old: `dict`
|
||||
:param new: new component informations
|
||||
:type new: `dict`
|
||||
:return: merged informations
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
for key, val in new.items():
|
||||
if val is None:
|
||||
# Do not override previous value
|
||||
continue
|
||||
elif isinstance(val, bool):
|
||||
# Switch on first True
|
||||
# old[key] may not exists
|
||||
old[key] = val | old.get(key, False)
|
||||
else:
|
||||
old[key] = val
|
||||
|
||||
return old
|
||||
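A worked example of the merge rules described above (the component fields are invented):

old = {'name': 'web', 'activate': False, 'port': 80}
new = {'name': 'web', 'activate': True, 'port': None}
_merge_entries(old, new)
# -> {'name': 'web', 'activate': True, 'port': 80}
#    'port' keeps its old value (None never overrides); booleans are OR-ed.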
|
||||
|
||||
def _merge_duplicates_in_components(container_info, keys_to_strip=None):
|
||||
"""Merge duplicates entries
|
||||
|
||||
:param container_info: information on a container or group of
|
||||
containers
|
||||
:type container_info: `dict`
|
||||
:param keys_to_strip: keys for which to remove duplicated entries
|
||||
:type keys_to_strip: `list`
|
||||
|
||||
"""
|
||||
# Do not work in-place
|
||||
info = container_info.copy()
|
||||
|
||||
if keys_to_strip is None:
|
||||
# Run on all keys
|
||||
keys_to_strip = info.keys()
|
||||
|
||||
for key in keys_to_strip:
|
||||
if not isinstance(info[key], list):
|
||||
# Do not work on single values
|
||||
continue
|
||||
|
||||
result = OrderedDict()
|
||||
for entry in info[key]:
|
||||
if 'name' in entry:
|
||||
name = repr(entry['name'])
|
||||
if name in result and not entry.get(u'activate', False):
|
||||
# Duplicate found but inactive
|
||||
continue
|
||||
elif name in result:
|
||||
# Merge old and new informations
|
||||
old_entry = result[name]
|
||||
# Make sure entry appears at right place
|
||||
del(result[name])
|
||||
result[name] = _merge_entries(old=old_entry,
|
||||
new=entry)
|
||||
else:
|
||||
# New entry
|
||||
result[name] = entry
|
||||
|
||||
if result:
|
||||
# Store stripped information
|
||||
info[key] = [ item for item in result.values() ]
|
||||
|
||||
return info
|
||||
|
||||
|
||||
def _build_component_accessors(component):
|
||||
"""Fabric of accessors for container components
|
||||
|
||||
It build two accessors:
|
||||
|
||||
- one to get all components for all containers named
|
||||
``get_<component>s``
|
||||
|
||||
- one to get one comoponent item defined for all containers
|
||||
named ``get_<component>``
|
||||
|
||||
:param name: type of container variable
|
||||
:type name: `str`
|
||||
:return: component accessors
|
||||
:rtype: `tuple` of `function`
|
||||
|
||||
"""
|
||||
def all_components(self, container=None):
|
||||
"""Return all components
|
||||
"""
|
||||
return self.get_components('{0}s'.format(component),
|
||||
container=container)
|
||||
|
||||
all_components.__name__ = 'get_{0}s'.format(component)
|
||||
all_components.__doc__ = """Get {0}s for all containers
|
||||
|
||||
:param container: limit search to a container
|
||||
:type container: `str`
|
||||
:returns: {0}s informations
|
||||
:rtype: `list`
|
||||
|
||||
""".format(component)
|
||||
|
||||
def single_component(self, name, container=None):
|
||||
"""Return single component
|
||||
"""
|
||||
components = []
|
||||
ret = self.get_components('{0}s'.format(component),
|
||||
container=container)
|
||||
for item in ret:
|
||||
if item['name'] == name:
|
||||
components.append(item)
|
||||
return components
|
||||
single_component.__doc__ = """Get one {0} for all containers
|
||||
|
||||
:param name: name of {0} to return
|
||||
:type name: `str`
|
||||
:param container: limit search to a container
|
||||
:type container: `str`
|
||||
:returns: {0} informations for all containers
|
||||
:rtype: `list`
|
||||
|
||||
""".format(component)
|
||||
|
||||
single_component.__name__ = 'get_{0}'.format(component)
|
||||
|
||||
return all_components, single_component
|
||||
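Assuming ``interface`` is one of the components declared in creole.dtd, the factory above ends up binding two methods on CreoleClient, roughly usable as follows (the component, interface and container names are illustrative):

client = CreoleClient()
every_nic = client.get_interfaces()                        # one list, all containers
web_eth0 = client.get_interface('eth0', container='web')   # filtered by name/container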
|
||||
|
||||
class CreoleClient(object):
|
||||
"""Request informations from :class:`creole.CreoleServer`.
|
||||
|
||||
In addition, this class provides some utilities to manipulate
|
||||
returned data.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, url=None):
|
||||
"""Initialize client.
|
||||
|
||||
:param url: HTTP URL to the :class:`creole.CreoleServer`
|
||||
:type url: `str`
|
||||
|
||||
"""
|
||||
if url is None:
|
||||
if self.is_in_lxc():
|
||||
url = 'http://192.0.2.1:8000'
|
||||
else:
|
||||
url = LOCAL_URL
|
||||
|
||||
self.url = url
|
||||
comp_list = _CONTAINER_COMPONENTS[:]
|
||||
comp_list.remove('container')
|
||||
# Disable logging of restkit
|
||||
restkit.set_logging('critical', logging.NullHandler())
|
||||
self._is_container_actif = None
|
||||
self._restkit_request = None
|
||||
for component in comp_list:
|
||||
get_all, get_single = _build_component_accessors(component)
|
||||
setattr(self, get_all.__name__,
|
||||
types.MethodType(get_all, self, CreoleClient))
|
||||
setattr(self, get_single.__name__,
|
||||
types.MethodType(get_single, self, CreoleClient))
|
||||
|
||||
@staticmethod
|
||||
def is_in_lxc():
|
||||
"""Check if we are in LXC.
|
||||
|
||||
We are considered inside LXC when ``/proc/self`` is missing (chroot) or ``/dev/lxc/console`` is accessible.
|
||||
|
||||
:return: if we are under LXC.
|
||||
:rtype: `bool`
|
||||
|
||||
"""
|
||||
if not os.path.isdir('/proc/self'):
|
||||
# when launch in chroot
|
||||
return True
|
||||
else:
|
||||
return os.access('/dev/lxc/console', os.F_OK)
|
||||
|
||||
|
||||
def close(self):
|
||||
if self._restkit_request is not None:
|
||||
self._restkit_request.close()
|
||||
|
||||
|
||||
def _request(self, path, **kwargs):
|
||||
"""Send HTTP request to Creole server.
|
||||
|
||||
On connection errors, retry up to three times before giving up.
|
||||
|
||||
:param path: path to the creole resource
|
||||
:type path: `str`
|
||||
:return: response of the request
|
||||
:rtype: :class:`restkit.wrappers.Response`
|
||||
:raise CreoleClientError: on HTTP errors
|
||||
|
||||
"""
|
||||
timeout = 5
|
||||
max_try = 3
|
||||
tried = 0
|
||||
|
||||
method = 'GET'
|
||||
if 'method' in kwargs:
|
||||
method = kwargs['method']
|
||||
del(kwargs['method'])
|
||||
|
||||
uri = restkit.util.make_uri(path, **kwargs)
|
||||
|
||||
while tried < max_try:
|
||||
tried += 1
|
||||
try:
|
||||
# use eventlet backend (#13194, #21388)
|
||||
with eventlet.Timeout(timeout):
|
||||
self._restkit_request = restkit.request(uri, method=method, backend='eventlet')
|
||||
return self._restkit_request
|
||||
except (ResourceError, RequestError, ParseException, NoMoreData, RequestTimeout, EventletTimeout) as err:
|
||||
log.debug(_(u"Connexion error '{0}',"
|
||||
u" retry {1}/{2}").format(err, tried, max_try))
|
||||
sleep(1)
|
||||
|
||||
if isinstance(err, RequestError):
|
||||
msg = _(u"HTTP error: {0}\nPlease check creoled's log (/var/log/rsyslog/local/creoled/creoled.info.log)\nand restart service with command 'service creoled start'")
|
||||
else:
|
||||
msg = _(u"HTTP error: {0}")
|
||||
if isinstance(err, RequestTimeout) or isinstance(err, EventletTimeout):
|
||||
err = _(u"creoled service didn't respond in time")
|
||||
|
||||
raise TimeoutCreoleClientError(msg.format(err))
|
||||
|
||||
def is_container_actif(self):
|
||||
if self._is_container_actif is None:
|
||||
self._is_container_actif = self.get_creole('mode_conteneur_actif', 'non') == 'oui'
|
||||
return self._is_container_actif
|
||||
|
||||
def request(self, command, path=None, **kwargs):
|
||||
"""Send HTTP request to creole server.
|
||||
|
||||
:param command: action to perform for the creole resource
|
||||
:type command: `str`
|
||||
:param path: path to the creole resource
|
||||
:type path: `str`
|
||||
:return: dictionary of variable:value
|
||||
:rtype: `dict`
|
||||
:raise CreoleClientError: on bad response status or HTTP error
|
||||
|
||||
"""
|
||||
if path is not None:
|
||||
path = self.validate_path(path)
|
||||
ret = self._request(self.url + command + path, **kwargs)
|
||||
else:
|
||||
ret = self._request(self.url + command, **kwargs)
|
||||
if ret.status_int != 200:
|
||||
log.debug(_(u'HTML content: {0}').format(ret.body_string()))
|
||||
raise CreoleClientError(_(u"HTML error {0}, please consult creoled events log (/var/log/rsyslog/local/creoled/creoled.info.log) to have more informations").format(ret.status_int))
|
||||
reply = json.loads(ret.body_string())
|
||||
|
||||
# Previous fix for NoMoreData exception #7218 :
|
||||
#ret.connection.close()
|
||||
|
||||
if reply['status'] != 0:
|
||||
if reply['status'] == 4:
|
||||
raise NotFoundError(u"{0}".format(reply['response']))
|
||||
else:
|
||||
raise CreoleClientError(normalize(_("Creole error {0}: {1}")).format(
|
||||
reply['status'], reply['response']))
|
||||
|
||||
return reply['response']
|
||||
|
||||
@staticmethod
|
||||
def validate_path(path):
|
||||
"""Validate the path for http request.
|
||||
|
||||
:data:`path` must use ``/`` as separator with a leading one or
|
||||
use ``.`` as separator.
|
||||
|
||||
:param path: path to the creole resource
|
||||
:type path: `str`
|
||||
:return: slash separated path to the resource
|
||||
:rtype: `str`
|
||||
:raise CreoleClientError: when path does not validate
|
||||
|
||||
"""
|
||||
ret = path
|
||||
if not ret.startswith('/'):
|
||||
if ret.find('.') != -1 and ret.find('/') != -1:
|
||||
raise CreoleClientError(_(u"Path must not mix dotted and" +
|
||||
u" slash notation: '{0}'").format(path))
|
||||
elif ret.find('.') != -1:
|
||||
ret = '/{0}'.format( ret.replace('.', '/') )
|
||||
else:
|
||||
raise CreoleClientError(_(u"Path must start" +
|
||||
u" with '/': '{0}'").format(path))
|
||||
return ret
|
||||
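The accepted notations, illustrated (the variable path itself is just an example):

CreoleClient.validate_path('/creole/general/nom_machine')   # kept as-is
CreoleClient.validate_path('creole.general.nom_machine')    # -> '/creole/general/nom_machine'
CreoleClient.validate_path('creole/general/nom_machine')    # raises CreoleClientError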
|
||||
def get(self, path='/creole', *args, **kwargs):
|
||||
"""Get the values from part of the tree.
|
||||
|
||||
If :data:`path` is a variable, it returns its value.
|
||||
|
||||
If :data:`path` is a tree node, it returns the whole tree
|
||||
of ``variable:value`` as flat dictionary.
|
||||
|
||||
:param path: path to the creole resource
|
||||
:type path: `str`
|
||||
:param default: default value if any error occurs
|
||||
:return: slash separated path to the resource
|
||||
:rtype: `str`
|
||||
|
||||
"""
|
||||
# Use a dictionary to test existence
|
||||
default = {}
|
||||
if len(args) > 1:
|
||||
raise ValueError(_("Too many positional parameters {0}.").format(args))
|
||||
|
||||
if kwargs.has_key('default'):
|
||||
default['value'] = kwargs['default']
|
||||
del(kwargs['default'])
|
||||
elif len(args) == 1:
|
||||
default['value'] = args[0]
|
||||
|
||||
try:
|
||||
ret = self.request('/get', path, **kwargs)
|
||||
except (NotFoundError, CreoleClientError) as err:
|
||||
if default.has_key('value'):
|
||||
ret = default['value']
|
||||
else:
|
||||
raise err
|
||||
|
||||
return ret
|
||||
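A sketch of both behaviours, a single variable with a fallback and then a whole subtree (the variable path is an example, not guaranteed to exist):

client = CreoleClient()
# Single variable, with a default returned if it is missing:
ip = client.get('/creole/interface_0/adresse_ip_eth0', default=None)
# Whole subtree as a flat {path: value} dictionary:
tree = client.get('/creole')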
|
||||
def list(self, path='/creole'):
|
||||
"""List content of a path.
|
||||
|
||||
If :data:`path` is a variable, it returns its name.
|
||||
|
||||
If :data:`path` is a tree node, it returns the list of items
|
||||
under it.
|
||||
|
||||
:param path: path to the creole resource
|
||||
:type path: `str`
|
||||
:return: items present under a path
|
||||
:rtype: `list`
|
||||
|
||||
"""
|
||||
return self.request('/list', path)
|
||||
|
||||
def get_creole(self, name=None, *args, **kwargs):
|
||||
"""Get variables under ``/creole``.
|
||||
|
||||
The full path of variable names is stripped in key names.
|
||||
|
||||
:param path: path to the creole resource
|
||||
:type path: `str`
|
||||
:param default: default value to return if the variable named
|
||||
:data:`name` does not exist or any error occurs
|
||||
:return: variables and their value
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
if name is not None:
|
||||
# Tiramisu has no meaningful message
|
||||
try:
|
||||
ret = self.get('/creole', *args, variable=name, **kwargs)
|
||||
except NotFoundError:
|
||||
msg = _(u'Unknown variable {0}')
|
||||
raise NotFoundError(msg.format(name))
|
||||
else:
|
||||
ret = self.strip_full_path(self.get('/creole', *args, **kwargs))
|
||||
|
||||
return ret
|
||||
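Typical shorthand calls (``mode_conteneur_actif`` is the variable used elsewhere in this module; the positional default avoids NotFoundError when it is absent):

client = CreoleClient()
containers_on = client.get_creole('mode_conteneur_actif', 'non') == 'oui'
all_creole_vars = client.get_creole()    # every /creole variable, short names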
|
||||
def reload_config(self):
|
||||
"""Reload Tiramisu's config
|
||||
"""
|
||||
return self.request('/reload_config')
|
||||
|
||||
def reload_eol(self):
|
||||
"""Reload Tiramisu's partial config
|
||||
"""
|
||||
return self.request('/reload_eol')
|
||||
|
||||
def valid_mandatory(self):
|
||||
return self.request('/valid_mandatory')
|
||||
|
||||
def get_containers(self, group=None):
|
||||
"""Get basic informations of all containers
|
||||
|
||||
:param group: limit search to a group of containers
|
||||
:type group: `str`
|
||||
:return: containers informations
|
||||
:rtype: `list`
|
||||
"""
|
||||
mode_container = self.is_container_actif()
|
||||
if group is None or (not mode_container and group == 'root'):
|
||||
args = {}
|
||||
else:
|
||||
args = {'withoption':'group',
|
||||
'withvalue':group}
|
||||
|
||||
try:
|
||||
ret = self.get('/containers/containers', **args)
|
||||
except NotFoundError:
|
||||
# Tiramisu has no meaningful message
|
||||
if group is not None:
|
||||
msg = _(u'No container found for group {0}')
|
||||
else:
|
||||
msg = _(u'No container found! Is that possible?')
|
||||
raise NotFoundError(msg.format(group))
|
||||
|
||||
ret = self.to_list_of_dict(ret, prefix='container')
|
||||
return ret
|
||||
|
||||
|
||||
def get_container(self, name):
|
||||
"""Get informations of one container
|
||||
|
||||
:param name: type of container variable
|
||||
:type name: `str`
|
||||
:return: component for all containers
|
||||
:rtype: `list`
|
||||
"""
|
||||
try:
|
||||
ret = self.get('/containers/containers',
|
||||
withoption='name',
|
||||
withvalue=name)
|
||||
except NotFoundError:
|
||||
# Tiramisu has no meaningful message
|
||||
raise NotFoundError(_(u'Unknown container {0}').format(name))
|
||||
|
||||
ret = self.to_list_of_dict(ret, prefix='container')
|
||||
return ret[0]
|
||||
|
||||
|
||||
def get_groups(self):
|
||||
"""Get list of container groups
|
||||
|
||||
Every group is a container, but not every container is a group.
|
||||
|
||||
:return: container groups names
|
||||
:rtype: `list`
|
||||
|
||||
"""
|
||||
mode_container = self.is_container_actif()
|
||||
containers = self.get_containers()
|
||||
if not mode_container:
|
||||
groups = ['root']
|
||||
else:
|
||||
groups = []
|
||||
for container in containers:
|
||||
if container['name'] == container['group']:
|
||||
groups.append(container['name'])
|
||||
if 'all' in groups:
|
||||
groups.remove('all')
|
||||
|
||||
return groups
|
||||
|
||||
|
||||
def is_group(self, name):
|
||||
"""Verify is a container is a group of containers.
|
||||
|
||||
:param name: name of the container
|
||||
:type name: `str`
|
||||
:return: is the container a group of containers?
|
||||
:rtype: `bool`
|
||||
|
||||
"""
|
||||
mode_container = self.is_container_actif()
|
||||
if not mode_container:
|
||||
return name == 'root'
|
||||
|
||||
container = self.get_container(name)
|
||||
return name == container['group']
|
||||
|
||||
|
||||
def get_containers_components(self, containers, group=False, merge_duplicates=False):
|
||||
"""Get all components of a list of containers or group of containers.
|
||||
|
||||
:param containers: container names
|
||||
:type containers: `list` of `str`
|
||||
:param group: containers are names of groups of containers
|
||||
:type group: `bool`
|
||||
:param merge_duplicates: merge duplicate entries
|
||||
:type merge_duplicates: `bool`
|
||||
:return: components of the containers
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
comp_list = [ '{0}s'.format(name) for name in _CONTAINER_COMPONENTS[:] ]
|
||||
component = {}
|
||||
|
||||
if not group:
|
||||
if 'all' in containers:
|
||||
# make sure all is first
|
||||
containers.remove('all')
|
||||
|
||||
# Remove duplicates
|
||||
containers = list(set(containers))
|
||||
containers.insert(0, 'all')
|
||||
|
||||
for comp in comp_list:
|
||||
component[comp] = []
|
||||
for container in containers:
|
||||
by_cont = self.get_components(None, container=container, group=group)
|
||||
|
||||
for comp, items in by_cont.items():
|
||||
if comp + 's' in comp_list:
|
||||
component[comp + 's'].extend(items)
|
||||
|
||||
if merge_duplicates:
|
||||
component = _merge_duplicates_in_components(component, comp_list)
|
||||
|
||||
if 'interfaces' in component:
|
||||
for interface in component['interfaces']:
|
||||
if 'gateway' in interface and interface['gateway']:
|
||||
component['gateway'] = {u'interface': interface['name'],
|
||||
u'ip': interface['gateway']}
|
||||
|
||||
return component
|
||||
|
||||
|
||||
def get_container_infos(self, container):
|
||||
"""Get all components of a container or its group
|
||||
|
||||
:param container: container name
|
||||
:type container: `str`
|
||||
:return: components of the container or its group
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
container_info = self.get_container(container)
|
||||
group_name = container_info[u'real_container']
|
||||
container_info = self.get_group_infos(group_name)
|
||||
|
||||
return container_info
|
||||
|
||||
|
||||
def get_group_infos(self, group):
|
||||
"""Get all components of a group of container
|
||||
|
||||
:param group: container group name
|
||||
:type group: `str`
|
||||
:return: components of the container
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
group_info = self.get_containers_components(containers=[group],
|
||||
group=True,
|
||||
merge_duplicates=True)
|
||||
|
||||
# Keep the container names in case we need to act on every container of the group
|
||||
names = []
|
||||
found = False
|
||||
for container in group_info['containers']:
|
||||
name = container['name']
|
||||
names.append(name)
|
||||
if name == group:
|
||||
found = True
|
||||
group_info.update(container)
|
||||
if not found:
|
||||
group_info.update(self.get_container(group))
|
||||
group_info['containers'] = names
|
||||
|
||||
return group_info
|
||||
|
||||
|
||||
def get_components(self, name, container=None, group=False):
|
||||
"""Get component for containers
|
||||
|
||||
:param name: type of container variable
|
||||
:type name: `str`
|
||||
:param container: limit search to a container
|
||||
:type container: `str`
|
||||
:return: component for all containers
|
||||
:rtype: `list`
|
||||
"""
|
||||
if container is not None:
|
||||
if group:
|
||||
option_name = 'real_container'
|
||||
else:
|
||||
option_name = 'container'
|
||||
|
||||
args = {'withoption': option_name,
|
||||
'withvalue': container}
|
||||
else:
|
||||
args = {}
|
||||
|
||||
ret = None
|
||||
if name is None:
|
||||
path = '/containers'
|
||||
else:
|
||||
path = '/containers/{0}'.format(name)
|
||||
try:
|
||||
ret = self.get(path, **args)
|
||||
except NotFoundError:
|
||||
# Tiramisu has no meaningful message
|
||||
msg = _(u'Unknown container components {0} for container {1}')
|
||||
if container is None:
|
||||
msg = _(u'Unknown container components {0}')
|
||||
else:
|
||||
args = {'withoption':'container_group',
|
||||
'withvalue':container}
|
||||
try:
|
||||
ret = self.get(path, **args)
|
||||
except NotFoundError:
|
||||
msg = _(u'Unknown container components {0} for container {1}')
|
||||
# If not a container, maybe a container's group
|
||||
if ret is None:
|
||||
raise NotFoundError(msg.format(str(name), container))
|
||||
if name is None:
|
||||
comp_list = _CONTAINER_COMPONENTS[:]
|
||||
dico = {}
|
||||
ret_comp = {}
|
||||
for comp in comp_list:
|
||||
dico[comp] = {}
|
||||
for path, item in ret.items():
|
||||
spath = path.split('.')
|
||||
#without 's'
|
||||
comp = spath[0][:-1]
|
||||
dico[comp]['.'.join(spath[1:])] = item
|
||||
for comp in comp_list:
|
||||
ret_comp[comp] = self.to_list_of_dict(dico[comp], prefix=comp)
|
||||
|
||||
else:
|
||||
ret_comp = self.to_list_of_dict(ret, prefix=name)
|
||||
return ret_comp
|
||||
|
||||
@classmethod
|
||||
def to_list_of_dict(cls, flat, prefix=None):
|
||||
"""Convert a flat dictionary to a list of dictionaries.
|
||||
|
||||
Build a list of dictionaries ``<name>:<value>`` for each
|
||||
prefix of the form ``<prefix><integer index>.<name>:<value>``
|
||||
|
||||
The list is numerically ordered by the ``<integer index>``
extracted from each key according to :data:`prefix`.
|
||||
|
||||
If the :data:`prefix` is not specified, a random element of
|
||||
:data:`flat` is extracted to compute it.
|
||||
|
||||
:param flat: absolute attribute variable names and their
|
||||
values
|
||||
:type flat: `dict`
|
||||
:param prefix: alphabetic prefix to extract integer index
|
||||
:type prefix: `str`
|
||||
:return: variables and their attributes values
|
||||
:rtype: `list` of `dict`
|
||||
|
||||
"""
|
||||
reply = {}
|
||||
sorted_items = []
|
||||
sort_key = None
|
||||
|
||||
if prefix is None:
|
||||
# Extract prefix name
|
||||
random_key = flat.iterkeys().next()
|
||||
indexed_prefix = random_key.split('.')[0]
|
||||
re_match = re.match(r'(\D+)\d+', indexed_prefix)
|
||||
prefix = re_match.group(1)
|
||||
|
||||
if prefix is not None:
|
||||
# check for none because maybe regexp match did not work
|
||||
# Extract component index as integer for comparaison
|
||||
sort_key = lambda string: int(string.split('.')[0].lstrip(prefix))
|
||||
|
||||
for key in sorted(flat.keys(), key=sort_key):
|
||||
sid, sattr = cls._split_path_leaf(key)
|
||||
if sid not in reply:
|
||||
sorted_items.append(sid)
|
||||
reply[sid] = {}
|
||||
reply[sid][sattr] = flat[key]
|
||||
return [ reply[item] for item in sorted_items ]
|
||||
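A worked example of the conversion (the keys are invented but follow the ``<prefix><index>.<name>`` pattern):

flat = {'interface0.name': 'eth0', 'interface0.method': 'statique',
        'interface1.name': 'eth1', 'interface1.method': 'dhcp'}
CreoleClient.to_list_of_dict(flat, prefix='interface')
# -> [{'name': 'eth0', 'method': 'statique'},
#     {'name': 'eth1', 'method': 'dhcp'}]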
|
||||
@staticmethod
|
||||
def strip_full_path(flat):
|
||||
"""Strip full path of flat dictionary keys.
|
||||
|
||||
:param flat: absolute variable names and their value
|
||||
:type flat: `dict`
|
||||
:return: short variable names and their value
|
||||
:rtype: `dict`
|
||||
"""
|
||||
ret = {}
|
||||
for path in flat:
|
||||
parts = path.split('.')[1:]
|
||||
if len(parts) == 1:
|
||||
# Single variable
|
||||
ret[ parts[0] ] = flat[path]
|
||||
elif len(parts) == 2 and parts[0] == parts[1]:
|
||||
# Master variable
|
||||
ret[ parts[0] ] = flat[path]
|
||||
else:
|
||||
# slave variable
|
||||
ret[ '.'.join(parts) ] = flat[path]
|
||||
return ret
|
||||
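A worked example of the three cases (the key layout is illustrative):

flat = {'creole.nom_machine': 'scribe',            # simple variable
        'creole.eth0.eth0': '192.0.2.2',           # master variable
        'creole.eth0.netmask': '255.255.255.0'}    # slave variable
CreoleClient.strip_full_path(flat)
# -> {'nom_machine': 'scribe', 'eth0': '192.0.2.2', 'eth0.netmask': '255.255.255.0'}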
|
||||
@staticmethod
|
||||
def to_grouped_lists(dict_list, keyname, keyvalue=None):
|
||||
"""Convert a `list` of `dict` to a `dict` :data:`keyvalue`:`list`.
|
||||
|
||||
Build dictionary of ``dictionary[:data:`keyvalue`]:<list of
|
||||
dict>`` to group all items with the same value of a key.
|
||||
|
||||
:param dict_list: dictionaries
|
||||
:type dict_list: `list`
|
||||
:param keyname: name of the key to test
|
||||
:type keyname: `str`
|
||||
:param keyvalue: value to match :data:`keyname`
|
||||
:return: dictionary grouped by a key value
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
reply = {}
|
||||
for key in dict_list:
|
||||
if keyname in key and keyvalue and keyvalue != key[keyname]:
|
||||
continue
|
||||
if keyname not in key:
|
||||
if None not in reply:
|
||||
reply[None] = []
|
||||
reply[None].append(key)
|
||||
else:
|
||||
if key[keyname] not in reply:
|
||||
reply[ key[keyname] ] = []
|
||||
reply[ key[keyname] ].append(key)
|
||||
return reply
|
||||
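A worked example, grouping file entries by their ``source`` key (the values are invented):

files = [{'source': '/etc/hosts', 'name': '/etc/hosts', 'activate': True},
         {'source': '/etc/hosts', 'name': '/etc/hosts.ctn', 'activate': False},
         {'name': '/etc/motd'}]                    # no 'source' key
CreoleClient.to_grouped_lists(files, keyname='source')
# -> {'/etc/hosts': [...the two /etc/hosts entries...],
#     None: [{'name': '/etc/motd'}]}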
|
||||
@staticmethod
|
||||
def _split_path_leaf(path, separator='.'):
|
||||
"""Split path in two parts: dirname and basename.
|
||||
|
||||
If :data:`path` does not contains the :data:`separator`, it's
|
||||
considered as leaf and the dirname of :data:`path` is set to
|
||||
`None`.
|
||||
|
||||
:param path: path to the creole resource
|
||||
:type path: `str`
|
||||
:return: dirname and basename of :data:`path`
|
||||
:rtype: `list`
|
||||
|
||||
"""
|
||||
if path.find(separator) == -1:
|
||||
return (None, path)
|
||||
|
||||
splited = path.split(separator)
|
||||
return ( '.'.join(splited[:-1]), splited[-1] )
|
||||
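Two illustrative splits:

CreoleClient._split_path_leaf('containers.container0.name')
# -> ('containers.container0', 'name')
CreoleClient._split_path_leaf('name')
# -> (None, 'name')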
|
||||
|
||||
class TimeoutCreoleClientError(StandardError):
|
||||
pass
|
||||
|
||||
|
||||
class CreoleClientError(StandardError):
|
||||
"""Bad use of :class:`CreoleClient`
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class NotFoundError(CreoleClientError):
|
||||
"""Requested variable not found
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
print(CreoleClient().get('/'))
|
||||
except Exception as err:
|
||||
print(_(u"Error: {0}").format(err))
|
|
224  creole/containers.py
@ -1,224 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
##########################################################################
|
||||
# creole.containers - management of LXC containers
|
||||
# Copyright © 2012,2013 Pôle de compétences EOLE <eole@ac-dijon.fr>
|
||||
#
|
||||
# License CeCILL:
|
||||
# * in french: http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# * in english http://www.cecill.info/licences/Licence_CeCILL_V2-en.html
|
||||
##########################################################################
|
||||
|
||||
"""Manage LXC containers
|
||||
|
||||
"""
|
||||
|
||||
from .client import CreoleClient, _CONTAINER_COMPONENTS
|
||||
from .config import VIRTENABLED_LOCKFILE, VIRTDISABLED_LOCKFILE
|
||||
from .error import VirtError
|
||||
from .config import templatedir, VIRTROOT
|
||||
from .template import CreoleTemplateEngine
|
||||
from pyeole.process import system_code, system_out, system_progress_out
|
||||
from pyeole.diagnose import test_tcp
|
||||
from .i18n import _
|
||||
|
||||
from distutils.spawn import find_executable
|
||||
from os.path import isdir
|
||||
from os.path import isfile, islink
|
||||
from os.path import ismount
|
||||
from os.path import join
|
||||
from os.path import dirname
|
||||
from os import access
|
||||
from os import F_OK
|
||||
from os import stat
|
||||
from os import symlink
|
||||
from os import makedirs
|
||||
from os import mknod
|
||||
from os import makedev
|
||||
from os import major
|
||||
from os import minor
|
||||
from os import unlink
|
||||
from stat import S_IFBLK
|
||||
from stat import S_ISBLK
|
||||
from hashlib import md5
|
||||
from glob import glob
|
||||
import cjson
|
||||
|
||||
import logging
|
||||
|
||||
client = CreoleClient()
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
_LXC_MD5 = '/etc/eole/lxc.md5'
|
||||
_LXC_LOG = '/var/log/isolation.log'
|
||||
|
||||
_NOT_REALLY_LXC_CONTAINERS = ['root', 'all']
|
||||
"""List of container names that are not to be generated.
|
||||
|
||||
"""
|
||||
|
||||
_LXC_TEMPLATE = {'config': "lxc.config",
|
||||
'fstab': "lxc.fstab",
|
||||
'rootfs/etc/network/interfaces' : "lxc.interfaces",
|
||||
}
|
||||
"""Creole templates for LXC containers.
|
||||
|
||||
"""
|
||||
|
||||
def is_lxc_locked():
|
||||
"""Check if the LXC virtualization is locked.
|
||||
|
||||
The virtualization is locked after first ``instance`` of the
|
||||
server to avoid switching between modes.
|
||||
|
||||
:return: ``enable`` if LXC is enabled, ``disable`` if LXC is
|
||||
disabled or ``None`` where there is no lockfile.
|
||||
|
||||
"""
|
||||
if isfile(VIRTENABLED_LOCKFILE) and isfile(VIRTDISABLED_LOCKFILE):
|
||||
raise VirtError(_(u"Invalid LXC lock files state: both are present."))
|
||||
elif isfile(VIRTENABLED_LOCKFILE):
|
||||
virtlocked = 'enable'
|
||||
elif isfile(VIRTDISABLED_LOCKFILE):
|
||||
virtlocked = 'disable'
|
||||
else:
|
||||
virtlocked = None
|
||||
return virtlocked
|
||||
|
||||
def is_lxc_enabled():
|
||||
"""Check if LXC controller is enabled
|
||||
|
||||
We do not accept to switch between enabled and disabled LXC, after
|
||||
first ``instance``, a lock file is set to check at each
|
||||
``reconfigure``.
|
||||
|
||||
:return: If the LXC container mode is enabled.
|
||||
:rtype: `bool`
|
||||
:raise VirtError: if state in inconsistent between configuration
|
||||
and lock files.
|
||||
|
||||
"""
|
||||
containers_enabled = client.get_creole('mode_conteneur_actif', 'non') == 'oui'
|
||||
if containers_enabled and not find_executable('lxc-info'):
|
||||
raise VirtError(_(u'LXC is enabled but LXC commands not found in PATH.'))
|
||||
|
||||
if containers_enabled and is_lxc_locked() == 'disable':
|
||||
raise VirtError(_(u"Server already instantiated in no containers mode, attempt to activate containers mode aborted."))
|
||||
elif not containers_enabled and is_lxc_locked() == 'enable':
|
||||
raise VirtError(_(u"Server already instantiated in containers mode, attempt to activate no containers mode aborted."))
|
||||
|
||||
return containers_enabled
|
||||
|
||||
def generate_lxc_container(name, logger=None):
|
||||
"""Run creation of a container.
|
||||
|
||||
Check if LXC is enabled and take care of ``root`` and ``all``
|
||||
containers.
|
||||
|
||||
:param name: name of the LXC container
|
||||
:type name: `str`
|
||||
|
||||
"""
|
||||
if name not in _NOT_REALLY_LXC_CONTAINERS:
|
||||
if not test_tcp('localhost', client.get_creole('apt_cacher_port')):
|
||||
raise Exception(_('cacher not available, please check logs in /var/log/apt-cacher-ng/ and restart it with "service apt-cacher-ng start" command'))
|
||||
if isfile(_LXC_LOG):
|
||||
unlink(_LXC_LOG)
|
||||
cmd = ['lxc-create', '-n', name, '-t', 'eole']
|
||||
log.debug('Run: {0}'.format(' '.join(cmd)))
|
||||
code, stdout, stderr = system_progress_out(cmd, _(u"Managing container {0}").format(name), logger)
|
||||
fh = open(_LXC_LOG, 'w')
|
||||
fh.write(stdout)
|
||||
fh.write(stderr)
|
||||
fh.close()
|
||||
if code != 0 and stdout.find(u"'{0}' already exists".format(name)) >= 0:
|
||||
raise Exception(_('error during the container creation process, more information in {0}').format(_LXC_LOG))
|
||||
path_container = client.get_creole('container_path_{0}'.format(name))
|
||||
path_apt_eole_conf = join(path_container, 'etc', 'apt', 'apt-eole.conf')
|
||||
path_apt_eole = join(path_container, 'usr', 'sbin', 'apt-eole')
|
||||
if not isfile(path_apt_eole_conf) or not isfile(path_apt_eole):
|
||||
raise Exception(_('eole-common-pkg not installed in container, something went wrong, more information in {0}').format(_LXC_LOG))
|
||||
|
||||
|
||||
def is_lxc_running(container):
|
||||
"""Check if an LXC container is running.
|
||||
|
||||
This check at LXC level and check TCP on port SSH.
|
||||
|
||||
:param container: the container informations
|
||||
:type container: `dict`
|
||||
:return: if the container is running and reachable
|
||||
:rtype: `bool`
|
||||
|
||||
"""
|
||||
|
||||
return is_lxc_started(container) and test_tcp(container[u'ip'], 22)
|
||||
|
||||
|
||||
def is_lxc_started(container):
|
||||
"""Check if an LXC container is started.
|
||||
|
||||
This check at LXC level and check TCP on port SSH.
|
||||
|
||||
:param container: the container informations
|
||||
:type container: `dict`
|
||||
:return: if the container is started
|
||||
:rtype: `bool`
|
||||
|
||||
"""
|
||||
|
||||
if not is_lxc_enabled() or container.get(u'path', None) == '':
|
||||
return True
|
||||
|
||||
if container.get(u'name', None) is None:
|
||||
raise ValueError(_(u"Container has no name"))
|
||||
|
||||
if container.get(u'ip', None) is None:
|
||||
raise ValueError(_(u"Container {0} has no IP").format(container[u'name']))
|
||||
|
||||
cmd = ['lxc-info', '--state', '--name', container[u'name']]
|
||||
code, stdout, stderr = system_out(cmd)
|
||||
|
||||
return stdout.strip().endswith('RUNNING')
|
||||
|
||||
|
||||
def create_mount_point(group):
|
||||
"""Create mount points in LXC.
|
||||
|
||||
This is required for LXC to start.
|
||||
|
||||
"""
|
||||
if 'fstabs' not in group:
|
||||
return
|
||||
for fstab in group['fstabs']:
|
||||
mount_point = fstab.get('mount_point', fstab['name'])
|
||||
full_path = join(group['path'], mount_point.lstrip('/'))
|
||||
if not isdir(full_path):
|
||||
makedirs(full_path)
|
||||
|
||||
|
||||
def lxc_need_restart():
|
||||
def md5sum(file):
|
||||
return md5(open(file).read()).hexdigest()
|
||||
files = ['/etc/lxc/default.conf', '/etc/default/lxc-net']
|
||||
files += glob('/opt/lxc/*/config')
|
||||
files += glob('/opt/lxc/*/fstab')
|
||||
md5s = []
|
||||
for f in files:
|
||||
md5s.append(md5sum(f))
|
||||
if not isfile(_LXC_MD5):
|
||||
ret = True
|
||||
else:
|
||||
try:
|
||||
old_md5s = cjson.decode(open(_LXC_MD5, 'r').read())
|
||||
except cjson.DecodeError:
|
||||
ret = True
|
||||
else:
|
||||
ret = not old_md5s == md5s
|
||||
|
||||
if ret:
|
||||
fh = open(_LXC_MD5, 'w')
|
||||
fh.write(cjson.encode(md5s))
|
||||
fh.close()
|
||||
return ret
|
||||
|
|
@ -1,115 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
from .i18n import _
|
||||
|
||||
from tiramisu import option
|
||||
CONVERT_VALUE = {'True': True, 'False': False, 'None': None}
|
||||
forbidden_name = ('level',)
|
||||
|
||||
def parse_dtd(filename):
|
||||
"""Parse DTD file and return a dict.
|
||||
Dict structure:
|
||||
|
||||
- key: name of element
|
||||
- values:
|
||||
|
||||
- type: if text, option type
|
||||
- options: list of subelements
|
||||
- needs: list of mandatory attributes with None or list of possible
|
||||
value
|
||||
- optionals: tuple:
|
||||
- list of optional attributes with None or list of possible
|
||||
value
|
||||
- default value (None if no default value)
|
||||
|
||||
Example:
{'container':
    {'type': False,
     'options': ['service', 'interface', 'package', 'file', 'disknod'],
     'needs': {'name': {'values': None, 'type': None}},
     'optionals': {'group': {'values': None, 'default': None,
                             'type': None},
                   'id': {'values': None, 'default': None, 'type': None}}}
}
"""
|
||||
def parse_option(option):
|
||||
option = option.replace('(', '').replace('*', '').replace(')', '')
|
||||
option = option.replace('>', '').replace(' ', '').replace('+', '')
|
||||
option = option.split('|')
|
||||
options = []
|
||||
for opt in option:
|
||||
options.extend(opt.split(','))
|
||||
if options == ['EMPTY']:
|
||||
options = []
|
||||
return options
|
||||
|
||||
def parse_comment(comment, options=None):
|
||||
type_ = None
|
||||
if comment.startswith('<!--') and comment.endswith('-->'):
|
||||
comment = comment[4:-3]
|
||||
if comment.endswith('Option'):
|
||||
if comment == 'ChoiceOption':
|
||||
raise ValueError(_(u'Do not write "ChoiceOption" in comments'))
|
||||
try:
|
||||
type_ = getattr(option, comment)
|
||||
except AttributeError:
|
||||
raise ValueError(_(u"Unvalid comment content: must match a valid attribute name"))
|
||||
else:
|
||||
# the comment is the attribute name; the option type is its value
|
||||
type_ = comment
|
||||
return type_
|
||||
|
||||
fh = open(filename)
|
||||
dtd_load = {}
|
||||
for line in fh.readlines():
|
||||
sline = line.split()
|
||||
if sline == []:
|
||||
continue
|
||||
#for element line
|
||||
if sline[0] == '<!ELEMENT':
|
||||
if sline[-1].startswith('<!--') and sline[-1].endswith('-->'):
|
||||
options = ' '.join(sline[2:-1])
|
||||
else:
|
||||
options = ' '.join(sline[2:])
|
||||
options = parse_option(options)
|
||||
type_ = None
|
||||
if '#PCDATA' in options:
|
||||
options.remove('#PCDATA')
|
||||
if sline[-1].startswith('<!--') and sline[-1].endswith('-->'):
|
||||
type_ = parse_comment(sline[-1], options)
|
||||
else:
|
||||
type_ = option.UnicodeOption
|
||||
dtd_load[sline[1]] = {'type': type_, 'options': options,
|
||||
'needs': {}, 'optionals': {}}
|
||||
#for attlist line
|
||||
elif sline[0] == '<!ATTLIST':
|
||||
if sline[1] in forbidden_name:
|
||||
raise ValueError(_(u'Using name {0} is forbidden in attributes').format(sline[1]))
|
||||
#possible value
|
||||
if sline[3] == 'CDATA':
|
||||
values = None
|
||||
else:
|
||||
if not sline[3].startswith('(') or not sline[3].endswith(')'):
|
||||
raise Exception(_(u'Not a valid list'))
|
||||
sline3 = sline[3][1:-1].split('|')
|
||||
values = []
|
||||
for val in sline3:
|
||||
values.append(CONVERT_VALUE.get(val, val))
|
||||
#comment
|
||||
type_ = parse_comment(sline[-1])
|
||||
#default value or state value (needs or optionals)
|
||||
if sline[4].startswith('#REQUIRED'):
|
||||
dtd_load[sline[1]]['needs'][sline[2]] = {'values': values,
|
||||
'type': type_}
|
||||
elif sline[4].startswith('#IMPLIED'):
|
||||
dtd_load[sline[1]]['optionals'][sline[2]] = {'values': values,
|
||||
'default': None,
|
||||
'type': type_}
|
||||
else:
|
||||
default = sline[4].replace('"', '').replace("'", '').replace(
|
||||
'>', '').strip()
|
||||
default = CONVERT_VALUE.get(default, default)
|
||||
dtd_load[sline[1]]['optionals'][sline[2]] = {'values': values,
|
||||
'default': default,
|
||||
'type': type_}
|
||||
return dtd_load
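# Illustrative usage sketch (not part of the original source); the DTD path
# below is hypothetical and depends on the installation.
if __name__ == '__main__':
    dtd = parse_dtd('/usr/share/creole/creole.dtd')
    # sub-elements allowed inside <container> and its mandatory attributes
    print dtd['container']['options']
    print dtd['container']['needs'].keys()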
|
|
@ -1,36 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright © 2014 Pôle de compétences EOLE <eole@ac-dijon.fr>
|
||||
#
|
||||
# License CeCILL:
|
||||
# * in french: http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# * in english http://www.cecill.info/licences/Licence_CeCILL_V2-en.html
|
||||
##########################################################################
|
||||
|
||||
"""Version variable of EOLE distribution
|
||||
|
||||
"""
|
||||
|
||||
UBUNTU_VERSION = u'xenial'
|
||||
"""Ubuntu version used by EOLE.
|
||||
|
||||
"""
|
||||
EOLE_VERSION = u'2.6'
|
||||
"""Current stable EOLE distribution.
|
||||
|
||||
"""
|
||||
|
||||
EOLE_RELEASE = u'{0}.2'.format(EOLE_VERSION)
|
||||
"""Release version of the current stable EOLE distribution.
|
||||
|
||||
"""
|
||||
|
||||
ENVOLE_VERSION = u'6'
|
||||
"""Envole version to use.
|
||||
|
||||
"""
|
||||
|
||||
LAST_RELEASE = u'2'
|
||||
"""Last stable release for this version
|
||||
|
||||
"""
|
|
@ -1,280 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
fonctions communes Creole
|
||||
"""
|
||||
import os, time, re
|
||||
from os.path import join, isfile
|
||||
from pyeole.process import system_out, system_code
|
||||
from pyeole.ansiprint import print_orange
|
||||
from pyeole.log import init_logging
|
||||
from pyeole.pkg import EolePkg
|
||||
from pyeole.encode import normalize
|
||||
from .config import LOCALKERNEL_FILE, REBOOT_FILE
|
||||
|
||||
from .i18n import _
|
||||
|
||||
# if the creole client is not started
|
||||
global PkgManager
|
||||
PkgManager = None
|
||||
|
||||
#####################
# Kernel management #
#####################
|
||||
|
||||
def split_version(version):
|
||||
"""
|
||||
return version as list splitting subnumbers
|
||||
:param version: version number string
|
||||
:type version: string
|
||||
"""
|
||||
version_splitted = re.split('[-\.]', version)
|
||||
version_splitted = map(int, version_splitted)
|
||||
return version_splitted
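# Illustrative sketch (not part of the original source): sub-numbers are
# compared numerically, unlike a plain string comparison.
assert split_version('4.4.0-101') == [4, 4, 0, 101]
assert split_version('4.4.0-101') > split_version('4.4.0-99')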
|
||||
|
||||
def get_version_filtered_pkgs(prefix='linux-image'):
|
||||
"""
|
||||
return installed packages list ordered by version number
|
||||
"""
|
||||
vers_pkg_re = r"{0}-(?P<vers>[0-9]+(?P<upstr_vers>\.[0-9]+)*(-(?P<pkg_vers>[0-9]+))?)".format(prefix)
|
||||
vers_pkg_re = re.compile(vers_pkg_re)
|
||||
installed_pkgs = get_installed_kernel(prefix)
|
||||
vers_pkgs = [(pkg, split_version(vers_pkg_re.search(pkg).group('vers')))
|
||||
for pkg in installed_pkgs
|
||||
if vers_pkg_re.search(pkg)]
|
||||
vers_pkgs = [pkg[0] for pkg in sorted(vers_pkgs, key=lambda p: p[1])]
|
||||
return vers_pkgs
|
||||
|
||||
def get_custom_kernel():
|
||||
"""
|
||||
renvoie le nom du noyau personnalisé
|
||||
ou None
|
||||
"""
|
||||
if isfile(LOCALKERNEL_FILE):
|
||||
# custom kernel detected
|
||||
kernel_file = LOCALKERNEL_FILE
|
||||
return file(kernel_file).read().strip()
|
||||
|
||||
def get_wanted_kernel():
|
||||
"""
|
||||
renvoie le nom du noyau sur lequel on veut tourner
|
||||
"""
|
||||
custom_kernel = get_custom_kernel()
|
||||
if custom_kernel:
|
||||
ret = custom_kernel
|
||||
else:
|
||||
kernel_images = get_version_filtered_pkgs()
|
||||
# Get last kernel version
|
||||
last_ver = kernel_images[-1].split('-')
|
||||
if len(last_ver) >= 4:
|
||||
last_ver = "{0}-{1}-{2}".format(*last_ver[2:5])
|
||||
elif len(last_ver) == 3:
|
||||
last_ver = "{0}".format(last_ver[-1])
|
||||
ret = last_ver
|
||||
return ret
|
||||
|
||||
def get_current_kernel():
|
||||
"""
|
||||
renvoie le nom du noyau sur lequel on tourne
|
||||
"""
|
||||
version = system_out(['uname', '-r'])[1].strip()
|
||||
return version
|
||||
|
||||
def get_installed_kernel(kernel):
|
||||
"""
|
||||
renvoie la liste des noyaux installés
|
||||
correspondant à celui demandé
|
||||
"""
|
||||
cmd = """COLUMNS=180 dpkg -l 2>/dev/null | awk -F " " '/^(i|h)i.*%s/ {print $2}'""" % kernel
|
||||
return os.popen(cmd).read().splitlines()
|
||||
|
||||
def get_package_depends(pkg):
|
||||
"""
|
||||
Renvois les dépendances d'un paquet
|
||||
"""
|
||||
try:
|
||||
global PkgManager
|
||||
if PkgManager is None:
|
||||
PkgManager = EolePkg('apt')
|
||||
res = PkgManager.get_depends(pkg)
|
||||
return res
|
||||
except:
|
||||
return []
|
||||
|
||||
def controle_kernel(force_grub=True):
|
||||
"""
|
||||
Vérifie si on est sur le noyau désiré
|
||||
Renvoie True si un reboot est nécessaire
|
||||
"""
|
||||
need_boot = False
|
||||
if isfile(REBOOT_FILE):
|
||||
# i.e. /var/run/reboot-required
|
||||
need_boot = True
|
||||
|
||||
wanted_kernel = get_wanted_kernel()
|
||||
# use the specified kernel
|
||||
if wanted_kernel != get_current_kernel():
|
||||
need_boot = True
|
||||
if force_grub:
|
||||
# Update grub does the job since eole-kernel-version 2.3-eole37~2
|
||||
print _(u"Updating Grub configuration")
|
||||
# add LVM_SUPPRESS_FD_WARNINGS for #10761
|
||||
system_code("/usr/sbin/update-grub2", env={'LVM_SUPPRESS_FD_WARNINGS': '1', "LC_ALL": 'fr_FR.UTF-8'})
|
||||
# reboot needed?
|
||||
return need_boot
|
||||
|
||||
def regen_initrd():
|
||||
"""
|
||||
vérifie la présence de l'initrd
|
||||
"""
|
||||
noyau = get_wanted_kernel()
|
||||
if not isfile("/boot/initrd.img-%s" % noyau):
|
||||
print _(u"Initramfs missing, generating :")
|
||||
cmd = ["/usr/sbin/update-initramfs", '-c', '-k', noyau]
|
||||
system_code(cmd)
|
||||
|
||||
def get_kernel_to_remove():
|
||||
"""
|
||||
Obtenir la liste des noyaux a supprimer. Tous les noyaux sauf :
|
||||
- le noyau courant
|
||||
- les deux noyaux les plus récents
|
||||
- l'éventuel noyau personnalisé
|
||||
"""
|
||||
# all installed kernels
|
||||
installed_kernels = get_version_filtered_pkgs()
|
||||
# the two most recent kernels
|
||||
to_keep = installed_kernels[-2:]
|
||||
# all installed headers
|
||||
installed_kernels.extend(get_version_filtered_pkgs(prefix='linux-headers'))
|
||||
# the current kernel
|
||||
to_keep.append('linux-image-{0}'.format(get_current_kernel()))
|
||||
# the optional custom kernel
|
||||
custom_kernel = get_custom_kernel()
|
||||
if custom_kernel:
|
||||
to_keep.append('linux-image-{0}'.format(custom_kernel))
|
||||
# the headers matching the kernels to keep
|
||||
headers_to_keep = [k.replace('image', 'headers') for k in to_keep]
|
||||
headers_to_keep.extend([h.replace('-generic', '') for h in headers_to_keep])
|
||||
to_keep.extend(headers_to_keep)
|
||||
# compute the difference
|
||||
to_remove = list(set(installed_kernels) - set(to_keep))
|
||||
return to_remove
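# Illustrative sketch (not part of the original source; helper name and
# package names are hypothetical) of the selection rule implemented above,
# ignoring headers and the custom kernel.
def _kernel_removal_example():
    installed = ['linux-image-4.4.0-97-generic', 'linux-image-4.4.0-98-generic',
                 'linux-image-4.4.0-101-generic', 'linux-image-4.4.0-103-generic']
    current = 'linux-image-4.4.0-98-generic'
    keep = set(installed[-2:] + [current])
    # only 'linux-image-4.4.0-97-generic' is left to remove
    return sorted(set(installed) - keep)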
|
||||
|
||||
def purge_rc():
|
||||
"""
|
||||
Purge des paquets "rc"
|
||||
"""
|
||||
cmd = """COLUMNS=180 dpkg -l|grep "^rc"|awk -F " " '{print $2}'"""
|
||||
rcs = os.popen(cmd).read().splitlines()
|
||||
for pak in rcs:
|
||||
os.system("dpkg -P %s >/dev/null" % pak)
|
||||
|
||||
def log(etat, msg, type_proc, console=True):
|
||||
"""
|
||||
effectue un log local et éventuellement sur zephir
|
||||
"""
|
||||
msg = normalize(msg)
|
||||
type_proc = normalize(type_proc)
|
||||
display = False
|
||||
log_func = 'info'
|
||||
if etat == "ERR":
|
||||
if console:
|
||||
# display on the console
|
||||
display = True
|
||||
log_func = 'error'
|
||||
|
||||
try:
|
||||
z_logger = init_logging(name=u'zephir', syslog=True, level=u'info', console=display)
|
||||
except ValueError, err:
|
||||
z_logger = init_logging(name=u'zephir', level=u'info', console=True)
|
||||
z_logger.warn(_(u"Syslog logging is not working properly: {0}".format(err)))
|
||||
z_logger.warn(_(u"You may need to start/restart systemd-journald"))
|
||||
|
||||
getattr(z_logger, log_func)("%s => %s : %s " % (type_proc, etat, msg))
|
||||
|
||||
def zephir(etat, msg, type_proc, console=True):
|
||||
""" gestion des messages Zephir """
|
||||
etat_zeph = None
|
||||
if etat.upper().startswith("INIT"):
|
||||
etat_zeph = -1
|
||||
elif etat.upper().startswith("FIN"):
|
||||
etat_zeph = 0
|
||||
elif etat.upper().startswith('ERR'):
|
||||
etat_zeph = 1
|
||||
elif etat.upper().startswith('MSG'):
|
||||
etat_zeph = -2
|
||||
# local log if there is a message or an error
|
||||
if (len(msg) > 0) or (etat.upper() == "ERR"):
|
||||
log(etat, msg, type_proc, console)
|
||||
# log to Zéphir if available
|
||||
if etat_zeph is not None:
|
||||
try:
|
||||
# if the server is registered, send a log to Zéphir
|
||||
from zephir.zephir_conf.zephir_conf import id_serveur
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
from zephir.lib_zephir import log as zlog
|
||||
zlog(type_proc, etat_zeph, msg, str(time.ctime()))
|
||||
|
||||
def init_proc(type_proc):
|
||||
"""
|
||||
initialisation d'une procédure (log démarrage + bcage éventuel)
|
||||
"""
|
||||
if verify_lock(type_proc):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
#def end_proc(etat,msg,type_proc):
|
||||
# """
|
||||
# loggue la fin d'une procédure
|
||||
# """
|
||||
# log(etat,msg,type_proc )
|
||||
|
||||
def verify_lock(name):
|
||||
"""
|
||||
vérifie le bloquage ou non d'une procédure
|
||||
"""
|
||||
LOCK_FILE = "/usr/share/zephir/zephir_locks"
|
||||
if name == "":
|
||||
return True
|
||||
from zephir.lib_zephir import zephir_path
|
||||
try:
|
||||
from zephir.lib_zephir import config, zephir, convert
|
||||
locks = convert(zephir.serveurs.get_locks(config.id_serveur))
|
||||
if locks[0] == 0:
|
||||
# error on Zéphir, skip this step
|
||||
raise Exception
|
||||
locks = [lock[0] for lock in locks[1]]
|
||||
except Exception, mess:
|
||||
# no error message if the server is not registered
|
||||
zephir_error = False
|
||||
if isfile(join(zephir_path, "zephir_conf", "zephir_conf.py")):
|
||||
# do not block if the call to Zéphir fails
|
||||
print ""
|
||||
print_orange(_(u"Checking permissions on Zéphir for {0} impossible.").format(name))
|
||||
print_orange(_(u"Error message: {0}").format(mess))
|
||||
zephir_error = True
|
||||
# look at the last known state
|
||||
if os.path.exists(LOCK_FILE):
|
||||
if zephir_error:
|
||||
print_orange(_(u"Using stored parameters"))
|
||||
file_lock = file(LOCK_FILE)
|
||||
locks = file_lock.read().split('\n')
|
||||
file_lock.close()
|
||||
# block if forbidden
|
||||
if name in locks:
|
||||
return False
|
||||
else:
|
||||
# set up the permissions file
|
||||
content = "\n".join(locks)
|
||||
try:
|
||||
file_lock = file(LOCK_FILE, "w")
|
||||
file_lock.write(content)
|
||||
file_lock.close()
|
||||
except:
|
||||
print _(u"Updating {0} impossible (insufficient rights).").format(LOCK_FILE)
|
||||
# return the result
|
||||
if name in locks:
|
||||
return False
|
||||
return True
|
172
creole/loader.py
172
creole/loader.py
|
@ -10,21 +10,16 @@ from tiramisu.option import (UnicodeOption, OptionDescription, PortOption,
|
|||
IntOption, ChoiceOption, BoolOption, SymLinkOption, IPOption,
|
||||
NetworkOption, NetmaskOption, DomainnameOption, BroadcastOption,
|
||||
URLOption, EmailOption, FilenameOption, UsernameOption, DateOption,
|
||||
PasswordOption, BoolOption, Leadership)
|
||||
PasswordOption, BoolOption, MACOption, Leadership)
|
||||
from tiramisu import Config, MetaConfig, MixConfig
|
||||
from tiramisu.setting import groups
|
||||
from tiramisu.error import ConfigError
|
||||
from tiramisu.setting import owners
|
||||
from tiramisu import Params, ParamOption, ParamValue, ParamContext
|
||||
|
||||
from .config import (FLATTENED_CREOLE_DIR, dtdfilename, eoledirs, eoleextradico, forbiddenextra,
|
||||
configeol, eoleextraconfig)
|
||||
from .config import dtdfilename
|
||||
from .i18n import _
|
||||
from .var_loader import convert_tiramisu_value, modes_level, MACOption # FIXME YO
|
||||
from .loader1 import load_config_eol, load_extras, _list_extras
|
||||
#For compatibility
|
||||
from .loader1 import config_save_values, config_load_store, config_get_values, add_eol_version
|
||||
from .loader1 import load_store, load_config_store, load_values
|
||||
from .xmlreflector import HIGH_COMPATIBILITY
|
||||
#from . import eosfunc
|
||||
from .objspace import CreoleObjSpace
|
||||
|
@ -34,6 +29,34 @@ import imp
|
|||
class CreoleLoaderError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def convert_tiramisu_value(value, obj):
|
||||
"""
|
||||
convertit les variables dans le bon type si nécessaire
|
||||
"""
|
||||
if value is None:
|
||||
return value
|
||||
def _convert_boolean(value):
|
||||
if isinstance(value, bool):
|
||||
return value
|
||||
prop = {'True': True,
|
||||
'False': False,
|
||||
'None': None}
|
||||
if value not in prop:
|
||||
raise Exception('unknown value {} while trying to cast {} to boolean'.format(value, obj))
|
||||
return prop[value]
|
||||
|
||||
func = {IntOption: int, UnicodeOption: str, PortOption: str,
DomainnameOption: str, EmailOption: str, URLOption: str,
IPOption: str, NetmaskOption: str, NetworkOption: str,
BroadcastOption: str, FilenameOption: str,
BoolOption: _convert_boolean}.get(obj, lambda val: val)
|
||||
if isinstance(value, list):
|
||||
return [func(val) for val in value]
|
||||
else:
|
||||
return func(value)
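# Illustrative sketch (not part of the original source): expected casting
# behaviour of convert_tiramisu_value for a few option types.
# convert_tiramisu_value('22', IntOption)        -> 22
# convert_tiramisu_value('True', BoolOption)     -> True
# convert_tiramisu_value(['1', '2'], IntOption)  -> [1, 2]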
|
||||
|
||||
|
||||
CONVERT_OPTION = {'number': dict(opttype=IntOption),
|
||||
'choice': dict(opttype=ChoiceOption),
|
||||
'string': dict(opttype=UnicodeOption),
|
||||
|
@ -464,9 +487,7 @@ class PopulateTiramisuObjects(object):
|
|||
config.property.setdefault(rw_append, 'read_write', 'append')
|
||||
|
||||
config.property.read_only()
|
||||
_modes = list(modes_level)
|
||||
_modes.append('hidden')
|
||||
config.permissive.set(frozenset(_modes))
|
||||
config.permissive.set(frozenset(('basic', 'normal', 'expert', 'hidden')))
|
||||
return config
|
||||
|
||||
|
||||
|
@ -740,134 +761,3 @@ class Family(object):
|
|||
# self.option.impl_set_group_type(groups.master)
|
||||
|
||||
return self.option
|
||||
|
||||
|
||||
def _gen_eol_file(namespace):
|
||||
if namespace == 'creole':
|
||||
return configeol
|
||||
else:
|
||||
return join(eoleextraconfig, namespace, 'config.eol')
|
||||
|
||||
|
||||
def creole_loader(load_values=True, rw=False, namespace='creole',
|
||||
load_extra=False, reload_config=True, owner=None,
|
||||
disable_mandatory=False, force_configeol=None,
|
||||
try_upgrade=True, force_load_creole_owner=None,
|
||||
force_dirs=None, warnings=None, force_instanciate=None,
|
||||
force_dtdfile=None, force_flattened=None,
|
||||
mandatory_permissive=True, from_zephir=None,
|
||||
force_no_save=False, force_eoleextradico=None,
|
||||
force_eoleextraconfig=None, only_load_flattened=False):
|
||||
"""
|
||||
Loads the Creole XML dictionnary files and return a tiramisu config object
|
||||
|
||||
:param bool load_values: Loads (or not) the :file:`config.eol` file
|
||||
:param bool rw: Config's read/write flag
|
||||
:param str namespace: Root's namespace for the config (example: "creole", "bacula", ...)
|
||||
:param bool load_extra: Loads (or not) the extra dictionnaries (if `namespace='creole'`)
|
||||
:param bool reload_config: This parameter is kept for compatibility reasons
|
||||
:param str owner: forces the owner on a modified variable
|
||||
:param bool disable_mandatory: disables the mandatory variables
|
||||
:param str force_configeol: Forces the used configuration file
|
||||
:param bool try_upgrade: tries to upgrade
|
||||
:param force_load_creole_owner: Forces the owner for the loaded variables
|
||||
:param str force_dirs: Forces the folder's name containing the dictionnaries
|
||||
:param warnings: Shows the validation's warnings
|
||||
:param bool force_instanciate: tells us if the server is already instanciated or not
|
||||
:param force_dtdfile: None or dtd filename
|
||||
:param force_flattened: None or flatened filename's name
|
||||
:param only_load_flattened: boolean to desactivate generated of flattened file
|
||||
"""
|
||||
if namespace != 'creole':
|
||||
raise CreoleLoaderError(_('Only creole namespace is supported'))
|
||||
#if reload_config is not True:
|
||||
# raise CreoleLoaderError(_('Cannot reload the configuration'))
|
||||
if force_flattened is None:
|
||||
force_flattened = join(FLATTENED_CREOLE_DIR, 'flattened_creole.xml')
|
||||
if force_dtdfile is None:
|
||||
force_dtdfile = dtdfilename
|
||||
if force_configeol is not None:
|
||||
if not isfile(force_configeol):
|
||||
raise CreoleLoaderError(_("Configuration file unexistent : {0}").format(
|
||||
force_configeol))
|
||||
if load_extra and force_eoleextraconfig is None:
|
||||
# if force_configeol, cannot calculate extra configfile name
|
||||
raise CreoleLoaderError(_('Unable to force_configeol with load_extra.'))
|
||||
if force_dirs is not None and load_extra is True and force_eoleextradico is None:
|
||||
raise CreoleLoaderError(_('If force_dirs is defined, namespace must be set to creole and '
|
||||
'load_extra must be set to False.'))
|
||||
if not only_load_flattened:
|
||||
# should not load values now because this creates a Config
|
||||
eolobj = CreoleObjSpace(force_dtdfile)
|
||||
if force_dirs is not None:
|
||||
dirs = force_dirs
|
||||
else:
|
||||
dirs = eoledirs
|
||||
if from_zephir is not None and type(dirs) != list:
|
||||
#if dirs is not a list, add subdirectory 'local'
|
||||
#and 'variante'
|
||||
orig_dir = dirs
|
||||
dirs = [dirs]
|
||||
for tdir in [join(orig_dir, 'local'),
|
||||
join(orig_dir, 'variante')]:
|
||||
if isdir(tdir):
|
||||
dirs.append(tdir)
|
||||
eolobj.create_or_populate_from_xml('creole', dirs, from_zephir=from_zephir)
|
||||
|
||||
if load_extra:
|
||||
if force_eoleextradico == None:
|
||||
force_eoleextradico = eoleextradico
|
||||
extranames = _list_extras(force_eoleextradico)
|
||||
extranames.sort()
|
||||
if isdir(force_eoleextradico):
|
||||
for directory in extranames:
|
||||
if directory in forbiddenextra:
|
||||
raise CreoleLoaderError(
|
||||
_('Namespace {} for extra dictionary not allowed').format(directory))
|
||||
dirname = join(force_eoleextradico, directory)
|
||||
eolobj.create_or_populate_from_xml(directory, [dirname], from_zephir)
|
||||
eolobj.space_visitor()
|
||||
xmlroot = eolobj.save(force_flattened, force_no_save)
|
||||
else:
|
||||
with open(force_flattened, 'r') as fhd:
|
||||
xmlroot = parse(fhd).getroot()
|
||||
tiramisu_objects = PopulateTiramisuObjects()
|
||||
tiramisu_objects.parse_dtd(force_dtdfile)
|
||||
tiramisu_objects.make_tiramisu_objects(xmlroot)
|
||||
config = tiramisu_objects.build()
|
||||
if warnings is None:
|
||||
# warnings is disabled in read-only mode and enabled in read-write mode by default
|
||||
warnings = rw
|
||||
if warnings is False:
|
||||
config.cfgimpl_get_settings().remove('warnings')
|
||||
if owner is not None:
|
||||
if owner not in dir(owners):
|
||||
owners.addowner(owner)
|
||||
config.cfgimpl_get_settings().setowner(getattr(owners, owner))
|
||||
#load values
|
||||
if force_configeol is not None:
|
||||
configfile = force_configeol
|
||||
else:
|
||||
configfile = _gen_eol_file(namespace)
|
||||
if load_values and isfile(configfile):
|
||||
disable_mandatory = False
|
||||
load_config_eol(config, configfile=configfile, try_upgrade=try_upgrade,
|
||||
force_load_owner=force_load_creole_owner,
|
||||
force_instanciate=force_instanciate)
|
||||
else:
|
||||
config.impl_set_information(namespace, configfile)
|
||||
if load_extra:
|
||||
load_extras(config, load_values=load_values, mandatory_permissive=mandatory_permissive,
|
||||
extradico=force_eoleextradico, force_eoleextraconfig=force_eoleextraconfig)
|
||||
if rw:
|
||||
config.read_write()
|
||||
elif rw is False:
|
||||
config.read_only()
|
||||
|
||||
if disable_mandatory:
|
||||
config.cfgimpl_get_settings().remove('mandatory')
|
||||
config.cfgimpl_get_settings().remove('empty')
|
||||
if from_zephir is not None:
|
||||
return tiramisu_objects.groups, tiramisu_objects.separators, config
|
||||
else:
|
||||
return config
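# Illustrative usage sketch (not part of the original source):
# config = creole_loader(load_extra=True, rw=False)
# loads the creole and extra dictionaries read-only; values come from the
# configured config.eol file when it exists.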
|
||||
|
|
|
@ -1,769 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
#import cjson
|
||||
import json
|
||||
import fcntl
|
||||
import stat
|
||||
import logging
|
||||
|
||||
from os.path import isdir, isfile, join, basename, dirname, splitext
|
||||
from os import listdir, makedirs, major, minor
|
||||
from os import stat as os_stat
|
||||
from distutils.version import StrictVersion
|
||||
try:
|
||||
from collections import OrderedDict
|
||||
except:
|
||||
from pyeole.odict import OrderedDict
|
||||
|
||||
from tiramisu.option import UnicodeOption, OptionDescription, \
|
||||
IntOption, ChoiceOption, BoolOption, SymLinkOption, IPOption, \
|
||||
NetworkOption, NetmaskOption
|
||||
from tiramisu.error import PropertiesOptionError, LeadershipError
|
||||
from tiramisu.setting import owners
|
||||
|
||||
from .config import configeol, eoledirs, dtdfilename, eoleextradico, \
|
||||
eoleextraconfig, forbiddenextra, VIRTROOT, \
|
||||
VIRTBASE, VIRTMASTER, templatedir
|
||||
from .error import ConfigError
|
||||
from .var_loader import modes_level, CreoleFamily, CreoleConstraint, \
|
||||
CreoleVarLoader
|
||||
try:
|
||||
from .client import CreoleClient, CreoleClientError
|
||||
client = CreoleClient()
|
||||
except:
|
||||
client = None
|
||||
from pyeole.encode import normalize
|
||||
try:
|
||||
from .eosfunc import is_instanciate, get_version
|
||||
except:
|
||||
pass
|
||||
|
||||
from .i18n import _
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
class CreoleContainer():
|
||||
"""
|
||||
Charge les conteneurs, les fichiers, les packages, services, interfaces
|
||||
et disknods
|
||||
"""
|
||||
def gen_containers(self, paths):
|
||||
"""
|
||||
Generate Containers information in tiramisu tree
|
||||
|
||||
:paths: paths variables (for added new option in paths's dictionnary)
|
||||
"""
|
||||
containers = []
|
||||
for name, container in self._get_containers().items():
|
||||
container['path'] = 'container_path_{0}'.format(name)
|
||||
container['ip'] = 'container_ip_{0}'.format(name)
|
||||
containers.append(container)
|
||||
|
||||
key_type = {'id': IntOption, 'group': UnicodeOption,
|
||||
'ip': SymLinkOption, 'path': SymLinkOption,
|
||||
'level': UnicodeOption}
|
||||
|
||||
return self._gen_tiramisu_config(paths, "container", containers,
|
||||
key_type)
|
||||
|
||||
def gen_networks(self, paths):
|
||||
var = []
|
||||
descr = None
|
||||
namespace = paths['adresse_ip_br0'].split('.')[0]
|
||||
for descr_ in self.space:
|
||||
if descr_._name == namespace:
|
||||
descr = descr_
|
||||
break
|
||||
if descr == None:
|
||||
raise Exception(_(u'Unable to find namespace: {0}').format(
|
||||
namespace))
|
||||
for name in ['adresse_ip_br0', 'adresse_netmask_br0',
|
||||
'adresse_network_br0', 'adresse_broadcast_br0']:
|
||||
path = paths[name]
|
||||
subpath = path.split('.')[1:]
|
||||
opt = descr
|
||||
for p in subpath:
|
||||
opt = getattr(opt, p)
|
||||
var.append(SymLinkOption(name, opt))
|
||||
return OptionDescription('network', '', var)
|
||||
|
||||
def gen_interfaces(self, paths):
|
||||
"""Add per container interface linked to inter-containers bridge
|
||||
|
||||
Theses interfaces must come before other containers ones as
|
||||
default gateway.
|
||||
|
||||
"""
|
||||
lxc_net = OrderedDict()
|
||||
if self.containers_enabled:
|
||||
interfaces = OrderedDict()
|
||||
containers = self._get_containers()
|
||||
|
||||
for name, container in containers.items():
|
||||
if name in ['all', 'root']:
|
||||
continue
|
||||
lxc_net[name] = {'name': 'containers',
|
||||
'container': name,
|
||||
'linkto': 'br0',
|
||||
'method': 'bridge',
|
||||
'ip': 'container_ip_{0}'.format(name),
|
||||
'mask': 'adresse_netmask_br0',
|
||||
'bcast': 'adresse_broadcast_br0',
|
||||
'gateway': 'adresse_ip_br0'}
|
||||
|
||||
# Insert default interfaces before
|
||||
self.generic['interfaces'] = lxc_net.values() \
|
||||
+ self.generic['interfaces']
|
||||
|
||||
return self.gen_generic('interfaces', paths, copy_requires='ip')
|
||||
|
||||
def gen_service_accesss(self, paths):
|
||||
return self.__gen_service_access_restriction('service_access', paths)
|
||||
|
||||
def gen_service_restrictions(self, paths):
|
||||
return self.__gen_service_access_restriction('service_restriction', paths)
|
||||
|
||||
def __gen_service_access_restriction(self, service_type, paths):
|
||||
"""Add services requires to service_access/service_restriction
|
||||
If a service is disabled, we remove, also, access to this service
|
||||
"""
|
||||
generic_name = service_type + 's'
|
||||
list_name = service_type + 'list'
|
||||
if 'service' in self.requires:
|
||||
for gen in self.generic[generic_name]:
|
||||
service_name = gen['service']
|
||||
requires_name = gen.get(list_name)
|
||||
if requires_name is None:
|
||||
requires_name = '___auto_{0}'.format(service_name)
|
||||
gen[list_name] = requires_name
|
||||
self.requires[service_type][requires_name] = {'optional': True, 'list': []}
|
||||
if service_name in self.requires['service']:
|
||||
service_requires = self.requires['service'][service_name]['list']
|
||||
if self.requires['service'][service_name]['optional'] is False:
|
||||
self.requires['service'][service_name]['optional'] = False
|
||||
self.requires[service_type][requires_name]['list'].extend(service_requires)
|
||||
return self.gen_generic(generic_name, paths, verify_exists_redefine=False)
|
||||
|
||||
def _gen_file(self, fdata, container, containers):
|
||||
"""Generate one file structure for one container
|
||||
|
||||
:param fdata: file information
|
||||
:type fdata: `dict`
|
||||
:param container: container of the file
|
||||
:type container: `dict`
|
||||
:return: file information for a container
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
file_infos = fdata.copy()
|
||||
# take care of os.path.join and absolute part after first
|
||||
# argument.
|
||||
_file = fdata['name']
|
||||
if _file[0] == '/':
|
||||
_file = _file[1:]
|
||||
|
||||
file_infos['container'] = container['name']
|
||||
file_infos['full_name'] = fdata['name']
|
||||
if self.containers_enabled and container['name'] != VIRTMASTER:
|
||||
# Prefix the full path with container rootfs
|
||||
if fdata['container'] == 'all':
|
||||
cont_grp = container['group']
|
||||
else:
|
||||
cont_grp = fdata['container']
|
||||
cont_name = self.get_real_container_name(containers, cont_grp)
|
||||
_file = join(VIRTROOT, cont_name, VIRTBASE, _file)
|
||||
file_infos['full_name'] = _file
|
||||
|
||||
source = file_infos.get('source', basename(_file))
|
||||
source = join(templatedir, source)
|
||||
file_infos['source'] = source
|
||||
return file_infos
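# Illustrative sketch (not part of the original source; values are hypothetical):
# with containers enabled, a file declared as
#   {'name': '/etc/ssh/sshd_config', 'container': 'reseau', ...}
# gets 'full_name' rewritten to
#   join(VIRTROOT, 'reseau', VIRTBASE, 'etc/ssh/sshd_config')
# while 'name' keeps the path as seen inside the container.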
|
||||
|
||||
def gen_files(self, paths):
|
||||
containers = self._get_containers()
|
||||
files = []
|
||||
for fdata in self.generic.get('files', []):
|
||||
if fdata['container'] == 'all':
|
||||
# Generate a file per container
|
||||
for container in containers.values():
|
||||
if container['name'] in ['all', VIRTMASTER]:
|
||||
continue
|
||||
files.append(self._gen_file(fdata, container, containers))
|
||||
else:
|
||||
container = containers[fdata['container']]
|
||||
files.append(self._gen_file(fdata, container, containers))
|
||||
|
||||
key_type = {'source': UnicodeOption, 'mode': UnicodeOption,
|
||||
'full_name': UnicodeOption,
|
||||
'owner': UnicodeOption, 'group': UnicodeOption,
|
||||
'mkdir': BoolOption, 'rm': BoolOption,
|
||||
'del_comment': UnicodeOption,
|
||||
'level': UnicodeOption}
|
||||
return self._gen_tiramisu_config(paths, "file", files, key_type,
|
||||
requires_key='activate')
|
||||
|
||||
def gen_disknods(self, paths):
|
||||
containers = self._get_containers()
|
||||
disknods = []
|
||||
for fdata in self.generic.get('disknods', []):
|
||||
stats = os_stat(fdata['name'])
|
||||
if stat.S_ISBLK(stats.st_mode):
|
||||
dev_type = u'b'
|
||||
device = stats.st_rdev
|
||||
elif stat.S_ISCHR(stats.st_mode):
|
||||
dev_type = u'c'
|
||||
device = stats.st_rdev
|
||||
elif stat.S_ISDIR(stats.st_mode):
|
||||
dev_type = u'b'
|
||||
device = stats.st_dev
|
||||
else:
|
||||
dev_type = None
|
||||
device = None
|
||||
fdata['type'] = dev_type
|
||||
if device is not None:
|
||||
fdata['major'] = major(device)
|
||||
fdata['minor'] = minor(device)
|
||||
else:
|
||||
fdata['major'] = None
|
||||
fdata['minor'] = None
|
||||
fdata['mode'] = u'rwm'
|
||||
fdata['permission'] = 'allow'
|
||||
disknods.append(fdata)
|
||||
|
||||
key_type = {'major': IntOption,
|
||||
'minor': IntOption,
|
||||
'name': UnicodeOption,
|
||||
'permission': UnicodeOption,
|
||||
'mode': UnicodeOption,
|
||||
'type': UnicodeOption,
|
||||
'level': UnicodeOption}
|
||||
return self._gen_tiramisu_config(paths, "disknod", disknods, key_type)
|
||||
|
||||
def gen_packages(self, paths):
|
||||
# the last 'package' wins
# (if two package tags with the same name are present in the
# same container)
|
||||
return self.gen_generic('packages', paths, verify_exists_redefine=False)
|
||||
|
||||
|
||||
class CreoleLoader(CreoleVarLoader, CreoleContainer):
|
||||
"""
|
||||
charge les variables + les conteneurs
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
def _gen_eol_file(namespace, root_path=None):
|
||||
if namespace == 'creole':
|
||||
return unicode(configeol)
|
||||
else:
|
||||
if root_path is None:
|
||||
root_path = eoleextraconfig
|
||||
return unicode(join(root_path, namespace, 'config.eol'))
|
||||
|
||||
|
||||
def _list_extras(extradico=eoleextradico):
|
||||
extranames = []
|
||||
if isdir(extradico):
|
||||
for directory in listdir(extradico):
|
||||
content = listdir(join(extradico, directory))
|
||||
if not len(content) == 0:
|
||||
extensions = [splitext(filename)[1] for filename in content]
|
||||
if ".xml" in extensions:
|
||||
extranames.append(directory)
|
||||
return extranames
|
||||
|
||||
|
||||
def set_mandatory_permissive(config, action):
|
||||
descr = config.cfgimpl_get_description()
|
||||
parent = getattr(descr, action, None)
|
||||
if parent is not None:
|
||||
for family in parent.impl_getchildren():
|
||||
for option in family.impl_getchildren():
|
||||
if 'mandatory' in option.impl_getproperties():
|
||||
config.cfgimpl_get_settings().setpermissive(('mandatory',), option)
|
||||
|
||||
|
||||
def load_extras(config, load_values=True, mandatory_permissive=False, extradico=eoleextradico,
|
||||
force_eoleextraconfig=None):
|
||||
actions = set()
|
||||
if mandatory_permissive and hasattr(config, 'actions'):
|
||||
for name, family in config.actions.iter_groups():
|
||||
for aname, action in family.iter_groups():
|
||||
actions.add(action.name)
|
||||
for extraname in _list_extras(extradico=extradico):
|
||||
if extraname in ['creole', 'containers', 'actions']:
|
||||
raise Exception(_('extra name {} not allowed').format(extraname))
|
||||
eol_file = _gen_eol_file(extraname, root_path=force_eoleextraconfig)
|
||||
config.impl_set_information(extraname, eol_file)
|
||||
if extraname in actions:
|
||||
set_mandatory_permissive(config, extraname)
|
||||
if not load_values:
|
||||
continue
|
||||
#if file not exists, create it (for auto_freeze value)
|
||||
if not isfile(eol_file):
|
||||
try:
|
||||
config_save_values(config, extraname, reload_config=False, check_mandatory=False)
|
||||
except PropertiesOptionError:
|
||||
pass
|
||||
if isfile(eol_file):
|
||||
config_load_values(config, extraname)
|
||||
|
||||
|
||||
def load_config_eol(config, configfile=None, try_upgrade=True, force_load_owner=None,
|
||||
current_eol_version=None, force_instanciate=None):
|
||||
if not configfile:
|
||||
configfile = _gen_eol_file('creole')
|
||||
config.impl_set_information('creole', configfile)
|
||||
config_load_values(config, 'creole', force_load_owner=force_load_owner,
|
||||
force_instanciate=force_instanciate)
|
||||
load_values(config,
|
||||
configfile=configfile,
|
||||
try_upgrade=try_upgrade,
|
||||
force_load_owner=force_load_owner,
|
||||
current_eol_version=current_eol_version)
|
||||
|
||||
def load_config_store(config, store, unset_default=False,
|
||||
force_load_owner=None, current_eol_version=None,
|
||||
force_instanciate=None, remove_unknown_vars=False,
|
||||
try_upgrade=False):
|
||||
"""used on Zéphir to upgrade values (2.4.X -> 2.4.X+1) on a configuration
|
||||
that has already been migrated (2.2/2.3 -> 2.4)
|
||||
"""
|
||||
config_load_store(config, 'creole', store, force_load_owner=force_load_owner,
|
||||
unset_default=unset_default, force_instanciate=force_instanciate)
|
||||
load_values(config,
|
||||
try_upgrade=try_upgrade,
|
||||
force_load_owner=force_load_owner,
|
||||
current_eol_version=current_eol_version,
|
||||
remove_unknown_vars=remove_unknown_vars)
|
||||
|
||||
def load_values(config, configfile=None, try_upgrade=True, force_load_owner=None,
|
||||
current_eol_version=None, remove_unknown_vars=False):
|
||||
load_error = config.impl_get_information('load_error', False)
|
||||
if load_error and try_upgrade:
|
||||
#Try to upgrade
|
||||
from .upgrade import upgrade
|
||||
try:
|
||||
store_dico, version = upgrade(config, configfile)
|
||||
config_load_store(config, 'creole', store_dico, unset_default=True, eol_version='1.0')
|
||||
config.impl_set_information('upgrade', version)
|
||||
remove_unknown_vars = True
|
||||
load_error = False
|
||||
except Exception as e:
|
||||
log.error(_('Error when trying to upgrade config file: {}').format(e))
|
||||
config.impl_set_information('load_error', True)
|
||||
#print "fichier de configuration invalide 2.2 ou 2.3: {0} : {1}".format(configfile, e)
|
||||
if current_eol_version == None:
|
||||
current_eol_version = get_version('EOLE_RELEASE')
|
||||
eol_version = str(config.impl_get_information('eol_version'))
|
||||
if try_upgrade and not load_error:
|
||||
if StrictVersion(eol_version) > StrictVersion(current_eol_version):
|
||||
raise Exception(_('eol_version ({0}) is greater than current version ({1})').format(eol_version, current_eol_version))
|
||||
if StrictVersion(eol_version) < StrictVersion(current_eol_version):
|
||||
#can be used to edit lower versions on Zéphir
|
||||
from .upgrade24 import upgrade2
|
||||
try:
|
||||
# 2.4.x (greater than 2.4.0)
|
||||
if StrictVersion(current_eol_version) >= StrictVersion('2.4.0') and StrictVersion(eol_version) < StrictVersion('2.5.0'):
|
||||
upgrade2('2.4', eol_version, current_eol_version, config)
|
||||
# 2.5.x (greater than 2.5.0)
|
||||
if StrictVersion(current_eol_version) >= StrictVersion('2.5.0') and StrictVersion(eol_version) < StrictVersion('2.6.0'):
|
||||
upgrade2('2.5', eol_version, current_eol_version, config)
|
||||
# 2.6.x (greater than 2.6.0)
|
||||
if StrictVersion(current_eol_version) >= StrictVersion('2.6.0') and StrictVersion(eol_version) < StrictVersion('2.7.0'):
|
||||
upgrade2('2.6', eol_version, current_eol_version, config)
|
||||
if config.impl_get_information('upgrade', '') == '':
|
||||
#set the version only if it is the first upgrade
|
||||
config.impl_set_information('upgrade', eol_version)
|
||||
except Exception as e:
|
||||
log.error(_('Error when trying to upgrade config file: {}').format(normalize(str(e))))
|
||||
config.impl_set_information('upgrade', False)
|
||||
config.impl_set_information('load_error', True)
|
||||
|
||||
if remove_unknown_vars:
|
||||
# clean up unknown variables last (#9858)
|
||||
config.impl_set_information('unknown_options', {})
|
||||
|
||||
def creole_loader(load_values=True, rw=False, namespace='creole',
|
||||
load_extra=False, reload_config=True, owner=None,
|
||||
disable_mandatory=False, force_configeol=None,
|
||||
try_upgrade=True, force_load_creole_owner=None,
|
||||
force_dirs=None, warnings=None, force_instanciate=None):
|
||||
"""
|
||||
charge les dictionnaires Creole et retourne une config Tiramisu
|
||||
|
||||
:load_values: boolean. Charge ou non le fichier config.eol (default True)
|
||||
:rw: boolean. Mode de travail (lecture seule ou lecture/écriture)
|
||||
:namespace: string. Espace de travail (ex: "creole", "bacula", ...)
|
||||
:load_extra: boolean. Charge ou non les dictionnaire extra (si namespace='creole')
|
||||
:reload_config: boolean. Cette option est conservée pour raison de compatibilité
|
||||
ascendante mais n'a plus de justification, a ne pas utiliser
|
||||
:owner: string. Owner forcé sur les variables modifiées
|
||||
:disable_mandatory: boolean.
|
||||
:force_configeol: string. Force le nom du fichier de configuration utilisé
|
||||
:try_upgrade: boolean.
|
||||
:force_dirs: string. Force le nom du réprtoire contenant les dictionnaires
|
||||
:force_load_creole_owner: Owner forcé pour les variables chargées
|
||||
:warnings: affiche les warnings de validation
|
||||
"""
|
||||
if force_configeol is not None:
|
||||
if not isfile(force_configeol):
|
||||
raise ConfigError(_(u"Configuration file unexistent : {0}").format(
|
||||
force_configeol))
|
||||
if load_extra:
|
||||
# if force_configeol, cannot calculate the extra configfile name
|
||||
raise Exception(_(u'Unable to force_configeol with load_extra.'))
|
||||
if force_dirs is not None and (load_extra is True or namespace != 'creole'):
|
||||
raise Exception(_(u'If force_dirs is defined, namespace must be set to creole and load_extra must be set to False.'))
|
||||
if namespace != 'creole' and load_extra:
|
||||
raise ValueError(_(u'namespace is not creole, so load_extra is forbidden.'))
|
||||
# should not load values now because this creates a Config
|
||||
loader = CreoleLoader()
|
||||
if force_dirs is not None:
|
||||
dirs = force_dirs
|
||||
elif namespace == 'creole':
|
||||
dirs = eoledirs
|
||||
else:
|
||||
dirs = join(eoleextradico, namespace)
|
||||
#load config
|
||||
loader.read_dir(dirs, namespace)
|
||||
if load_extra:
|
||||
extranames = _list_extras()
|
||||
if isdir(eoleextradico):
|
||||
for directory in extranames:
|
||||
if directory in forbiddenextra:
|
||||
raise ValueError(
|
||||
_(u'Namespace {} for extra dictionary not allowed').format(directory))
|
||||
loader.read_dir(join(eoleextradico, directory), directory)
|
||||
config = loader.get_config()
|
||||
if warnings is None:
|
||||
# warnings is disabled in read-only mode and enabled in read-write mode by default
|
||||
warnings = rw
|
||||
if warnings is False:
|
||||
config.cfgimpl_get_settings().remove('warnings')
|
||||
if owner is not None:
|
||||
if owner not in dir(owners):
|
||||
owners.addowner(owner)
|
||||
config.cfgimpl_get_settings().setowner(getattr(owners, owner))
|
||||
#load values
|
||||
if force_configeol is not None:
|
||||
configfile = force_configeol
|
||||
else:
|
||||
configfile = _gen_eol_file(namespace)
|
||||
if load_values and isfile(configfile):
|
||||
disable_mandatory = False
|
||||
load_config_eol(config, configfile=configfile, try_upgrade=try_upgrade,
|
||||
force_load_owner=force_load_creole_owner,
|
||||
force_instanciate=force_instanciate)
|
||||
else:
|
||||
config.impl_set_information(namespace, configfile)
|
||||
if load_extra:
|
||||
load_extras(config, load_values=load_values)
|
||||
if rw:
|
||||
config.read_write()
|
||||
elif rw is False:
|
||||
config.read_only()
|
||||
|
||||
if disable_mandatory:
|
||||
config.cfgimpl_get_settings().remove('mandatory')
|
||||
config.cfgimpl_get_settings().remove('empty')
|
||||
return config
|
||||
|
||||
|
||||
def valid_store(store):
|
||||
if not isinstance(store, dict):
|
||||
raise Exception('store is not a dict: {0}'.format(store))
|
||||
for key, value in store.items():
|
||||
if not isinstance(key, unicode):
|
||||
raise Exception('store key is not an unicode for {0}'.format(key))
|
||||
if key != '___version___' and (not isinstance(value, dict) or value.keys() != ['owner', 'val']):
|
||||
raise Exception('store value is not a dict for {0}'.format(key))
|
||||
|
||||
|
||||
def load_store(config, eol_file=configeol):
|
||||
if not isfile(eol_file):
|
||||
store = {}
|
||||
else:
|
||||
fh = open(eol_file, 'r')
|
||||
fcntl.lockf(fh, fcntl.LOCK_SH)
|
||||
try:
|
||||
store = json.loads(fh.read())
except ValueError:
|
||||
config.impl_set_information('load_error', True)
|
||||
store = {}
|
||||
fh.close()
|
||||
try:
|
||||
valid_store(store)
|
||||
except Exception as err:
|
||||
config.impl_set_information('load_error', True)
|
||||
store = {}
|
||||
return store
|
||||
|
||||
|
||||
def config_load_store(config, namespace, store, force_instanciate=None,
|
||||
unset_default=False, force_load_owner=None, eol_version='2.4.0'):
|
||||
subconfig = getattr(config, namespace)
|
||||
cache_paths = config.cfgimpl_get_description()._cache_paths[1]
|
||||
unknown_options = {}
|
||||
|
||||
def reorder_store(path1, path2):
|
||||
"""
|
||||
sorter function.
|
||||
|
||||
sort description : if varname1 is a master and varname 2
|
||||
is a slave, returns [varname1, varname2]
|
||||
"""
|
||||
idx_1 = cache_paths.index(path1)
|
||||
idx_2 = cache_paths.index(path2)
|
||||
return cmp(idx_1, idx_2)
|
||||
|
||||
def store_path_and_reorder(eol_version):
|
||||
"""Convenience function to replace varnames with full paths
|
||||
and to sort an unordered ConfigObj's
|
||||
|
||||
:returns: a sorted ordereddict.
|
||||
"""
|
||||
store_path = {}
|
||||
if namespace == 'creole':
|
||||
paths = {}
|
||||
for path in subconfig.cfgimpl_get_description().impl_getpaths():
|
||||
vname = path.split('.')[-1]
|
||||
paths[vname] = namespace + '.' + path
|
||||
# variable not known to Tiramisu
|
||||
for vname, value in store.items():
|
||||
if vname == '___version___':
|
||||
eol_version = value
|
||||
elif vname not in paths:
|
||||
unknown_options[vname] = value
|
||||
if vname not in paths or value == {}:
|
||||
continue
|
||||
store_path[paths[vname]] = value
|
||||
else:
|
||||
paths = []
|
||||
subpaths = subconfig.cfgimpl_get_description().impl_getpaths()
|
||||
for path in subpaths:
|
||||
paths.append(namespace + '.' + path)
|
||||
for vname, value in store.items():
|
||||
if vname == '___version___':
|
||||
eol_version = value
|
||||
continue
|
||||
elif vname not in paths:
|
||||
continue
|
||||
store_path[vname] = value
|
||||
store_order = OrderedDict()
|
||||
store_key = store_path.keys()
|
||||
store_key.sort(reorder_store)
|
||||
for path in store_key:
|
||||
store_order[path] = store_path[path]
|
||||
return eol_version, store_order
|
||||
|
||||
# do not freeze auto_freeze variables before instance (or enregistrement_zephir for Zéphir)
|
||||
if force_instanciate is not None:
|
||||
is_inst = force_instanciate
|
||||
else:
|
||||
is_inst = is_instanciate()
|
||||
eol_version, store = store_path_and_reorder(eol_version)
|
||||
orig_values = {}
|
||||
for path, values in store.items():
|
||||
value = values['val']
|
||||
option = config.unwrap_from_path(path)
|
||||
settings = config.cfgimpl_get_settings()
|
||||
tiramisu_values = config.cfgimpl_get_values()
|
||||
if force_load_owner is not None:
|
||||
owner = force_load_owner
|
||||
else:
|
||||
owner = values['owner']
|
||||
if isinstance(owner, dict):
|
||||
for towner in owner.values():
|
||||
if towner not in dir(owners):
|
||||
owners.addowner(towner)
|
||||
else:
|
||||
if owner not in dir(owners):
|
||||
owners.addowner(owner)
|
||||
try:
|
||||
# if unset_default, reset to the default value when equal to the current value
|
||||
if unset_default and value == getattr(config, path):
|
||||
continue
|
||||
if isinstance(value, tuple):
|
||||
value = list(value)
|
||||
values['val'] = value
|
||||
orig_values[path.split('.')[-1]] = values
|
||||
if option.impl_is_master_slaves('slave'):
|
||||
if not isinstance(owner, dict):
|
||||
new_owner = getattr(owners, owner)
|
||||
multi = config.getattr(path, force_permissive=True)
|
||||
if isinstance(value, list):
|
||||
tval = {}
|
||||
for idx, val in enumerate(value):
|
||||
tval[idx] = val
|
||||
value = tval
|
||||
for idx, val in value.items():
|
||||
index = int(idx)
|
||||
if len(multi) > index:
|
||||
multi[index] = val
|
||||
if isinstance(owner, dict):
|
||||
new_owner = getattr(owners, owner[idx])
|
||||
tiramisu_values.setowner(option, new_owner, index=index)
|
||||
else:
|
||||
log.error(_("master's len is lower than the slave variable ({})").format(path))
|
||||
else:
|
||||
if isinstance(owner, str):
|
||||
owner = unicode(owner)
|
||||
if not isinstance(owner, unicode):
|
||||
raise Exception(_('owner must be a string for {}').format(path))
|
||||
new_owner = getattr(owners, owner)
|
||||
try:
|
||||
config.setattr(path, value, force_permissive=True)
|
||||
except ValueError as e:
|
||||
if path == 'schedule.schedule.weekday' and 'schedule.schedule.monthday' in store:
|
||||
settings.remove('validator')
|
||||
config.setattr(path, value, force_permissive=True)
|
||||
config.setattr('schedule.schedule.monthday', store['schedule.schedule.monthday'], force_permissive=True)
|
||||
settings.append('validator')
|
||||
else:
|
||||
raise e
|
||||
tiramisu_values.setowner(option, new_owner)
|
||||
except ValueError as e:
|
||||
msg = str(e).decode('utf8')
|
||||
#msg = unicode(e)
|
||||
log.error(_('unable to load variable {} with value {}: {}').format(path, value, msg))
|
||||
settings[option].append('load_error')
|
||||
config.impl_set_information('error_msg_{}'.format(path), msg)
|
||||
config.impl_set_information('orig_value_{}'.format(path), value)
|
||||
except LeadershipError:
|
||||
# do not raise an error #8380
|
||||
pass
|
||||
try:
|
||||
config.impl_get_information('force_store_vars').remove(path)
|
||||
except (KeyError, ValueError) as err:
|
||||
pass
|
||||
|
||||
path_split = path.split('.')
|
||||
family_option = config.unwrap_from_path(namespace + '.' + path_split[1])
|
||||
settings.setpermissive(tuple(modes_level), opt=family_option)
|
||||
if len(path_split) == 4:
|
||||
parent_option = config.unwrap_from_path(namespace + '.' + path_split[1] + '.' + path_split[2])
|
||||
settings.setpermissive(tuple(modes_level), opt=parent_option)
|
||||
settings.setpermissive(tuple(modes_level), opt=option)
|
||||
setting = config.cfgimpl_get_settings()
|
||||
if 'auto_freeze' in setting[option] and is_inst == 'oui' and \
|
||||
not tiramisu_values.is_default_owner(option):
|
||||
setting[option].append('frozen')
|
||||
if namespace == 'creole':
|
||||
config.impl_set_information('unknown_options', unknown_options)
|
||||
config.impl_set_information('eol_version', eol_version)
|
||||
config.impl_set_information('orig_values', orig_values)
|
||||
|
||||
def config_load_values(config, namespace, eol_file=None, force_instanciate=None,
|
||||
force_load_owner=None):
|
||||
subconfig = getattr(config, namespace, None)
|
||||
if subconfig is None:
|
||||
return
|
||||
if eol_file is None:
|
||||
try:
|
||||
eol_file = config.impl_get_information(namespace)
|
||||
except AttributeError:
|
||||
raise Exception(_(u'config must have eol_file attribute'))
|
||||
else:
|
||||
config.impl_set_information(namespace, eol_file)
|
||||
if not isfile(eol_file):
|
||||
raise IOError(_(u'Can not find file {0}').format(
|
||||
eol_file))
|
||||
store = load_store(config, eol_file)
|
||||
config_load_store(config, namespace, store,
|
||||
force_instanciate=force_instanciate,
|
||||
force_load_owner=force_load_owner)
|
||||
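# [Editor's sketch, not part of the original file] Typical way this loader is
# driven, assuming a Tiramisu `config` object already built elsewhere from the
# Creole dictionaries; the .eol path below is an illustrative assumption, not a
# value taken from this repository.
def _example_load_values(config):
    # Fills the 'creole' namespace from the stored .eol file, then applies
    # owners, permissives and auto_freeze exactly as config_load_store() does.
    config_load_values(config, 'creole', eol_file='/etc/eole/config.eol')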
|
||||
def config_get_values(config, namespace, check_mandatory=True, ignore_autofreeze=False):
|
||||
"""check_mandatory: allows to disable mandatory checking
|
||||
(i.e : when returning values for partial configuration in Zéphir)
|
||||
"""
|
||||
def _get_varname(path):
|
||||
if namespace == 'creole':
|
||||
value_name = path.split('.')[-1]
|
||||
else:
|
||||
value_name = path
|
||||
return value_name
|
||||
|
||||
subconfig = getattr(config, namespace)
|
||||
if check_mandatory:
|
||||
mandatory_errors = list(config.cfgimpl_get_values(
|
||||
).mandatory_warnings(force_permissive=True))
|
||||
if mandatory_errors != []:
|
||||
text = []
|
||||
for error in mandatory_errors:
|
||||
if not error.startswith(namespace + '.'):
|
||||
continue
|
||||
error = error.split('.')
|
||||
text.append(_(u"Mandatory variable '{0}' from family '{1}'"
|
||||
u" is not set !").format(unicode(error[-1]),
|
||||
unicode(error[1].capitalize())).encode('utf-8'))
|
||||
if text != []:
|
||||
raise PropertiesOptionError("\n".join(text), ('mandatory',))
|
||||
store = {}
|
||||
opt_values = subconfig.cfgimpl_get_values().get_modified_values()
|
||||
force_store_values = config.impl_get_information('force_store_values', None)
|
||||
|
||||
for path, own_val in opt_values.items():
|
||||
# skip variables that do not belong to the current namespace
|
||||
if not path.startswith(namespace+'.'):
|
||||
continue
|
||||
if force_store_values and path in force_store_values:
|
||||
force_store_values.remove(path)
|
||||
store[_get_varname(path)] = {'val': own_val[1], 'owner': own_val[0]}
|
||||
if force_store_values:
|
||||
for path in force_store_values:
|
||||
varname = _get_varname(path)
|
||||
if varname not in store:
|
||||
try:
|
||||
store[varname] = {'val': config.getattr(path, force_permissive=True), 'owner': u'forced'}
|
||||
except PropertiesOptionError:
|
||||
pass
|
||||
if namespace == 'creole':
|
||||
# merge in stored values that do not match any known option
|
||||
store.update(config.impl_get_information('unknown_options', {}))
|
||||
return store
|
||||
|
||||
|
||||
def add_eol_version(store, eol_version=None):
|
||||
# store the version given as a parameter (if >= 2.4.1), otherwise the system version
|
||||
if eol_version:
|
||||
if StrictVersion(eol_version) >= StrictVersion('2.4.1'):
|
||||
store['___version___'] = eol_version
|
||||
else:
|
||||
store['___version___'] = get_version('EOLE_RELEASE')
|
||||
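# [Editor's sketch, not part of the original file] add_eol_version() only keeps an
# explicit version when it is >= 2.4.1; otherwise the system's EOLE_RELEASE value
# is stored. The version gate in isolation:
from distutils.version import StrictVersion

def _keeps_explicit_version(eol_version):
    # Mirrors the test above; '2.4.1' is the threshold hard-coded there.
    # _keeps_explicit_version('2.5.2') -> True, _keeps_explicit_version('2.4.0') -> False
    return bool(eol_version) and StrictVersion(eol_version) >= StrictVersion('2.4.1')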
|
||||
|
||||
def config_save_values(config, namespace, reload_config=True, eol_file=None, check_mandatory=True, eol_version=None):
|
||||
subconfig = getattr(config, namespace)
|
||||
if eol_file is not None:
|
||||
config.impl_set_information(namespace, eol_file)
|
||||
try:
|
||||
eol_file = config.impl_get_information(namespace)
|
||||
except AttributeError:
|
||||
raise Exception(_(u'config must have eol_file attribute'))
|
||||
store = config_get_values(config, namespace, check_mandatory)
|
||||
add_eol_version(store, eol_version)
|
||||
try:
|
||||
dirn = dirname(eol_file)
|
||||
if not isdir(dirn):
|
||||
makedirs(dirn)
|
||||
if not isfile(eol_file):
|
||||
fh = file(eol_file, 'w')
|
||||
fcntl.lockf(fh, fcntl.LOCK_EX)
|
||||
else:
|
||||
fh = file(eol_file, 'r+')
|
||||
fcntl.lockf(fh, fcntl.LOCK_EX)
|
||||
fh.truncate() # Here's where the magic happens #7073
|
||||
fh.write(cjson.encode(store))
|
||||
fh.close()
|
||||
except Exception as err:
|
||||
raise Exception(_(u"Error saving file: {0}").format(err))
|
||||
if client is not None and reload_config:
|
||||
try:
|
||||
client.reload_eol()
|
||||
#client.reload_config()
|
||||
except CreoleClientError:
|
||||
pass
|
||||
return True
|
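# [Editor's sketch, not part of the original file] The save above takes an
# exclusive fcntl lock on the .eol file and only then truncates it (the '#7073'
# note): truncating after the lock is held avoids leaving stale trailing bytes
# when the new JSON payload is shorter than the previous one. The same pattern in
# isolation, using json instead of cjson purely for illustration:
import fcntl
import json

def _locked_rewrite(path, payload):
    # 'a+' creates the file if needed without truncating before the lock is taken.
    with open(path, 'a+') as fh:
        fcntl.lockf(fh, fcntl.LOCK_EX)
        fh.seek(0)
        fh.truncate()
        fh.write(json.dumps(payload))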
|
@@ -1,454 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
LXML parser for the XML files that collect the EOLE variables
|
||||
"""
|
||||
from lxml import etree
|
||||
from copy import copy
|
||||
from .error import ConfigError
|
||||
from .utils import string_to_bool #, get_text_node
|
||||
from .config import VIRTMASTER
|
||||
from .dtd_parser import CONVERT_VALUE
|
||||
from pyeole.odict import OrderedDict
|
||||
|
||||
from .i18n import _
|
||||
|
||||
def parse_xml_file(filename, dtd, parse_all=True, test_duplicate=False):
|
||||
"""
|
||||
@param filename: name of the source XML file
|
||||
@return: data structure used to build the Eole objects
|
||||
"""
|
||||
try:
|
||||
document = etree.iterparse(filename, events=('end',), tag='creole')
|
||||
return _parse_root_node(document, dtd, parse_all, test_duplicate)
|
||||
except Exception as err:
|
||||
raise ConfigError(_(u"Error while parsing file {0}: {1}").format(filename, err))
|
||||
|
||||
def parse_string(xml_string, dtd, parse_all=True, test_duplicate=False):
|
||||
"""
|
||||
@param xml_string: XML dictionary as a string
|
||||
@return: data structure used to build the Eole objects
|
||||
"""
|
||||
try:
|
||||
root_node = etree.fromstring(xml_string)
|
||||
document = etree.iterwalk(root_node, events=('end',), tag='creole')
|
||||
return _parse_root_node(document, dtd, parse_all, test_duplicate)
|
||||
except Exception as err:
|
||||
raise ConfigError(_(u"Error while parsing: {0}").format(err))
|
||||
|
||||
def _parse_root_node(document, dtd, parse_all, test_duplicate=False):
|
||||
"""
|
||||
@param document: the root XML node
|
||||
"""
|
||||
def _parse_container(node, options, container_name):
|
||||
for name in options:
|
||||
key_name = '{0}s'.format(name)
|
||||
ret.setdefault(key_name, [])
|
||||
values = parse_generic(node.findall(name),
|
||||
container_name, dtd, name)
|
||||
if values != []:
|
||||
ret[key_name].extend(values)
|
||||
|
||||
for unused, first_node in document:
|
||||
root_node = first_node
|
||||
|
||||
# check for duplicate variables within the same dictionary
|
||||
if test_duplicate:
|
||||
all_var_dict = []
|
||||
for var in root_node.findall('variables/family/variable'):
|
||||
name = var.attrib['name']
|
||||
if name in all_var_dict:
|
||||
raise ConfigError(_(u'Error, var {0} already exists in current dictionaries').format(name))
|
||||
all_var_dict.append(name)
|
||||
|
||||
ret = {'families': parse_families(root_node)}
|
||||
families_action = parse_actions(root_node, dtd)
|
||||
if len(families_action) != 0:
|
||||
ret['families_action'] = families_action
|
||||
|
||||
ret['containers'] = []
|
||||
## <files> tag (data on the master host)
|
||||
file_node = root_node.findall('files')
|
||||
if file_node != []:
|
||||
if len(file_node) != 1:
|
||||
raise Exception(_(u"Error: extra <files> tags in dictionaries."))
|
||||
if parse_all:
|
||||
_parse_container(file_node[0], dtd['files']['options'], VIRTMASTER)
|
||||
ret['containers'].append({'name': VIRTMASTER, 'id': '1'})
|
||||
|
||||
## <containers> tag (data inside the containers)
|
||||
containers_node = root_node.findall('containers')
|
||||
if containers_node != []:
|
||||
if len(containers_node) != 1:
|
||||
raise Exception(_(u"Error: extra <containers> tags in dictionaries."))
|
||||
container = containers_node[0]
|
||||
for container_node in container.getiterator('container'):
|
||||
name = container_node.attrib['name']
|
||||
if name in [VIRTMASTER, 'all']:
|
||||
raise Exception(_(u"Name '{0}' is not allowed in tag <container>.").format(name))
|
||||
if name in ret['containers']:
|
||||
raise Exception(
|
||||
_(u"There must be only one name '{0}' in a dictionary.").format(name))
|
||||
containerid = _get_optional(container_node, 'id')
|
||||
groupid = _get_optional(container_node, 'group')
|
||||
ret['containers'].append({'name': name, 'id': containerid,
|
||||
'group': groupid})
|
||||
if parse_all:
|
||||
_parse_container(container_node, dtd['container']['options'], name)
|
||||
if parse_all:
|
||||
all_node = container.findall('all')
|
||||
if all_node != []:
|
||||
if len(all_node) != 1:
|
||||
raise Exception(_(u"Error: extra <all> tags in dictionaries."))
|
||||
ret['containers'].append({'name': 'all'})
|
||||
_parse_container(all_node[0], dtd['all']['options'], 'all')
|
||||
|
||||
## constraints handling
|
||||
#FIXME
|
||||
ret.update(parse_constraints(root_node))
|
||||
|
||||
## variable groups handling
|
||||
ret['groups'] = parse_groups(root_node)
|
||||
|
||||
## help handling
|
||||
ret['helps'] = parse_help(root_node)
|
||||
|
||||
## separators handling
|
||||
ret['separators'] = parse_separators(root_node)
|
||||
return ret
|
||||
|
||||
|
||||
def _get_boolean_attr(node, attr_name, default=False):
|
||||
"""
|
||||
Specific handling for boolean attributes
|
||||
They default to False
|
||||
"""
|
||||
val = node.get(attr_name)
|
||||
if default:
|
||||
return str(val).lower() != 'false'
|
||||
elif val is None:
|
||||
return None
|
||||
else:
|
||||
return str(val).lower() == 'true'
|
||||
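# [Editor's sketch, not part of the original file] Behaviour of _get_boolean_attr()
# on a small element; lxml's etree is already imported at the top of this module
# and the attribute names below are illustrative:
def _example_boolean_attr():
    node = etree.fromstring('<variable exists="False" multi="true"/>')
    assert _get_boolean_attr(node, 'hidden') is None          # absent, default False
    assert _get_boolean_attr(node, 'hidden', default=True)    # absent, default True
    assert not _get_boolean_attr(node, 'exists', default=True)
    assert _get_boolean_attr(node, 'multi')                   # 'true' -> True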
|
||||
|
||||
def _get_optional(node, attr_name):
|
||||
"""
|
||||
Value of an optional attribute
|
||||
"""
|
||||
return node.get(attr_name)
|
||||
|
||||
|
||||
def _parse_value(varnode, attr='value'):
|
||||
"""
|
||||
retrieve the values of a variable
|
||||
"""
|
||||
res = []
|
||||
for val in varnode.findall(attr):
|
||||
# FIX for <value></value> !
|
||||
if val.text is not None:
|
||||
res.append(val.text)
|
||||
else:
|
||||
res.append('')
|
||||
return res
|
||||
|
||||
def parse_value(varnode, name):
|
||||
"""
|
||||
retrieve the values of a variable
|
||||
"""
|
||||
res = None
|
||||
for val in varnode.findall('value'):
|
||||
if val.text is not None:
|
||||
tval = val.text
|
||||
if res != None:
|
||||
#str to list
|
||||
if type(res) == str:
|
||||
res = [res]
|
||||
res.append(tval)
|
||||
else:
|
||||
res = tval
|
||||
return res
|
||||
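# [Editor's sketch, not part of the original file] parse_value() returns a plain
# string for a single <value> and switches to a list as soon as several values are
# present (contrary to _parse_value(), which always returns a list):
def _example_parse_value():
    single = etree.fromstring('<variable name="x"><value>a</value></variable>')
    multi = etree.fromstring('<variable name="x"><value>a</value><value>b</value></variable>')
    assert parse_value(single, 'x') == 'a'
    assert parse_value(multi, 'x') == ['a', 'b']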
|
||||
def parse_generic(nodes, container, dtd, name, old_result=None):
|
||||
ret = []
|
||||
keys = dtd[name]
|
||||
for node in nodes:
|
||||
if old_result:
|
||||
result = copy(old_result)
|
||||
result['node_name'] = name
|
||||
elif container is not None:
|
||||
result = {'container': container}
|
||||
else:
|
||||
result = {}
|
||||
if keys['type']:
|
||||
if 'name' in keys['needs'] or 'name' in keys['optionals']:
|
||||
raise Exception('PCDATA + name')
|
||||
result['name'] = node.text
|
||||
for key, values in keys['needs'].items():
|
||||
value = node.attrib[key]
|
||||
value = CONVERT_VALUE.get(value, value)
|
||||
if values['values'] is not None and value not in values['values']:
|
||||
raise Exception(_(u"Value {0} not in {1}").format(value, values['values']))
|
||||
result[key] = value
|
||||
for key, values in keys['optionals'].items():
|
||||
value = node.attrib.get(key, values['default'])
|
||||
value = CONVERT_VALUE.get(value, value)
|
||||
if value != None:
|
||||
if values['values'] is not None and value not in values['values']:
|
||||
raise Exception(_(u"Value {0} not in {1}").format(value, values['values']))
|
||||
result[key] = value
|
||||
if keys['options'] == []:
|
||||
ret.append(result)
|
||||
else:
|
||||
for option in keys['options']:
|
||||
ret.extend(parse_generic(node.findall(option), container, dtd, option, result))
|
||||
return ret
|
||||
|
||||
|
||||
def parse_variables(var_node):
|
||||
"""
|
||||
variables processing
|
||||
@param var_node: <variables> node
|
||||
"""
|
||||
result = OrderedDict()
|
||||
for var in var_node.getiterator('variable'):
|
||||
# Default variables are handled in creole.loader
|
||||
hidden = _get_boolean_attr(var, 'hidden')
|
||||
multi = _get_boolean_attr(var, 'multi')
|
||||
redefine = _get_boolean_attr(var, 'redefine')
|
||||
mandatory = _get_boolean_attr(var, 'mandatory')
|
||||
remove_check = _get_boolean_attr(var, 'remove_check')
|
||||
remove_condition = _get_boolean_attr(var, 'remove_condition')
|
||||
exists = _get_boolean_attr(var, 'exists', default=True)
|
||||
disabled = _get_boolean_attr(var, 'disabled', default=False)
|
||||
auto_freeze = _get_boolean_attr(var, 'auto_freeze')
|
||||
auto_save = _get_boolean_attr(var, 'auto_save')
|
||||
mode = _get_optional(var, 'mode')
|
||||
name = var.attrib['name']
|
||||
value = parse_value(var, var.attrib['name'])
|
||||
typ = _get_optional(var, 'type')
|
||||
if typ == None:
|
||||
typ = 'string'
|
||||
desc = _get_optional(var, 'description')
|
||||
if type(desc) == unicode:
|
||||
desc = desc.encode('utf-8')
|
||||
result[name] = dict(value=value,
|
||||
type=typ,
|
||||
description=desc,
|
||||
hidden=hidden,
|
||||
multi=multi,
|
||||
auto='',
|
||||
redefine=redefine,
|
||||
exists=exists,
|
||||
auto_freeze=auto_freeze,
|
||||
auto_save=auto_save,
|
||||
mode=mode,
|
||||
mandatory=mandatory,
|
||||
disabled=disabled,
|
||||
remove_check=remove_check,
|
||||
remove_condition=remove_condition
|
||||
)
|
||||
return result
|
||||
|
||||
def parse_families(var_node):
|
||||
"""
|
||||
families processing
|
||||
@param var_node: <variables> node
|
||||
"""
|
||||
result = OrderedDict()
|
||||
for family in var_node.findall('variables/family'): #: getiterator('family'):
|
||||
family_name = family.attrib['name']
|
||||
if family_name in result:
|
||||
raise Exception(_(u"Family {0} is set several times.").format(family_name))
|
||||
hidden = _get_boolean_attr(family, 'hidden')
|
||||
# FIXME: mode='' was accepted with domparser
|
||||
mode = _get_optional(family, 'mode')
|
||||
icon = _get_optional(family, 'icon')
|
||||
variables = parse_variables(family)
|
||||
result[family_name] = {'hidden': hidden,
|
||||
'mode': mode,
|
||||
'vars': variables,
|
||||
'icon': icon
|
||||
}
|
||||
return result
|
||||
|
||||
|
||||
def parse_actions(root_node, dtd):
|
||||
"""
|
||||
action families (<family_action>) processing
|
||||
@param root_node: root node of the dictionary
|
||||
"""
|
||||
result = OrderedDict()
|
||||
def _parse_action(node, options):
|
||||
parse = {}
|
||||
for name in options:
|
||||
key_name = '{0}'.format(name)
|
||||
parse.setdefault(key_name, [])
|
||||
values = parse_generic(node.findall(name), None, dtd, name)
|
||||
if values != []:
|
||||
parse[key_name].extend(values)
|
||||
parse['type'] = node.get("type", "custom")
|
||||
parse['title'] = node.get('title')
|
||||
parse['description'] = node.get('description')
|
||||
image = node.get('image')
|
||||
if image:
|
||||
parse['image'] = image
|
||||
url = node.get('url', None)
|
||||
if url:
|
||||
parse['url'] = url
|
||||
return parse
|
||||
|
||||
for family in root_node.findall('family_action'): #: getiterator('family'):
|
||||
family_name = family.attrib['name']
|
||||
if family_name in result:
|
||||
raise Exception(_(u"Action Family {0} is set several times.").format(family_name))
|
||||
description = _get_optional(family, 'description')
|
||||
color = _get_optional(family, 'color')
|
||||
image = _get_optional(family, 'image')
|
||||
## <action> tag
|
||||
action_node = family.findall('action')
|
||||
if action_node != [] and len(action_node) != 1:
|
||||
raise Exception(_(u"Error: extra <action> tags in dictionaries."))
|
||||
action = _parse_action(action_node[0], dtd['action']['options'])
|
||||
result[family_name] = {'name': family_name,
|
||||
'description': description,
|
||||
'color': color,
|
||||
'image': image,
|
||||
'action': action
|
||||
}
|
||||
return result
|
||||
|
||||
def parse_constraints(node):
|
||||
"""
|
||||
@param node: constraints node
|
||||
"""
|
||||
constraints = {'checks' : parse_funcs(node,'check'),
|
||||
'fills' : parse_funcs(node,'fill'),
|
||||
'autos' : parse_funcs(node,'auto'),
|
||||
'conditions' : parse_conditions(node)
|
||||
}
|
||||
return constraints
|
||||
|
||||
|
||||
def _parse_param(param_node):
|
||||
"""
|
||||
processing of a function's parameters
|
||||
"""
|
||||
return {'name' : _get_optional(param_node, 'name'),
|
||||
'type' : _get_optional(param_node, 'type'),
|
||||
'value' : param_node.text,
|
||||
'optional' : _get_optional(param_node, 'optional'),
|
||||
'hidden' : _get_optional(param_node, 'hidden'),
|
||||
}
|
||||
|
||||
|
||||
def parse_funcs(node, func_type):
|
||||
"""
|
||||
@param node: functions node
|
||||
@param func_type: TagName of the functions to find
|
||||
@return: {target: [(param_name, _parse_params('param'))]}
|
||||
"""
|
||||
# validation functions
|
||||
funcs = {}
|
||||
for func in node.findall('constraints/%s' % func_type):
|
||||
# read the parameters
|
||||
params = []
|
||||
# if there is a <target> tag
|
||||
targets = _parse_value(func, 'target')
|
||||
# otherwise it is a target= attribute
|
||||
if not targets:
|
||||
# wrap in a list because <target> returns a list
|
||||
targets = [_get_optional(func, 'target')]
|
||||
level = _get_optional(func, 'level')
|
||||
if not level:
|
||||
level = 'error'
|
||||
for target in targets:
|
||||
if target is not None:
|
||||
for param in func.getiterator('param'):
|
||||
params.append(_parse_param(param))
|
||||
funcs.setdefault(target, []).append((func.attrib['name'],
|
||||
params, level))
|
||||
return funcs
|
||||
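# [Editor's sketch, not part of the original file] Shape of the parse_funcs()
# result for a minimal <check> constraint; the function, variable and parameter
# values below are invented for illustration only:
def _example_parse_funcs():
    root = etree.fromstring(
        '<creole><constraints>'
        '<check name="valid_enum" target="creole.general.mode">'
        '<param>oui</param>'
        '</check>'
        '</constraints></creole>')
    funcs = parse_funcs(root, 'check')
    name, params, level = funcs['creole.general.mode'][0]
    assert name == 'valid_enum' and level == 'error'
    assert params[0]['value'] == 'oui'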
|
||||
|
||||
def parse_conditions(node):
|
||||
"""
|
||||
@param node: functions node
|
||||
"""
|
||||
# validation functions
|
||||
funcs = {}
|
||||
for func in node.getiterator('condition'):
|
||||
# read the parameters
|
||||
targets = []
|
||||
family_targets = []
|
||||
list_targets = []
|
||||
# function parameters
|
||||
params = [_parse_param(param)
|
||||
for param in func.getiterator('param')]
|
||||
# dependency targets
|
||||
for target in func.getiterator('target'):
|
||||
ttype = target.get('type')
|
||||
optional = target.get('optional', False)
|
||||
if ttype == 'family':
|
||||
family_targets.append((target.text, optional))
|
||||
elif ttype in ['variable', None]:
|
||||
targets.append((target.text, optional))
|
||||
else:
|
||||
if ttype.endswith('list'):
|
||||
# strip the 'list' suffix from ttype
|
||||
list_targets.append((ttype[:-4], target.text, optional))
|
||||
else:
|
||||
raise Exception(_(u'Unknown type {0} for condition target.').format(ttype))
|
||||
funcdef = {'name': func.attrib['name'], 'family': family_targets,
|
||||
'variable': targets, 'list': list_targets, 'param': params,
|
||||
'fallback': _get_boolean_attr(func, 'fallback')}
|
||||
source = _get_optional(func, 'source')
|
||||
if source == None:
|
||||
raise Exception(_(u'Impossible condition without source for {0}.').format(funcdef))
|
||||
funcs.setdefault(source, []).append(funcdef)
|
||||
return funcs
|
||||
|
||||
|
||||
def parse_groups(node):
|
||||
"""
|
||||
Processing of variable groups
|
||||
"""
|
||||
result = {}
|
||||
for group in node.findall('constraints/group'):
|
||||
slaves = _parse_value(group, 'slave')
|
||||
result[group.attrib['master']] = slaves
|
||||
return result
|
||||
|
||||
|
||||
def parse_help(node):
|
||||
"""
|
||||
Help processing
|
||||
"""
|
||||
var_help = {}
|
||||
for var in node.findall('help/variable'):
|
||||
name = var.attrib['name']
|
||||
try:
|
||||
var_help[name] = var.text.strip()
|
||||
except AttributeError:
|
||||
raise Exception(_(u"Invalid help for variable {0}.").format(name))
|
||||
fam_help = {}
|
||||
for var in node.findall('help/family'):
|
||||
name = var.attrib['name']
|
||||
try:
|
||||
fam_help[name] = var.text.strip()
|
||||
except AttributeError:
|
||||
raise Exception(_(u"Invalid help for family {0}").format(name))
|
||||
return {'variables':var_help, 'families': fam_help}
|
||||
|
||||
|
||||
def parse_separators(node):
|
||||
"""dictionnaire des séparateurs, format {'variable':'text'}
|
||||
variable : nom de la première variable après le sépateur"""
|
||||
var_sep = {}
|
||||
for var in node.findall('variables/separators/separator'):
|
||||
if not var.text:
|
||||
libelle = ''
|
||||
else:
|
||||
libelle = var.text.strip()
|
||||
var_sep[var.attrib['name']] = (libelle, _get_boolean_attr(var, 'never_hidden'))
|
||||
return var_sep
|
||||
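# [Editor's sketch, not part of the original file] A separator is keyed by the
# first variable that follows it; the names below are invented:
def _example_parse_separators():
    root = etree.fromstring(
        '<creole><variables><separators>'
        '<separator name="nom_machine" never_hidden="True">General</separator>'
        '</separators></variables></creole>')
    assert parse_separators(root) == {'nom_machine': ('General', True)}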
|
148
creole/maj.py
148
creole/maj.py
|
@@ -1,148 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
###########################################################################
|
||||
#
|
||||
# Eole NG - 2010
|
||||
# Copyright Pole de Competence Eole (Ministere Education - Academie Dijon)
|
||||
# Licence CeCill http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# eole@ac-dijon.fr
|
||||
#
|
||||
###########################################################################
|
||||
"""
|
||||
Update management library
|
||||
"""
|
||||
from os import system
|
||||
from dateutil import parser
|
||||
from pyeole.schedule import ManageSchedule, list_once, add_schedule, \
|
||||
del_schedule, apply_schedules, DAY_TO_STRING
|
||||
from pyeole.process import system_out
|
||||
from .client import CreoleClient
|
||||
|
||||
from .i18n import _
|
||||
|
||||
# information file for the one-shot update
|
||||
DIFF_FILENAME = '/var/lib/eole/reports/maj-diff.txt'
|
||||
|
||||
#########################################
|
||||
##   Weekly update (maj_auto)        ##
|
||||
#########################################
|
||||
|
||||
client = CreoleClient()
|
||||
|
||||
def maj_enabled():
|
||||
"""
|
||||
check whether the weekly update is enabled
|
||||
"""
|
||||
return client.get('/schedule/majauto/day') == 'weekly'
|
||||
|
||||
def get_maj_day():
|
||||
"""
|
||||
return the update day and time
|
||||
for example:
|
||||
{'hour': 5, 'minute': 4, 'weekday': 'vendredi'}
|
||||
"""
|
||||
shed = client.get('/schedule/schedule')
|
||||
shed.pop('monthday')
|
||||
shed['weekday'] = DAY_TO_STRING[shed['weekday']]
|
||||
return shed
|
||||
|
||||
def enable_maj_auto():
|
||||
"""
|
||||
enable the weekly update
|
||||
"""
|
||||
if not maj_enabled():
|
||||
manage_schedule = ManageSchedule()
|
||||
manage_schedule.add('majauto', 'weekly', 'post')
|
||||
manage_schedule.save()
|
||||
apply_schedules()
|
||||
|
||||
def disable_maj_auto():
|
||||
"""
|
||||
disable the weekly update
|
||||
"""
|
||||
if maj_enabled():
|
||||
manage_schedule = ManageSchedule()
|
||||
manage_schedule.delete('majauto')
|
||||
manage_schedule.save()
|
||||
apply_schedules()
|
||||
|
||||
|
||||
#########################################
|
||||
##   One-shot update (schedule once) ##
|
||||
#########################################
|
||||
|
||||
def maj_once_enabled():
|
||||
"""
|
||||
check whether the one-shot update is enabled
|
||||
"""
|
||||
return 'majauto' in list_once('post')
|
||||
|
||||
def enable_maj_once():
|
||||
"""
|
||||
enable the 'once' update
|
||||
"""
|
||||
if not maj_once_enabled():
|
||||
cancel_maj_differee()
|
||||
add_schedule('once', 'post', 'majauto')
|
||||
write_diff(True, 'ce soir')
|
||||
return True
|
||||
|
||||
def disable_maj_once():
|
||||
"""
|
||||
disable the 'once' update
|
||||
"""
|
||||
if maj_once_enabled():
|
||||
del_schedule('once', 'post', 'majauto')
|
||||
|
||||
|
||||
#########################################
|
||||
##   Deferred update (maj_differee)  ##
|
||||
#########################################
|
||||
|
||||
def write_diff(enable, heure=None):
|
||||
""" ecrit les informations du gestionnaire de mise a jour
|
||||
dans le fichier de config de l'ead """
|
||||
fic = file(DIFF_FILENAME, 'w')
|
||||
if enable:
|
||||
fic.write(_(u'An update is scheduled at {0}').format(heure))
|
||||
else:
|
||||
fic.write("")
|
||||
fic.close()
|
||||
|
||||
def cancel_maj_differee():
|
||||
"""
|
||||
cancel any scheduled deferred update
|
||||
"""
|
||||
disable_maj_once()
|
||||
cmd = """for i in `grep -l "Maj-Auto" /var/spool/cron/atjobs/* 2>/dev/null`; do rm -f $i ; done;"""
|
||||
system(cmd)
|
||||
write_diff(False)
|
||||
return True
|
||||
|
||||
def prog_maj_differee(heure, options='-R'):
|
||||
"""
|
||||
Schedule a deferred update a few hours from now
|
||||
It is launched through the at command as the root user
|
||||
options: options passed to Maj-Auto
|
||||
"""
|
||||
if heure == 'once':
|
||||
return enable_maj_once()
|
||||
# cancel any other deferred update
|
||||
cancel_maj_differee()
|
||||
stdin = "rm -f %s\nMaj-Auto %s\n" % (DIFF_FILENAME, options)
|
||||
env_path = {'PATH': '/usr/share/eole:/usr/share/eole/sbin:/usr/local/sbin:'
|
||||
'/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
|
||||
'LC_ALL': 'fr_FR.UTF-8'}
|
||||
ret = system_out(['/usr/bin/at', 'now', '+', str(heure), 'hours'], stdin=stdin, env=env_path)
|
||||
if ret[0] != 0:
|
||||
return False
|
||||
scheduled_maj = " ".join(ret[2].splitlines()[1].split()[3:7])
|
||||
scheduled_maj = parser.parse(scheduled_maj)
|
||||
scheduled_day = "{0:0=2d}".format(scheduled_maj.day)
|
||||
scheduled_month = "{0:0=2d}".format(scheduled_maj.month)
|
||||
scheduled_year = "{0:0=2d}".format(scheduled_maj.year)
|
||||
scheduled_hour = "{0:0=2d}".format(scheduled_maj.hour)
|
||||
scheduled_minute = "{0:0=2d}".format(scheduled_maj.minute)
|
||||
scheduled_maj = _(u'{0} the {1}').format(":".join((scheduled_hour, scheduled_minute)), \
|
||||
"/".join((scheduled_day, scheduled_month, scheduled_year)))
|
||||
write_diff(True , scheduled_maj)
|
||||
return True
|
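# [Editor's sketch, not part of the original file] The deferred update above is
# queued by piping a two-line shell script to at(1) and parsing the scheduled date
# back from at's banner; a stripped-down equivalent, with the delay and Maj-Auto
# options hard-coded purely for illustration:
def _example_schedule_with_at(heure=2):
    stdin = "rm -f %s\nMaj-Auto -R\n" % DIFF_FILENAME
    # at(1) reports the job on stderr, e.g. "job 12 at Tue Mar  3 04:00:00 2020"
    ret = system_out(['/usr/bin/at', 'now', '+', str(heure), 'hours'], stdin=stdin)
    return ret[0] == 0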
|
@@ -60,8 +60,8 @@ class CreoleObjSpace(object):
|
|||
"""DOM XML reflexion free internal representation of a Creole Dictionary
|
||||
"""
|
||||
choice = type('Choice', (RootCreoleObject,), OrderedDict())
|
||||
# Creole ObjectSpace's Master variable class type
|
||||
Master = type('Master', (RootCreoleObject,), OrderedDict())
|
||||
# Creole ObjectSpace's Leadership variable class type
|
||||
Leadership = type('Leadership', (RootCreoleObject,), OrderedDict())
|
||||
"""
|
||||
This Atom type stands for singleton, that is
|
||||
an Object Space's atom object is present only once in the
|
||||
|
@@ -90,7 +90,7 @@ class CreoleObjSpace(object):
|
|||
self.container_elt_attr_list = [] #
|
||||
# ['variable', 'separator', 'family']
|
||||
self.forced_text_elts = set()
|
||||
# ['disknod', 'slave', 'target', 'service', 'package', 'ip', 'value', 'tcpwrapper',
|
||||
# ['disknod', 'follower', 'target', 'service', 'package', 'ip', 'value', 'tcpwrapper',
|
||||
# 'interface', 'input', 'port']
|
||||
self.forced_text_elts_as_name = set(['choice'])
|
||||
self.forced_choice_option = {}
|
||||
|
@@ -167,7 +167,7 @@ class CreoleObjSpace(object):
|
|||
elif value == 'False':
|
||||
return False
|
||||
else:
|
||||
raise TypeError(_('{} is not True or False').format(value).encode('utf8')) # pragma: no cover
|
||||
raise TypeError(_('{} is not True or False').format(value)) # pragma: no cover
|
||||
|
||||
def _is_already_exists(self, name, space, child, namespace):
|
||||
if isinstance(space, self.family): # pylint: disable=E1101
|
||||
|
@@ -267,7 +267,7 @@ class CreoleObjSpace(object):
|
|||
raise SpaceObjShallNotBeUpdated()
|
||||
else:
|
||||
raise CreoleDictConsistencyError(_('Already present in another XML file, {} '
|
||||
'cannot be re-created').format(name).encode('utf8'))
|
||||
'cannot be re-created').format(name))
|
||||
else:
|
||||
redefine = self._convert_boolean(subspace.get('redefine', False))
|
||||
exists = self._convert_boolean(subspace.get('exists', False))
|
||||
|
@@ -275,7 +275,7 @@ class CreoleObjSpace(object):
|
|||
return getattr(self, child.tag)()
|
||||
else:
|
||||
raise CreoleDictConsistencyError(_('Redefined object: '
|
||||
'{} does not exist yet').format(name).encode('utf8'))
|
||||
'{} does not exist yet').format(name))
|
||||
|
||||
def generate_creoleobj(self, child, space, namespace):
|
||||
"""
|
||||
|
@@ -352,7 +352,7 @@ class CreoleObjSpace(object):
|
|||
# UNREDEFINABLE concerns only 'variable' node so we can fix name
|
||||
# to child.attrib['name']
|
||||
name = child.attrib['name']
|
||||
raise CreoleDictConsistencyError(_("cannot redefine attribute {} for variable {}").format(attr, name).encode('utf8'))
|
||||
raise CreoleDictConsistencyError(_("cannot redefine attribute {} for variable {}").format(attr, name))
|
||||
if isinstance(getattr(creoleobj, attr, None), bool):
|
||||
if val == 'False':
|
||||
val = False
|
||||
|
@@ -360,7 +360,7 @@ class CreoleObjSpace(object):
|
|||
val = True
|
||||
else: # pragma: no cover
|
||||
raise CreoleOperationError(_('value for {} must be True or False, '
|
||||
'not {}').format(attr, val).encode('utf8'))
|
||||
'not {}').format(attr, val))
|
||||
if not (attr == 'name' and getattr(creoleobj, 'name', None) != None):
|
||||
setattr(creoleobj, attr, val)
|
||||
|
||||
|
@@ -373,7 +373,7 @@ class CreoleObjSpace(object):
|
|||
self.remove_condition(creoleobj.name)
|
||||
if child.tag in ['auto', 'fill', 'check']:
|
||||
variable_name = child.attrib['target']
|
||||
# XXX not working with variable not in creole and in master/slave
|
||||
# XXX not working with variable not in creole and in leader/followers
|
||||
if variable_name in self.redefine_variables:
|
||||
creoleobj.redefine = True
|
||||
else:
|
||||
|
@@ -397,7 +397,7 @@ class CreoleObjSpace(object):
|
|||
if child.tag == 'family':
|
||||
is_in_family = True
|
||||
if child.attrib['name'] in family_names:
|
||||
raise CreoleDictConsistencyError(_('Family {} is set several times').format(child.attrib['name']).encode('utf8'))
|
||||
raise CreoleDictConsistencyError(_('Family {} is set several times').format(child.attrib['name']))
|
||||
family_names.append(child.attrib['name'])
|
||||
if child.tag == 'variables':
|
||||
child.attrib['name'] = namespace
|
||||
|
@@ -457,9 +457,9 @@ class CreoleObjSpace(object):
|
|||
self.xml_parse_document(document, self.space, namespace)
|
||||
|
||||
def space_visitor(self, eosfunc_file): # pylint: disable=C0111
|
||||
ActionAnnotator(self.space, self.paths, self)
|
||||
ContainerAnnotator(self.space, self.paths, self)
|
||||
SpaceAnnotator(self.space, self.paths, self, eosfunc_file)
|
||||
ActionAnnotator(self)
|
||||
ContainerAnnotator(self)
|
||||
SpaceAnnotator(self, eosfunc_file)
|
||||
|
||||
def save(self, filename, force_no_save=False):
|
||||
"""Save an XML output on disk
|
||||
|
@@ -511,10 +511,10 @@ class CreoleObjSpace(object):
|
|||
space = list(space.values())
|
||||
if isinstance(space, list):
|
||||
for subspace in space:
|
||||
if isinstance(subspace, self.Master):
|
||||
_name = 'master'
|
||||
if isinstance(subspace, self.Leadership):
|
||||
_name = 'leader'
|
||||
subspace.doc = subspace.variable[0].description
|
||||
#subspace.doc = 'Master {}'.format(subspace.name)
|
||||
#subspace.doc = 'Leadership {}'.format(subspace.name)
|
||||
else:
|
||||
_name = name
|
||||
if name in ['containers', 'variables', 'actions']:
|
||||
|
@@ -554,9 +554,6 @@ class CreoleObjSpace(object):
|
|||
if sys.version_info[0] < 3 and isinstance(space, unicode):
|
||||
node.text = space
|
||||
elif isinstance(space, str):
|
||||
if sys.version_info[0] < 3:
|
||||
node.text = space.decode('utf8')
|
||||
else:
|
||||
node.text = space
|
||||
else:
|
||||
node.text = str(space)
|
||||
|
@@ -606,7 +603,7 @@ class Path(object):
|
|||
else:
|
||||
varname = '.'.join([namespace, family, name])
|
||||
self.variables[varname] = dict(name=name, family=family, namespace=namespace,
|
||||
master=None, creoleobj=creoleobj)
|
||||
leader=None, creoleobj=creoleobj)
|
||||
else: # pragma: no cover
|
||||
raise Exception('unknown pathtype {}'.format(pathtype))
|
||||
|
||||
|
@@ -617,7 +614,7 @@ class Path(object):
|
|||
if dico['namespace'] != 'creole' and current_namespace != dico['namespace']:
|
||||
raise CreoleDictConsistencyError(_('A family located in the {} namespace '
|
||||
'shall not be used in the {} namespace').format(
|
||||
dico['namespace'], current_namespace).encode('utf8'))
|
||||
dico['namespace'], current_namespace))
|
||||
path = dico['name']
|
||||
if dico['namespace'] is not None and '.' not in dico['name']:
|
||||
path = '.'.join([dico['namespace'], path])
|
||||
|
@@ -631,7 +628,7 @@ class Path(object):
|
|||
|
||||
def get_family_obj(self, name): # pylint: disable=C0111
|
||||
if name not in self.families:
|
||||
raise CreoleDictConsistencyError(_('unknown family {}').format(name).encode('utf8'))
|
||||
raise CreoleDictConsistencyError(_('unknown family {}').format(name))
|
||||
dico = self.families[name]
|
||||
return dico['creoleobj']
|
||||
|
||||
|
@@ -650,8 +647,8 @@ class Path(object):
|
|||
def get_variable_family_path(self, name): # pylint: disable=C0111
|
||||
dico = self._get_variable(name)
|
||||
list_path = [dico['namespace'], dico['family']]
|
||||
if dico['master'] is not None:
|
||||
list_path.append(dico['master'])
|
||||
if dico['leader'] is not None:
|
||||
list_path.append(dico['leader'])
|
||||
return '.'.join(list_path)
|
||||
|
||||
def get_variable_namespace(self, name): # pylint: disable=C0111
|
||||
|
@@ -665,29 +662,29 @@ class Path(object):
|
|||
if dico['namespace'] != 'creole' and current_namespace != dico['namespace']:
|
||||
raise CreoleDictConsistencyError(_('A variable located in the {} namespace '
|
||||
'shall not be used in the {} namespace').format(
|
||||
dico['namespace'], current_namespace).encode('utf8'))
|
||||
dico['namespace'], current_namespace))
|
||||
if '.' in dico['name']:
|
||||
return dico['name']
|
||||
list_path = [dico['namespace'], dico['family']]
|
||||
if dico['master'] is not None:
|
||||
list_path.append(dico['master'])
|
||||
if dico['leader'] is not None:
|
||||
list_path.append(dico['leader'])
|
||||
list_path.append(dico['name'])
|
||||
return '.'.join(list_path)
|
||||
|
||||
def path_is_defined(self, name): # pylint: disable=C0111
|
||||
return name in self.variables
|
||||
|
||||
def set_master(self, name, master): # pylint: disable=C0111
|
||||
def set_leader(self, name, leader): # pylint: disable=C0111
|
||||
dico = self._get_variable(name)
|
||||
namespace = dico['namespace']
|
||||
if dico['master'] != None:
|
||||
raise CreoleDictConsistencyError(_('Already defined master {} for variable'
|
||||
' {}'.format(dico['master'], name)).encode('utf8'))
|
||||
dico['master'] = master
|
||||
if dico['leader'] != None:
|
||||
raise CreoleDictConsistencyError(_('Already defined leader {} for variable'
|
||||
' {}'.format(dico['leader'], name)))
|
||||
dico['leader'] = leader
|
||||
if namespace != 'creole':
|
||||
new_path = self.get_variable_path(name, namespace)
|
||||
self.append('variable', new_path, namespace, family=dico['family'], creoleobj=dico['creoleobj'])
|
||||
self.variables[new_path]['master'] = master
|
||||
self.variables[new_path]['leader'] = leader
|
||||
del self.variables[name]
|
||||
|
||||
def _get_variable(self, name):
|
||||
|
@@ -695,10 +692,10 @@ class Path(object):
|
|||
if name.startswith('creole.'):
|
||||
raise CreoleDictConsistencyError(
|
||||
_("don't set full path variable in creole's namespace "
|
||||
"(set '{}' not '{}')").format(name.split('.')[-1], name).encode('utf8'))
|
||||
raise CreoleDictConsistencyError(_('unknown option {}').format(name).encode('utf8'))
|
||||
"(set '{}' not '{}')").format(name.split('.')[-1], name))
|
||||
raise CreoleDictConsistencyError(_('unknown option {}').format(name))
|
||||
return self.variables[name]
|
||||
|
||||
def get_master(self, name): # pylint: disable=C0111
|
||||
def get_leader(self, name): # pylint: disable=C0111
|
||||
dico = self._get_variable(name)
|
||||
return dico['master']
|
||||
return dico['leader']
|
||||
|
|
|
@@ -1,1006 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""Apply configuration of EOLE servers.
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
import argparse
|
||||
import time
|
||||
import shutil
|
||||
|
||||
from glob import glob
|
||||
|
||||
import spwd
|
||||
import getpass
|
||||
from itertools import count
|
||||
|
||||
from pyeole.log import getLogger
|
||||
from pyeole.log import init_logging
|
||||
from pyeole.log import set_formatter
|
||||
from pyeole.log import set_filters
|
||||
|
||||
from pyeole import scriptargs
|
||||
from pyeole.lock import acquire, release
|
||||
from pyeole import process
|
||||
from pyeole.schedule import display_schedules, apply_schedules
|
||||
from pyeole import ihm
|
||||
from pyeole.pkg import report, EolePkg, _configure_sources_mirror, _MIRROR_DIST
|
||||
from pyeole.pkg import PackageNotFoundError, RepositoryError, AptProxyError, AptCacherError
|
||||
from pyeole.service import manage_service, unmanaged_service, manage_services, \
|
||||
ServiceError
|
||||
from pyeole.encode import normalize
|
||||
from pyeole.diagnose.diagnose import MAJ_SUCCES_LOCK
|
||||
|
||||
from .error import FileNotFound, LockError, UnlockError
|
||||
from .error import UserExit, UserExitError
|
||||
from .error import VirtError
|
||||
from .client import CreoleClient, CreoleClientError, NotFoundError
|
||||
import fonctionseole, template, cert
|
||||
from .eosfunc import is_instanciate
|
||||
from .config import configeol, INSTANCE_LOCKFILE, UPGRADE_LOCKFILE, \
|
||||
container_instance_lockfile, gen_conteneurs_needed, VIRTROOT, charset
|
||||
from .containers import is_lxc_enabled, is_lxc_running, is_lxc_started, \
|
||||
generate_lxc_container, create_mount_point, lxc_need_restart
|
||||
from .error import NetworkConfigError
|
||||
|
||||
from pyeole.i18n import i18n
|
||||
_ = i18n('creole')
|
||||
|
||||
try:
|
||||
from zephir.lib_zephir import lock, unlock
|
||||
zephir_libs = True
|
||||
except Exception:
|
||||
zephir_libs = False
|
||||
|
||||
client = CreoleClient()
|
||||
|
||||
global PKGMGR
|
||||
PKGMGR = None
|
||||
|
||||
error_msg_documentation = _(u"""For more informations, read section
|
||||
'Mise en œuvre des modules EOLE' in module documentation or
|
||||
common documentation.""")
|
||||
def load_pkgmgr():
|
||||
global PKGMGR
|
||||
if PKGMGR is None:
|
||||
cache()
|
||||
PKGMGR = EolePkg('apt', container_mode=CACHE['is_lxc_enabled'])
|
||||
PKGMGR.pkgmgr.groups = CACHE
|
||||
PKGMGR.pkgmgr._load_apt_cache()
|
||||
eoles = []
|
||||
for eole in client.get_creole(u'serveur_maj'):
|
||||
eoles.append('http://{0}/eole/'.format(eole))
|
||||
ubuntus = []
|
||||
for ubuntu in client.get_creole(u'ubuntu_update_mirrors'):
|
||||
ubuntus.append('http://{0}/ubuntu/'.format(ubuntu))
|
||||
envoles = []
|
||||
try:
|
||||
for envole in client.get_creole(u'envole_update_mirrors'):
|
||||
envoles.append('http://{0}/envole/'.format(envole))
|
||||
except NotFoundError:
|
||||
pass
|
||||
for cache_ in PKGMGR.pkgmgr.cache._list.list:
|
||||
if cache_.uri in eoles:
|
||||
PKGMGR.pkgmgr._test_mirror(cache_.uri, _MIRROR_DIST['EOLE'])
|
||||
eoles = []
|
||||
if cache_.uri in ubuntus:
|
||||
PKGMGR.pkgmgr._test_mirror(cache_.uri, _MIRROR_DIST['Ubuntu'])
|
||||
ubuntus = []
|
||||
if cache_.uri in envoles:
|
||||
PKGMGR.pkgmgr._test_mirror(cache_.uri, _MIRROR_DIST['Envole'])
|
||||
envoles = []
|
||||
fonctionseole.PkgManager = PKGMGR
|
||||
|
||||
_LOGFILENAME = '/var/log/reconfigure.log'
|
||||
|
||||
# Command line options
|
||||
class Option:
|
||||
"""Manage commande line options with defaults
|
||||
|
||||
"""
|
||||
def __init__(self):
|
||||
self.parser = argparse.ArgumentParser(
|
||||
description=_(u"Applying EOLE configuration."),
|
||||
parents=[scriptargs.container(),
|
||||
scriptargs.logging(level='info')])
|
||||
self.parser.add_argument('-i', '--interactive', action='store_true',
|
||||
help=_(u"leave process in interactive mode"))
|
||||
self.parser.add_argument('-f', '--force', action='store_true',
|
||||
help=_(u"override Zéphir lock"))
|
||||
self.parser.add_argument('-a', '--auto', action='store_true',
|
||||
help=_(u"automatic reboot if necessary"))
|
||||
self.__opts = self.parser.parse_args([])
|
||||
|
||||
def update_from_cmdline(self, force_args=None, force_options=None):
|
||||
"""Parse command line
|
||||
"""
|
||||
self.__opts = self.parser.parse_args(force_args)
|
||||
if self.__opts.verbose:
|
||||
self.__opts.log_level = 'info'
|
||||
if self.__opts.debug:
|
||||
self.__opts.log_level = 'debug'
|
||||
if force_options is not None:
|
||||
for key, value in force_options.items():
|
||||
setattr(self.__opts, key, value)
|
||||
self.__dict__.update(self.__opts.__dict__)
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name in ['__opts', 'update_from_cmdline']:
|
||||
return self.__dict__[name]
|
||||
else:
|
||||
return getattr(self.__opts, name)
|
||||
|
||||
options = Option()
|
||||
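# [Editor's sketch, not part of the original file] The module-level `options`
# object starts with pure defaults (parse_args([]) above) and is refreshed later
# by the entry points; the forced arguments and values below are illustrative:
def _example_refresh_options():
    options.update_from_cmdline(force_args=['--auto'],
                                force_options={'interactive': False})
    # Attribute access is proxied to the parsed namespace through __getattr__.
    return options.auto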
|
||||
# To use log from every function
|
||||
log = getLogger(__name__)
|
||||
|
||||
# Same name for instance and reconfigure
|
||||
LOCK_NAME = u'reconfigure'
|
||||
|
||||
# Run scripts in directories
|
||||
RUNPARTS_PATH = u'/usr/share/eole'
|
||||
RUNPARTS_CMD = u'/bin/run-parts --exit-on-error -v {directory} --arg {compat} 2>&1'
|
||||
|
||||
# Compatibility
|
||||
COMPAT_NAME = u'reconfigure'
|
||||
|
||||
#def parse_cmdline():
|
||||
# """Parse command line
|
||||
# """
|
||||
# descr = u"Application de la configuration EOLE"
|
||||
# parser = argparse.ArgumentParser(description=descr,
|
||||
# parents=[scriptargs.container(),
|
||||
# scriptargs.logging(level='info')])
|
||||
# parser.add_argument('-i', '--interactive', action='store_true',
|
||||
# help=u"lancer le processus en mode interactif")
|
||||
# parser.add_argument('-f', '--force', action='store_true',
|
||||
# help=u"force l'action même s'il existe des verrous")
|
||||
# parser.add_argument('-a', '--auto', action='store_true',
|
||||
# help=u"redémarrage automatique si nécessaire")
|
||||
#
|
||||
# opts = parser.parse_args()
|
||||
# if opts.verbose:
|
||||
# opts.log_level = 'info'
|
||||
# if opts.debug:
|
||||
# opts.log_level = 'debug'
|
||||
# return opts
|
||||
|
||||
def copyDirectoryContent(src, dst):
|
||||
for fic in os.listdir(src):
|
||||
# Skip links or we would overwrite existing certificates
|
||||
if os.path.islink(os.path.join(src, fic)):
|
||||
continue
|
||||
try:
|
||||
shutil.copy2(os.path.join(src, fic), dst)
|
||||
except shutil.Error, err:
|
||||
# ignore if files already exists
|
||||
pass
|
||||
|
||||
def user_exit(*args, **kwargs):
|
||||
"""
|
||||
clean user exit
|
||||
"""
|
||||
log.warn(_(u'! Abandoning configuration !'))
|
||||
log.warn(_(u'System may be in an incoherent state.\n\n'))
|
||||
raise UserExitError()
|
||||
|
||||
def unlock_actions(need_lock=True):
|
||||
if zephir_libs:
|
||||
#FIXME: Zephir lock!
|
||||
unlock('actions')
|
||||
try:
|
||||
release(LOCK_NAME, level='system')
|
||||
except Exception, err:
|
||||
# FIXME: move lock exception to pyeole.lock #7400
|
||||
if need_lock:
|
||||
raise UnlockError(str(err))
|
||||
|
||||
def lock_actions():
|
||||
try:
|
||||
acquire(LOCK_NAME, level="system")
|
||||
except Exception, err:
|
||||
# FIXME: move lock exception to pyeole.lock #7400
|
||||
raise LockError(str(err))
|
||||
if zephir_libs:
|
||||
#FIXME: Zephir lock!
|
||||
lock('actions')
|
||||
|
||||
def reset_compat_name():
|
||||
"""
|
||||
Reset the name of the current procedure
|
||||
according to the environment
|
||||
"""
|
||||
global COMPAT_NAME
|
||||
if options.interactive:
|
||||
COMPAT_NAME = u'instance'
|
||||
else:
|
||||
COMPAT_NAME = u'reconfigure'
|
||||
|
||||
def run_parts(directory):
|
||||
"""Run scripts in a directory
|
||||
|
||||
@param directory: name of a directory
|
||||
@type directory: C{str}
|
||||
"""
|
||||
dirpath = os.path.join(RUNPARTS_PATH, directory)
|
||||
if os.path.isdir(dirpath):
|
||||
ihm.print_title(_(u'Running scripts {0}').format(directory))
|
||||
code = os.system(RUNPARTS_CMD.format(directory=dirpath, compat=COMPAT_NAME))
|
||||
if code != 0:
|
||||
raise Exception(_(u'Error {0}').format(directory))
|
||||
|
||||
def restart_creoled():
|
||||
"""
|
||||
Restart creoled service and verify if the client is OK
|
||||
"""
|
||||
unmanaged_service(u'restart', u'creoled', u'service', display='console')
|
||||
try:
|
||||
client.get_creole(u'eole_version')
|
||||
except CreoleClientError:
|
||||
msg = _(u"Please check creoled's log (/var/log/rsyslog/local/creoled/creoled.info.log)\nand restart service with command 'service creoled start'")
|
||||
raise CreoleClientError(msg)
|
||||
|
||||
def prepare(need_lock=True):
|
||||
"""Sanity checks.
|
||||
|
||||
"""
|
||||
global RUNPARTS_CMD
|
||||
# Clean exit
|
||||
if need_lock:
|
||||
ihm.catch_signal(user_exit)
|
||||
lock_actions()
|
||||
|
||||
if options.container != None:
|
||||
RUNPARTS_CMD += u" --regex '^[09][09]-{0}$'".format(options.container)
|
||||
|
||||
ihm.print_title(_(u"Preparation for {0}").format(COMPAT_NAME))
|
||||
|
||||
if not os.path.isfile(configeol):
|
||||
print _(u"Server is not configured.")
|
||||
print
|
||||
print error_msg_documentation
|
||||
print
|
||||
raise FileNotFound(_(u'Missing file {0}.').format(configeol))
|
||||
|
||||
display_info = False
|
||||
|
||||
if not options.interactive and (is_instanciate() == 'non' or os.path.isfile(UPGRADE_LOCKFILE)):
|
||||
ihm.print_red(_(u"Server must be instantiated before any reconfiguration can occur."))
|
||||
display_info = True
|
||||
|
||||
if options.interactive and is_instanciate() == 'oui' and \
|
||||
not os.path.isfile(UPGRADE_LOCKFILE) and \
|
||||
not os.path.isfile(container_instance_lockfile):
|
||||
ihm.print_red(_(u"Server already instantiated."))
|
||||
print
|
||||
print _(u"To modify configuration parameter (e.g. IP address), use:")
|
||||
print _(u"'gen_config'")
|
||||
print _(u"then 'reconfigure' to apply changes.")
|
||||
display_info = True
|
||||
|
||||
if os.path.isfile(container_instance_lockfile) and not options.interactive:
|
||||
raise Exception(_('you have run gen_conteneurs, please use instance instead of reconfigure'))
|
||||
|
||||
if os.path.isfile(gen_conteneurs_needed):
|
||||
raise Exception(_('You have to run gen_conteneurs before instance'))
|
||||
|
||||
if display_info:
|
||||
print
|
||||
print error_msg_documentation
|
||||
print
|
||||
if not options.interactive:
|
||||
raise Exception(_(u"First instantiate server."))
|
||||
else:
|
||||
if ihm.prompt_boolean(_(u"Proceeding with instantiation ?"),
|
||||
interactive=options.interactive,
|
||||
default=False) is False:
|
||||
raise UserExit()
|
||||
else:
|
||||
fonctionseole.zephir("MSG", "Instance forcée par l'utilisateur",
|
||||
COMPAT_NAME.upper())
|
||||
|
||||
# restart the creoled service
|
||||
restart_creoled()
|
||||
|
||||
if fonctionseole.init_proc(COMPAT_NAME.upper()) == False and not options.force:
|
||||
log.warn(_(u"This process is blocked, contact Zéphir administrator."))
|
||||
if ihm.prompt_boolean(_(u"Force execution?"),
|
||||
interactive=options.interactive,
|
||||
default=False) is False:
|
||||
if not options.interactive:
|
||||
log.warn(_(u"Use -f option if you want to force execution"))
|
||||
raise UserExitError()
|
||||
else:
|
||||
fonctionseole.zephir("MSG",
|
||||
"Instance forcée par l'utilisateur",
|
||||
COMPAT_NAME.upper())
|
||||
|
||||
|
||||
def valid_mandatory(need_lock):
|
||||
try:
|
||||
client.valid_mandatory()
|
||||
except Exception, err:
|
||||
log.warn(_('Configuration validation problem, please check server configuration.'))
|
||||
print
|
||||
print error_msg_documentation
|
||||
print
|
||||
unlock_actions(need_lock)
|
||||
raise ValueError(str(err))
|
||||
|
||||
def _start_containers():
|
||||
""" Try to start containers and make sure they are started
|
||||
"""
|
||||
cache()
|
||||
for group_name in CACHE['groups_container']:
|
||||
group = CACHE['group_infos'][group_name]
|
||||
create_mount_point(group)
|
||||
|
||||
if os.access('/usr/share/eole/preservice/00-lxc-net', os.X_OK):
|
||||
log.debug("Override lxc-net systemd script")
|
||||
process.system_code(['/usr/share/eole/preservice/00-lxc-net'])
|
||||
|
||||
unmanaged_service(u'start', u'lxc-net', u'systemd', display='console', ctx=CACHE['group_infos']['root'])
|
||||
try:
|
||||
unmanaged_service(u'status', u'lxc', u'systemd')
|
||||
except ServiceError:
|
||||
unmanaged_service(u'start', u'lxc', u'systemd', display='console', ctx=CACHE['group_infos']['root'])
|
||||
#if lxc not started, do not wait for it
|
||||
#(we already waiting for it in systemd service)
|
||||
#if started, waiting for ssh access
|
||||
|
||||
max_try = 10
|
||||
for count in range(max_try):
|
||||
s_code, s_out, s_err = process.system_out(['lxc-ls', '--stopped'])
|
||||
stopped = s_out.split()
|
||||
f_code, f_out, f_err = process.system_out(['lxc-ls', '--frozen'])
|
||||
frozen = f_out.split()
|
||||
|
||||
if stopped or frozen:
|
||||
not_running = stopped + frozen
|
||||
else:
|
||||
# Everything is started by LXC
|
||||
# Are they reachable by SSH?
|
||||
not_running = []
|
||||
for group_name in CACHE['groups_container']:
|
||||
group_infos = CACHE['group_infos'][group_name]
|
||||
if not is_lxc_running(group_infos):
|
||||
not_running.append(group_name)
|
||||
|
||||
log.debug('Waiting 1 second for SSH access')
|
||||
time.sleep(1)
|
||||
|
||||
if not not_running:
|
||||
break
|
||||
|
||||
if stopped:
|
||||
for cont in stopped:
|
||||
log.debug('Manual start of stopped container “{0}”'.format(cont))
|
||||
process.system_out(['lxc-start', '--name', cont, '--daemon',
|
||||
'-o', '/var/log/lxc-{0}.log'.format(cont)])
|
||||
|
||||
if frozen:
|
||||
for cont in frozen:
|
||||
log.debug('Manual unfreeze of frozen container “{0}”'.format(cont))
|
||||
process.system_out(['lxc-unfreeze', '--name', cont,
|
||||
'-o', '/var/log/lxc-{0}.log'.format(cont)])
|
||||
|
||||
if not_running:
|
||||
waiting_for = ', '.join(not_running)
|
||||
msg = _(u'Unable to start LXC container : {0}',
|
||||
u'Unable to start LXC containers : {0}', len(not_running))
|
||||
raise VirtError(msg.format(waiting_for))
|
||||
|
||||
|
||||
def containers(minimal=False, log_=None):
|
||||
"""Generate containers
|
||||
"""
|
||||
if log_ is None:
|
||||
log_ = log
|
||||
VAR_LXC='/var/lib/lxc'
|
||||
OPT_LXC='/opt/lxc'
|
||||
|
||||
cache()
|
||||
if not CACHE['is_lxc_enabled']:
|
||||
log.debug(_(u'Container mode is disabled.'))
|
||||
return True
|
||||
if not options.interactive:
|
||||
for group in CACHE['groups_container']:
|
||||
if not os.path.isdir(os.path.join(VIRTROOT, group)):
|
||||
raise Exception(_(u'container {0} does not exist yet, please use gen_conteneurs to create it').format(group))
|
||||
else:
|
||||
# make /var/lib/lxc -> /opt/lxc
|
||||
if os.path.isdir(VAR_LXC) and not os.path.exists(OPT_LXC):
|
||||
ihm.print_title(_(u"Setting up {0}").format(OPT_LXC))
|
||||
unmanaged_service(u'stop', u'lxc', u'systemd', display='console')
|
||||
unmanaged_service(u'stop', u'lxc-net', u'systemd', display='console')
|
||||
shutil.move(VAR_LXC, OPT_LXC)
|
||||
os.symlink(OPT_LXC, VAR_LXC)
|
||||
#first instance should be in minimal mode
|
||||
minimal = True
|
||||
|
||||
ihm.print_title(_(u'Generating containers'))
|
||||
|
||||
engine = template.CreoleTemplateEngine()
|
||||
rootctx = CACHE['group_infos']['root']
|
||||
if minimal:
|
||||
# inject var _minimal_mode in creole's vars that can be used in template
|
||||
engine.creole_variables_dict['_minimal_mode'] = True
|
||||
engine.instance_file(u'/etc/ssh/ssh_config', ctx=rootctx)
|
||||
engine.instance_file(u'/etc/lxc/default.conf', ctx=rootctx)
|
||||
engine.instance_file(u'/etc/dnsmasq.d/lxc', ctx=rootctx)
|
||||
engine.instance_file(u'/etc/default/lxc-net', ctx=rootctx)
|
||||
engine.instance_file(u'/etc/apt/apt.conf.d/02eoleproxy', ctx=rootctx)
|
||||
if CACHE['module_instancie'] == 'oui':
|
||||
engine.instance_file(u'/etc/resolv.conf', ctx=rootctx)
|
||||
|
||||
load_pkgmgr()
|
||||
PKGMGR.pkgmgr._prepare_cache()
|
||||
for group in CACHE['groups_container']:
|
||||
generate_lxc_container(group)
|
||||
groupctx = CACHE['group_infos'][group]
|
||||
if minimal:
|
||||
engine.instance_file(u'../fstab', container=group, ctx=groupctx)
|
||||
engine.instance_file(u'../config', container=group, ctx=groupctx)
|
||||
engine.instance_file(u'../devices.hook', container=group, ctx=groupctx)
|
||||
engine.instance_file(u'/etc/network/interfaces', container=group, ctx=groupctx)
|
||||
engine.instance_file(u'/etc/apt/apt.conf.d/02eoleproxy', container=group, ctx=groupctx)
|
||||
engine.instance_file(u'/etc/ssh/sshd_config', container=group, ctx=groupctx)
|
||||
if CACHE['module_instancie'] == 'oui':
|
||||
container_path = os.path.join(groupctx['path'], 'etc/resolv.conf')
|
||||
if os.path.islink(container_path):
|
||||
os.remove(container_path)
|
||||
engine.instance_file(u'/etc/resolv.conf', container=group, ctx=groupctx)
|
||||
PKGMGR.pkgmgr._umount_cdrom()
|
||||
|
||||
ihm.print_title(_(u'Starting containers'))
|
||||
_start_containers()
|
||||
|
||||
def remove_packages():
|
||||
""" Remove packages listed in /usr/share/eole/remove.d/ files
|
||||
param: repo: EoleApt Object
|
||||
"""
|
||||
torm_conf = glob(u'/usr/share/eole/remove.d/*.conf')
|
||||
pkg_list = []
|
||||
for config in torm_conf:
|
||||
try:
|
||||
f_h = open(config, 'r')
|
||||
for line in f_h.readlines():
|
||||
pkg_list.append(line.strip('\n'))
|
||||
f_h.close()
|
||||
except IOError, err:
|
||||
log.error(_(u'Can not read file {0}: {1}').format(config, err))
|
||||
|
||||
try:
|
||||
load_pkgmgr()
|
||||
except (RepositoryError, AptProxyError, AptCacherError), err:
|
||||
pass
|
||||
|
||||
kernels = fonctionseole.get_kernel_to_remove()
|
||||
|
||||
if kernels:
|
||||
ihm.print_line(_(u"Removing old linux kernels and associate headers."))
|
||||
pkg_list.extend(kernels)
|
||||
|
||||
if pkg_list != []:
|
||||
try:
|
||||
PKGMGR.remove(packages=pkg_list)
|
||||
except (PackageNotFoundError, SystemError), err:
|
||||
msg = _(u'Unable to remove some packages: {0}')
|
||||
log.warn(msg.format(err))
|
||||
log.warn(_(u"These packages will be removed next 'reconfigure'"))
|
||||
|
||||
|
||||
CACHE = {}
|
||||
def cache():
|
||||
global CACHE
|
||||
if not 'groups' in CACHE:
|
||||
CACHE['groups'] = client.get_groups()
|
||||
CACHE['groups_container'] = []
|
||||
for group in CACHE['groups']:
|
||||
if group not in ['root', 'all']:
|
||||
CACHE['groups_container'].append(group)
|
||||
CACHE['group_infos'] = {}
|
||||
for group_name in CACHE['groups']:
|
||||
group_infos = client.get_group_infos(group_name)
|
||||
CACHE['group_infos'][group_name] = group_infos
|
||||
CACHE['is_lxc_enabled'] = is_lxc_enabled()
|
||||
CACHE['module_instancie'] = client.get_creole('module_instancie')
|
||||
|
||||
|
||||
|
||||
def install_packages(silent=False):
|
||||
"""Install package for each container group
|
||||
"""
|
||||
load_pkgmgr()
|
||||
|
||||
cache()
|
||||
header = _(u'Checking Packages for container')
|
||||
for group_name, group_infos in CACHE['group_infos'].items():
|
||||
package_names = [pkg[u'name'] for pkg in group_infos[u'packages']]
|
||||
if package_names != []:
|
||||
msg = header + ' {0}: {1}'.format(group_name, ' '.join(package_names))
|
||||
ihm.print_line(msg)
|
||||
PKGMGR.install(packages=package_names,
|
||||
silent=silent,
|
||||
container=group_infos[u'name'])
|
||||
|
||||
|
||||
def packages():
|
||||
"""Manage packages
|
||||
"""
|
||||
ihm.print_title(_(u'Managing packages'))
|
||||
log.info(_(u' Removing packages'))
|
||||
ihm.print_line(_(u'Removing packages'))
|
||||
remove_packages()
|
||||
log.info(_(u' Installing packages'))
|
||||
ihm.print_line(_(u'Installing packages'))
|
||||
install_packages()
|
||||
|
||||
|
||||
def templates():
|
||||
"""Run pretemplate scripts and manage templates
|
||||
"""
|
||||
ihm.print_title(_(u'Generating configuration files'))
|
||||
log.info(_(u'Generating configuration files'))
|
||||
cache()
|
||||
try:
|
||||
tmpl = template.CreoleTemplateEngine()
|
||||
tmpl.instance_files(container=options.container, containers_ctx=CACHE['group_infos'].values())
|
||||
except Exception, err:
|
||||
if options.debug:
|
||||
log.debug(err, exc_info=True)
|
||||
else:
|
||||
log.error(err)
|
||||
raise err
|
||||
|
||||
|
||||
def services(action, display_title=True, try_restart_lxc=True):
|
||||
"""Manage services
|
||||
"""
|
||||
cache()
|
||||
exclude = None
|
||||
if action == u'stop':
|
||||
if display_title:
|
||||
ihm.print_title(_(u"Stopping services"))
|
||||
exclude = (('root', 'networking'),)
|
||||
elif action == u'start':
|
||||
if display_title:
|
||||
ihm.print_title(_(u"Starting services"))
|
||||
# do not start the certbot service, it is a oneshot service
|
||||
# and pyeole.service does not seem to like that ... #22092
|
||||
exclude = (('root', 'networking'), ('root', 'certbot'))
|
||||
ctx = CACHE['group_infos']['root']
|
||||
manage_services(action, u'networking', display='console', containers_ctx=[ctx])
|
||||
if try_restart_lxc and CACHE['is_lxc_enabled']:
|
||||
if lxc_need_restart():
|
||||
unmanaged_service(u'stop', u'lxc', u'systemd', display='console', ctx=ctx)
|
||||
unmanaged_service(u'stop', u'lxc-net', u'systemd', display='console', ctx=ctx)
|
||||
_start_containers()
|
||||
elif action == u'configure':
|
||||
if display_title:
|
||||
ihm.print_title(_(u"Configuring services"))
|
||||
else:
|
||||
raise ValueError(_(u"Unknown service action: {0}").format(action))
|
||||
if options.container is not None:
|
||||
containers_ctx = [CACHE['group_infos'][options.container]]
|
||||
else:
|
||||
containers_ctx = CACHE['group_infos'].values()
|
||||
manage_services(action, container=options.container, display='console', exclude=exclude, containers_ctx=containers_ctx)
|
||||
|
||||
|
||||
def _gen_user_list():
|
||||
"""Generate list of users for password modification
|
||||
|
||||
Start with basic one and ask for supplementary users.
|
||||
"""
|
||||
yield 'root'
|
||||
|
||||
node = client.get_creole(u'activer_onenode', 'non')
|
||||
master = client.get_creole(u'activer_onesinglenode', 'non')
|
||||
if node == 'oui' and master == 'non':
|
||||
yield 'oneadmin'
|
||||
|
||||
for number in count(1):
|
||||
if number == 1:
|
||||
yield 'eole'
|
||||
else:
|
||||
yield 'eole{0}'.format(number)
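A minimal, self-contained sketch of the naming pattern this generator produces (assuming a setup where activer_onenode answers 'non', so the 'oneadmin' entry is skipped; the real function also queries the Creole client):

from itertools import count, islice

def _user_names():
    # simplified copy of _gen_user_list() without the Creole client lookups
    yield 'root'
    for number in count(1):
        if number == 1:
            yield 'eole'
        else:
            yield 'eole{0}'.format(number)

# the password loop below walks this sequence until the operator declines
print list(islice(_user_names(), 4))   # ['root', 'eole', 'eole2', 'eole3']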
|
||||
|
||||
|
||||
|
||||
def users():
|
||||
"""Manage users
|
||||
"""
|
||||
from passlib.context import CryptContext
|
||||
ihm.print_title(_(u'Managing system user accounts'))
|
||||
schemes = [u'sha512_crypt', u'sha256_crypt', u'sha1_crypt', u'md5_crypt']
|
||||
cryptctx = CryptContext(schemes=schemes)
|
||||
default_pass = {u'root': u'$eole&123456$',
|
||||
u'eole': u'$fpmf&123456$',
|
||||
u'oneadmin': u'$eole&123456$'}
|
||||
|
||||
if not options.interactive:
|
||||
log.debug(_(u'No system user account management in non-interactive mode.'))
|
||||
return
|
||||
|
||||
for user in _gen_user_list():
|
||||
try:
|
||||
user_infos = spwd.getspnam(user)
|
||||
except KeyError:
|
||||
if user == u'root':
|
||||
msg = _(u"'root' user unknown. This is abnormal.")
|
||||
raise Exception(msg)
|
||||
|
||||
# no new administrator with NFS (#16321)
|
||||
if user != 'eole' and client.get_creole(u'adresse_serveur_nfs', None) is not None:
|
||||
log.warn(_(u'No new EOLE account with /home on NFS'))
|
||||
break
|
||||
|
||||
prompt = _('Create new administrator user account {0}?')
|
||||
if user != 'eole' and ihm.prompt_boolean(prompt.format(user)) is False:
|
||||
break
|
||||
|
||||
msg = _(u"Creating unexistent user {0}")
|
||||
log.info(msg.format(user))
|
||||
|
||||
cmd = ['adduser', '--quiet', '--shell', '/usr/bin/manage-eole',
|
||||
'--gecos', '{0} user'.format(user.upper()),
|
||||
'--disabled-password', user]
|
||||
code = process.system_code(cmd)
|
||||
if code != 0:
|
||||
msg = _(u"Unable to create user {0}")
|
||||
raise Exception(msg.format(user))
|
||||
|
||||
cmd = ['usermod', '--append', '--groups', 'adm,mail', user]
|
||||
code, out, err = process.system_out(cmd)
|
||||
if code != 0:
|
||||
msg = _(u"Unable to add '{0}' to group 'adm'.")
|
||||
raise Exception(msg.format(user))
|
||||
|
||||
# Update informations
|
||||
user_infos = spwd.getspnam(user)
|
||||
|
||||
if user not in default_pass and user_infos.sp_pwd not in ['!', '*']:
|
||||
msg = _(u"No modification of password of administrator user account {0}.")
|
||||
log.warn(msg.format(user))
|
||||
continue
|
||||
|
||||
# Change password:
|
||||
# - on first instance
|
||||
# - if user is not an EOLE default user
|
||||
# - if user password match default ones
|
||||
if (not os.path.isfile(INSTANCE_LOCKFILE)
|
||||
or (user not in default_pass or user_infos.sp_pwd in ['!', '*']
|
||||
or cryptctx.verify(default_pass[user], user_infos.sp_pwd))):
|
||||
|
||||
msg = _(u"# Modificating password for user account {0} #")
|
||||
msg = msg.format(user)
|
||||
log.warn(u'#' * len(msg))
|
||||
log.warn(msg)
|
||||
log.warn(u'#' * len(msg))
|
||||
max_try = 5
|
||||
prompt = u'{0}{1}: '
|
||||
first_prompt = _(u"New password")
|
||||
second_prompt = _(u"Confirming new password")
|
||||
loop_counter = u''
|
||||
for attempt in range(1, max_try+2):
|
||||
if attempt == max_try+1:
|
||||
msg = _(u"Password input errors for {0}. Abandon.")
|
||||
raise Exception(msg.format(user))
|
||||
|
||||
loop_counter = loop_counter.format(attempt, max_try)
|
||||
passwd = getpass.getpass(prompt.format(first_prompt,
|
||||
loop_counter))
|
||||
confirm_pass = getpass.getpass(prompt.format(second_prompt,
|
||||
loop_counter))
|
||||
if passwd == confirm_pass:
|
||||
if user in default_pass and default_pass[user] == passwd:
|
||||
log.error(_(u"Can not use default password."))
|
||||
else:
|
||||
# Now we have the password
|
||||
stdin = '{0}:{1}'.format(user, passwd)
|
||||
code, stdout, stderr = process.system_out(['chpasswd'],
|
||||
stdin=stdin)
|
||||
if code == 0:
|
||||
msg = _(u'User {0} password updated.')
|
||||
log.info(msg.format(user))
|
||||
# Success
|
||||
break
|
||||
msg = _(u"Error changing password for {0}.")
|
||||
try_again_pos = stdout.find('Try again.')
|
||||
chpassmsg = stdout[0:try_again_pos]
|
||||
log.error(msg.format(user))
|
||||
print chpassmsg
|
||||
else:
|
||||
log.error(_(u"Passwords mismatch."))
|
||||
|
||||
# Display counter
|
||||
loop_counter = u' ({0}/{1})'
|
||||
|
||||
|
||||
def certificates():
|
||||
"""Manage certificates
|
||||
|
||||
"""
|
||||
ihm.print_title(_(u'Managing certificates'))
|
||||
try:
|
||||
# regenerate the SSL certificate hashes after creating the new certificates
|
||||
# ported from 2.3 #8488
|
||||
cert.rehash_if_needed()
|
||||
cert.gen_certs()
|
||||
except Exception, err:
|
||||
if options.debug:
|
||||
log.debug(err, exc_info=True)
|
||||
else:
|
||||
log.error(err)
|
||||
raise Exception(_(u"Error while generating certificates: {0}").format(err))
|
||||
cache()
|
||||
if CACHE['is_lxc_enabled']:
|
||||
src = os.path.join(cert.ssl_dir, "certs")
|
||||
for group_name in CACHE['groups_container']:
|
||||
group = CACHE['group_infos'][group_name]
|
||||
ihm.print_line(_("Copying certificates in {0}").format(group['name']))
|
||||
dst = os.path.join('/', group['path'].lstrip('/').encode(charset), src.lstrip('/'))
|
||||
copyDirectoryContent(src, dst)
|
||||
process.system_out(['/usr/bin/c_rehash'], container=group_name)
|
||||
|
||||
|
||||
def param_kernel():
|
||||
"""Manage kernel parameters
|
||||
"""
|
||||
ihm.print_title(_(u'Applying kernel parameters'))
|
||||
os.system('/sbin/sysctl -p >/dev/null')
|
||||
|
||||
def kill_dhclient():
|
||||
"""Kill dhclient for static IP configuration.
|
||||
|
||||
"""
|
||||
if client.get_creole(u'eth0_method') == u'statique':
|
||||
os.system('killall dhclient dhclient3 2>/dev/null')
|
||||
|
||||
def finalize(need_lock=True):
|
||||
"""Clean up
|
||||
"""
|
||||
ihm.print_title(_(u'Finalizing configuration'))
|
||||
# registration
|
||||
try:
|
||||
process.system_out("/usr/share/creole/diag.py")
|
||||
except Exception:
|
||||
pass
|
||||
fonctionseole.zephir("FIN", "Configuration terminée", COMPAT_NAME.upper())
|
||||
if not os.path.isfile(INSTANCE_LOCKFILE):
|
||||
# the instance run completed successfully (#7051)
|
||||
file(INSTANCE_LOCKFILE, 'w').close()
|
||||
|
||||
if os.path.isfile(UPGRADE_LOCKFILE):
|
||||
os.unlink(UPGRADE_LOCKFILE)
|
||||
|
||||
if os.path.isfile(container_instance_lockfile):
|
||||
os.unlink(container_instance_lockfile)
|
||||
|
||||
# keep the last 2 versions of the configuration (#8455)
|
||||
old = '{0}.bak'.format(configeol)
|
||||
old1 = '{0}.bak.1'.format(configeol)
|
||||
if not os.path.isfile(old):
|
||||
log.debug(_(u'Backup {0} in {1}'.format(configeol, old)))
|
||||
shutil.copy(configeol, old)
|
||||
elif process.system_out(['diff', '-q', configeol, old])[0] == 0:
|
||||
log.debug(_(u"{0} was not modified".format(configeol)))
|
||||
else:
|
||||
log.debug(_(u'Backup {0} in {1}'.format(old, old1)))
|
||||
shutil.copy(old, old1)
|
||||
log.debug(_(u'Backup {0} in {1}'.format(configeol, old)))
|
||||
shutil.copy(configeol, old)
|
||||
if need_lock:
|
||||
unlock_actions()
|
||||
|
||||
def update_server():
|
||||
"""Manage server update
|
||||
"""
|
||||
if os.path.isfile(MAJ_SUCCES_LOCK):
|
||||
os.remove(MAJ_SUCCES_LOCK)
|
||||
if options.interactive:
|
||||
log.info(_(u'Managing update'))
|
||||
|
||||
ihm.print_title(_(u'Updating server'))
|
||||
if ihm.prompt_boolean(_(u"""An update is recommended.
|
||||
Do you want to proceed with the network update now?"""),
|
||||
default=True, level='warn',
|
||||
default_uninteractive=False) is True:
|
||||
report(2)
|
||||
try:
|
||||
load_pkgmgr()
|
||||
_configure_sources_mirror(PKGMGR.pkgmgr)
|
||||
PKGMGR.update(silent=True)
|
||||
upgrades = PKGMGR.get_upgradable_list(silent=True)
|
||||
require_dist_upgrade = False
|
||||
for container, container_upgrades in upgrades.items():
|
||||
if container_upgrades:
|
||||
require_dist_upgrade = True
|
||||
break
|
||||
if require_dist_upgrade:
|
||||
# At least one container requires an upgrade
|
||||
PKGMGR.dist_upgrade()
|
||||
# Update lock => OK, will be deleted at next reconfigure
|
||||
report(0)
|
||||
# recall reconfigure
|
||||
main(force_options={'interactive': False})
|
||||
# back to instance
|
||||
options.interactive = True
|
||||
reset_compat_name()
|
||||
else:
|
||||
log.warn(_(u"No updates available."))
|
||||
report(3)
|
||||
except Exception, err:
|
||||
report(1, normalize(err))
|
||||
raise err
|
||||
|
||||
|
||||
def schedule():
|
||||
"""Manage task scheduling
|
||||
"""
|
||||
ihm.print_title(_(u'Task scheduling'))
|
||||
apply_schedules()
|
||||
display_schedules()
|
||||
# first run of instance
|
||||
#if not os.path.isfile(schedule.SCHEDULE_FILE):
|
||||
# schedule.add_post_schedule('majauto', 'weekly')
|
||||
#schedule.prog_schedule()
|
||||
|
||||
def is_valid_ip_eth0():
|
||||
"""Check if adresse_ip_eth0 is 169.254.0.1
|
||||
"""
|
||||
ip_eth0 = client.get_creole(u'adresse_ip_eth0')
|
||||
if ip_eth0 == "169.254.0.1":
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
def reboot_server():
|
||||
"""Reboot the server if required
|
||||
"""
|
||||
if fonctionseole.controle_kernel():
|
||||
if options.interactive:
|
||||
print
|
||||
if ihm.prompt_boolean(_(u"""Reboot is necessary.
|
||||
Do you want to reboot now?"""),
|
||||
default=True, level='warn') is True:
|
||||
fonctionseole.zephir("MSG",
|
||||
"Demande de redémarrage acceptée par l'utilisateur",
|
||||
COMPAT_NAME.upper())
|
||||
process.system_code(['reboot'])
|
||||
else:
|
||||
fonctionseole.zephir("MSG",
|
||||
"Demande de redémarrage refusée par l'utilisateur",
|
||||
COMPAT_NAME.upper())
|
||||
else:
|
||||
print
|
||||
ihm.print_orange(_(u'Reboot necessary'))
|
||||
time.sleep(1)
|
||||
print
|
||||
if options.auto:
|
||||
fonctionseole.zephir("MSG", "Redémarrage automatique",
|
||||
COMPAT_NAME.upper())
|
||||
process.system_code(['reboot'])
|
||||
else:
|
||||
fonctionseole.zephir("MSG", "Redémarrage du serveur à planifier",
|
||||
COMPAT_NAME.upper())
|
||||
|
||||
|
||||
def main(force_options=None, force_args=None, need_lock=True):
|
||||
"""Entry point
|
||||
"""
|
||||
global log
|
||||
options.update_from_cmdline(force_args=force_args,
|
||||
force_options=force_options)
|
||||
|
||||
try:
|
||||
# module level logger
|
||||
log = init_logging(name=u'reconfigure', level=options.log_level,
|
||||
console=['stderr', 'stddebug'],
|
||||
filename=_LOGFILENAME)
|
||||
|
||||
# Remove module name prefix from Warn/error messages emitted
|
||||
# from here
|
||||
set_formatter(log, 'stderr', 'brief')
|
||||
|
||||
# Define handlers for additional loggers
|
||||
# Those loggers are not meant for direct use
|
||||
# Log pyeole.service
|
||||
pyeole_service_log = init_logging(name=u'pyeole.service',
|
||||
level=options.log_level,
|
||||
filename=_LOGFILENAME,
|
||||
console=['stderr'])
|
||||
# Log pyeole.pkg
|
||||
pyeole_pkg_log = init_logging(name=u'pyeole.pkg',
|
||||
level=options.log_level,
|
||||
filename=_LOGFILENAME)
|
||||
passlib_log = init_logging(name=u'passlib.registry',
|
||||
level='error',
|
||||
filename=_LOGFILENAME)
|
||||
|
||||
# Disable warnings from pyeole.service
|
||||
set_filters(pyeole_service_log, 'stderr',
|
||||
['error', 'critical'])
|
||||
|
||||
if options.verbose or options.debug:
|
||||
# Enable creole logs
|
||||
creole_log = init_logging(name=u'creole', level=options.log_level,
|
||||
filename=_LOGFILENAME)
|
||||
# Define a root logger when verbose or debug is activated
|
||||
root_log = init_logging(level=options.log_level)
|
||||
else:
|
||||
# Enable creole logs
|
||||
creole_log = init_logging(name=u'creole', level=options.log_level,
|
||||
filename=_LOGFILENAME,
|
||||
console=['stderr'])
|
||||
|
||||
creolemajauto_log = init_logging(name=u'creole.majauto', level=options.log_level,
|
||||
filename=_LOGFILENAME, console=['stderr', 'stdout'])
|
||||
|
||||
ihm.print_title(_(u'Beginning of configuration'))
|
||||
# instance or reconfigure ?
|
||||
reset_compat_name()
|
||||
fonctionseole.zephir("INIT", "Début de configuration",
|
||||
COMPAT_NAME.upper())
|
||||
prepare(need_lock)
|
||||
valid_mandatory(need_lock)
|
||||
cache()
|
||||
containers()
|
||||
packages()
|
||||
run_parts(u'preservice')
|
||||
services(action=u'stop')
|
||||
run_parts(u'pretemplate')
|
||||
templates()
|
||||
if not is_valid_ip_eth0():
|
||||
log.info(_(u"eth0 network interface does not have a valid IP address."))
|
||||
log.info(_(u"Restarting networking service"))
|
||||
manage_service(u'restart', u'networking', display='console')
|
||||
templates()
|
||||
if not is_valid_ip_eth0():
|
||||
log.info(_(u"eth0 network interface does not have a valid IP address."))
|
||||
msg = _(u"Unable to obtain IP address.")
|
||||
raise NetworkConfigError(msg)
|
||||
|
||||
services(action=u'configure')
|
||||
# posttemplate/00-annuaire needs the certificates
|
||||
certificates()
|
||||
run_parts(u'posttemplate')
|
||||
# close all connections before param_kernel #17408
|
||||
client.close()
|
||||
param_kernel()
|
||||
kill_dhclient()
|
||||
services(action=u'start')
|
||||
users()
|
||||
run_parts(u'postservice')
|
||||
schedule()
|
||||
finalize(need_lock)
|
||||
ihm.print_title(_(u'Reconfiguration OK'))
|
||||
update_server()
|
||||
# IMPORTANT: do nothing after these lines
|
||||
# because the server may be rebooted
|
||||
reboot_server()
|
||||
|
||||
except (UserExit, UserExitError), err:
|
||||
unlock_actions(need_lock)
|
||||
fonctionseole.zephir("FIN", "Abandon par l'utilisateur",
|
||||
COMPAT_NAME.upper())
|
||||
raise err
|
||||
|
||||
except Exception, err:
|
||||
if options.debug:
|
||||
log.debug(err, exc_info=True)
|
||||
else:
|
||||
log.error(err)
|
||||
fonctionseole.zephir('ERR', str(err),
|
||||
COMPAT_NAME.upper(),
|
||||
console=False)
|
||||
if need_lock:
|
||||
release(LOCK_NAME, valid=False, level='system')
|
||||
raise err
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
651
creole/server.py
|
@ -1,651 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
##########################################################################
|
||||
# creole.server - distribute creole variables through REST API
|
||||
# Copyright © 2012,2013 Pôle de compétences EOLE <eole@ac-dijon.fr>
|
||||
#
|
||||
# License CeCILL:
|
||||
# * in french: http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html
|
||||
# * in english http://www.cecill.info/licences/Licence_CeCILL_V2-en.html
|
||||
##########################################################################
|
||||
|
||||
"""Distribute Creole configuration through REST API
|
||||
|
||||
Setup a daemon based on `cherrypy` listening by default on
|
||||
127.0.0.1:8000 for queries on Creole configuration.
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import threading
|
||||
|
||||
from creole import eosfunc
|
||||
|
||||
from traceback import format_exc
|
||||
|
||||
from os.path import basename, dirname, isdir, samefile, splitext
|
||||
|
||||
from pyeole.log import init_logging, getLogger
|
||||
from pyeole import scriptargs
|
||||
|
||||
from .config import configeoldir, eoledirs, eoleextradico, \
|
||||
eoleextraconfig
|
||||
from .loader import creole_loader, load_config_eol, load_extras
|
||||
|
||||
from .i18n import _
|
||||
|
||||
from tiramisu.config import Config, SubConfig, undefined
|
||||
from tiramisu.error import PropertiesOptionError
|
||||
|
||||
from pyeole.cherrypy_plugins import InotifyMonitor
|
||||
|
||||
import cherrypy
|
||||
import socket
|
||||
|
||||
from pyinotify import ProcessEvent
|
||||
from pyinotify import IN_DELETE
|
||||
from pyinotify import IN_CREATE
|
||||
from pyinotify import IN_MODIFY
|
||||
from pyinotify import IN_MOVED_TO
|
||||
from pyinotify import IN_MOVED_FROM
|
||||
|
||||
from systemd import daemon
|
||||
|
||||
import logging
|
||||
|
||||
# Global logger
|
||||
log = getLogger(__name__)
|
||||
|
||||
lock = threading.Lock()
|
||||
|
||||
num_error = [(PropertiesOptionError, 1), (KeyError, 2),
|
||||
(AttributeError, 4), (Exception, 3)]
|
||||
|
||||
# For pyinotify handler and filtering
|
||||
_INOTIFY_EOL_DIRS = [configeoldir, eoleextraconfig]
|
||||
|
||||
_INOTIFY_MASK = IN_DELETE | IN_CREATE | IN_MODIFY | IN_MOVED_TO | IN_MOVED_FROM
|
||||
|
||||
|
||||
def _inotify_filter(event):
|
||||
"""Check if the path must be excluded from being watched.
|
||||
|
||||
:param event: event to look for
|
||||
:type event: :class:`pyinotify.Event`
|
||||
:return: if the :data:`event` must be excluded
|
||||
:rtype: `bool`
|
||||
|
||||
"""
|
||||
|
||||
_INOTIFY_EOL = True
|
||||
|
||||
if isdir(event.pathname):
|
||||
# Always ok for EOLE directories
|
||||
for directory in _INOTIFY_EOL_DIRS:
|
||||
if not os.access(directory, os.F_OK):
|
||||
continue
|
||||
if samefile(event.pathname, directory):
|
||||
_INOTIFY_EOL = False
|
||||
|
||||
if not _INOTIFY_EOL:
|
||||
return {"EOL": _INOTIFY_EOL}
|
||||
|
||||
extension = splitext(event.name)[1]
|
||||
|
||||
if event.mask != IN_DELETE and not os.access(event.pathname, os.F_OK):
|
||||
log.debug(_(u'File not accessible: {0}').format(event.pathname))
|
||||
return {"EOL": True}
|
||||
|
||||
if event.mask != IN_DELETE and os.stat(event.pathname).st_size == 0:
|
||||
log.debug(_(u'File with null size: {0}').format(event.pathname))
|
||||
return {"EOL": True}
|
||||
|
||||
# Check only for files in EOLE directories
|
||||
|
||||
for directory in _INOTIFY_EOL_DIRS:
|
||||
if not os.access(directory, os.F_OK):
|
||||
continue
|
||||
if samefile(event.path, directory) or str(event.path).startswith(directory):
|
||||
_INOTIFY_EOL = extension != '.eol'
|
||||
break
|
||||
|
||||
return {"EOL": _INOTIFY_EOL}
|
||||
|
||||
|
||||
class CreoleInotifyHandler(ProcessEvent):
|
||||
"""Process inotify events
|
||||
|
||||
"""
|
||||
|
||||
_server = None
|
||||
"""Instance of :class:`CreoleServer`.
|
||||
|
||||
"""
|
||||
|
||||
def my_init(self, server):
|
||||
"""Subclass constructor.
|
||||
|
||||
This is the constructor, it is automatically called from
|
||||
:meth:`ProcessEvent.__init__()`,
|
||||
|
||||
Extra arguments passed to ``__init__()`` would be delegated
|
||||
automatically to ``my_init()``.
|
||||
|
||||
"""
|
||||
self._server = server
|
||||
|
||||
def process_default(self, event):
|
||||
"""Reload :class:`CreoleServer` on all managed inotify events
|
||||
|
||||
"""
|
||||
inotify_data = _inotify_filter(event)
|
||||
if not inotify_data["EOL"]:
|
||||
log.warn(_(u'Reload config.eol due to {0} on {1}').format(event.maskname,
|
||||
event.pathname))
|
||||
try:
|
||||
self._server.reload_eol()
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
log.debug(_(u'Filtered inotify event for {0}').format(event.pathname))
|
||||
|
||||
|
||||
class CreoleServer(object):
|
||||
"""Cherrypy application answering REST requests
|
||||
"""
|
||||
|
||||
def __init__(self, running=True):
|
||||
"""Initialize the server
|
||||
|
||||
Load the tiramisu configuration.
|
||||
|
||||
:param `bool` running: Is the web server running during server
|
||||
initialization.
|
||||
|
||||
"""
|
||||
|
||||
log.debug(_(u"Loading tiramisu configuration"))
|
||||
self.config = None
|
||||
self.reload_config(running)
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
def reload_config(self, running=True):
|
||||
lock.acquire()
|
||||
|
||||
if running:
|
||||
# Tell systemd that we are reloading the configuration
|
||||
daemon.notify('RELOADING=1')
|
||||
|
||||
try:
|
||||
log.debug(u"Set umask to 0022")
|
||||
os.umask(0022)
|
||||
reload(eosfunc)
|
||||
eosfunc.load_funcs(force_reload=True)
|
||||
self.config = creole_loader(load_extra=True, reload_config=False,
|
||||
disable_mandatory=True, owner='creoled',
|
||||
try_upgrade=False)
|
||||
if log.isEnabledFor(logging.DEBUG) and self.config.impl_get_information('load_error', False):
|
||||
msg = _('Load creole configuration with errors')
|
||||
log.debug(msg)
|
||||
ret = self.response()
|
||||
|
||||
except Exception, err:
|
||||
# Avoid using format as exception message could be undecoded
|
||||
msg = _('Unable to load creole configuration: ')
|
||||
msg += unicode(str(err), 'utf-8')
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
log.debug(msg, exc_info=True)
|
||||
else:
|
||||
log.error(msg)
|
||||
|
||||
#self.config = None
|
||||
ret = self.response(status=3)
|
||||
|
||||
if running:
|
||||
# Tell systemd that we are now ready again
|
||||
daemon.notify('READY=1')
|
||||
|
||||
lock.release()
|
||||
|
||||
return ret
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
def reload_eol(self):
|
||||
if not self.config:
|
||||
return self.reload_config()
|
||||
|
||||
lock.acquire()
|
||||
|
||||
# Tell systemd that we are reloading the configuration
|
||||
daemon.notify(u'RELOADING=1')
|
||||
|
||||
config = Config(self.config.cfgimpl_get_description())
|
||||
try:
|
||||
load_config_eol(config)
|
||||
except Exception, err:
|
||||
# Avoid using format as exception message could be undecoded
|
||||
msg = _('Unable to load creole configuration from config.eol: ')
|
||||
msg += unicode(str(err), 'utf-8')
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
log.debug(msg, exc_info=True)
|
||||
else:
|
||||
log.error(msg)
|
||||
|
||||
#self.config = None
|
||||
ret = self.response(status=3)
|
||||
try:
|
||||
load_extras(config)
|
||||
except Exception, err:
|
||||
msg = _('Unable to load creole configuration from extra: ')
|
||||
msg += unicode(str(err), 'utf-8')
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
log.debug(msg, exc_info=True)
|
||||
else:
|
||||
log.error(msg)
|
||||
|
||||
#self.config = None
|
||||
ret = self.response(status=3)
|
||||
else:
|
||||
config.read_only()
|
||||
self.config = config
|
||||
ret = self.response()
|
||||
|
||||
|
||||
# Tell systemd that we are now ready again
|
||||
daemon.notify(u'READY=1')
|
||||
|
||||
lock.release()
|
||||
|
||||
return ret
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
def valid_mandatory(self):
|
||||
if self.config is None:
|
||||
return self._no_config()
|
||||
try:
|
||||
msg = _(u'Not all variables are set, please configure your system:')
|
||||
error = False
|
||||
mandatory_errors = set(self.config.cfgimpl_get_values().mandatory_warnings(force_permissive=True))
|
||||
if mandatory_errors != set():
|
||||
error = True
|
||||
msg += ' ' + _('these variables are mandatory') + ' (' + ', '.join(mandatory_errors) + ')'
|
||||
force_vars = set()
|
||||
for force_store_var in self.config.impl_get_information('force_store_vars'):
|
||||
if force_store_var not in mandatory_errors:
|
||||
try:
|
||||
getattr(self.config, force_store_var)
|
||||
force_vars.add(force_store_var)
|
||||
except:
|
||||
pass
|
||||
if force_vars != set():
|
||||
error = True
|
||||
msg += ' ' + _('these variables must be set in the config file') + ' (' + ', '.join(force_vars) + ')'
|
||||
|
||||
if error:
|
||||
log.debug(mandatory_errors)
|
||||
return self.response(msg, 3)
|
||||
except Exception, err:
|
||||
log.debug(err, exc_info=True)
|
||||
return self.response(str(err), 3)
|
||||
return self.response()
|
||||
|
||||
@staticmethod
|
||||
def response(response='OK', status=0):
|
||||
"""Generate a normalized response
|
||||
|
||||
:param response: message of the response
|
||||
:type response: `object`
|
||||
:param status: status code for the response, ``0`` for OK
|
||||
:type status: `int`
|
||||
:return: response of the form: ``{"status": `int`, "response": `message`}``
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
return {u'status': status, u'response': response}
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
def get(self, *args, **kwargs):
|
||||
"""Return the content of a tiramisu path
|
||||
|
||||
:param args: path elements of the query
|
||||
:type args: `list`
|
||||
:return: Value of a single variable or sub tree
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
def _remove_properties_error(val):
|
||||
new_val = []
|
||||
for v in val:
|
||||
if isinstance(v, PropertiesOptionError):
|
||||
new_val.append({'err': str(v)})
|
||||
else:
|
||||
new_val.append(v)
|
||||
return new_val
|
||||
|
||||
if self.config is None:
|
||||
return self._no_config()
|
||||
try:
|
||||
config = self.config
|
||||
if len(args) != 0:
|
||||
subconfig = getattr(config, '.'.join(args))
|
||||
else:
|
||||
subconfig = config
|
||||
if isinstance(subconfig, SubConfig):
|
||||
if u'variable' in kwargs:
|
||||
name = kwargs[u'variable']
|
||||
path = subconfig.find_first(byname=name,
|
||||
type_=u'path',
|
||||
check_properties=False)
|
||||
try:
|
||||
val = getattr(config, path)
|
||||
except PropertiesOptionError as err:
|
||||
if err.proptype == ['mandatory']:
|
||||
raise Exception(_(u'Mandatory variable {0} '
|
||||
u'is not set.').format(name))
|
||||
raise err
|
||||
if isinstance(val, list):
|
||||
val = _remove_properties_error(val)
|
||||
return self.response(val)
|
||||
else:
|
||||
withoption = kwargs.get(u'withoption')
|
||||
withvalue = kwargs.get(u'withvalue')
|
||||
if withvalue is None:
|
||||
withvalue = undefined
|
||||
dico = subconfig.make_dict(withoption=withoption, withvalue=withvalue)
|
||||
for key, val in dico.items():
|
||||
if isinstance(val, list):
|
||||
dico[key] = _remove_properties_error(val)
|
||||
return self.response(dico)
|
||||
else:
|
||||
#if config is a value, not a SubConfig
|
||||
if isinstance(subconfig, list):
|
||||
subconfig = _remove_properties_error(subconfig)
|
||||
return self.response(subconfig)
|
||||
except Exception, err:
|
||||
log.debug(err, exc_info=True)
|
||||
for error_match in num_error:
|
||||
if isinstance(err, error_match[0]):
|
||||
break
|
||||
return self.response(str(err), error_match[1])
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
def list(self, *args):
|
||||
"""List subtree pointed by :data:`args`
|
||||
|
||||
List the nodes and variables under a path.
|
||||
|
||||
If the path point to a single variable, then return its value.
|
||||
|
||||
:param args: path elements of the query
|
||||
:type args: `list`
|
||||
|
||||
:return: Nodes and/or variables under a path, or value of a
|
||||
variable
|
||||
:rtype: `list`
|
||||
|
||||
"""
|
||||
if self.config is None:
|
||||
return self._no_config()
|
||||
try:
|
||||
config = self.config
|
||||
if len(args) == 0:
|
||||
# root of configuration
|
||||
obj = config
|
||||
else:
|
||||
# Path to a sub configuration
|
||||
base = '.'.join(args)
|
||||
obj = getattr(config, base)
|
||||
if isinstance(obj, SubConfig):
|
||||
# Path is a node
|
||||
groups = [u'%s/' % g[0] for g in obj.iter_groups()]
|
||||
items = [u'%s' % i[0] for i in obj]
|
||||
return self.response(groups + items)
|
||||
else:
|
||||
# Path is a leaf
|
||||
value = self.get(*args)[u'response']
|
||||
return self.response([value])
|
||||
except Exception, err:
|
||||
log.debug(err, exc_info=True)
|
||||
for error_match in num_error:
|
||||
if isinstance(err, error_match[0]):
|
||||
break
|
||||
return self.response(str(err), error_match[1])
|
||||
|
||||
def _no_config(self):
|
||||
"""Return an error message when no configuration is loaded
|
||||
|
||||
:return: a failure response
|
||||
:rtype: `dict`
|
||||
|
||||
"""
|
||||
return self.response(_(u'No configuration'), status=3)
|
||||
|
||||
class CreoleDaemon(object):
|
||||
"""Run the CreoleServer
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize the cherrypy daemon
|
||||
"""
|
||||
|
||||
# Built-in configuration
|
||||
self.argparse = self._load_argparse()
|
||||
# Read command line arguments
|
||||
self.option = self.argparse.parse_args()
|
||||
if self.option.verbose:
|
||||
self.option.log_level = u'info'
|
||||
if self.option.debug:
|
||||
self.option.log_level = u'debug'
|
||||
self._configure_log()
|
||||
|
||||
def _load_argparse(self):
|
||||
"""Parse command line arguments
|
||||
|
||||
:return: command line parser
|
||||
:rtype: `argparse.ArgumentParser`
|
||||
|
||||
"""
|
||||
parser = argparse.ArgumentParser(description=u'Run creole daemon',
|
||||
parents=[scriptargs.logging('warning')],
|
||||
conflict_handler='resolve')
|
||||
parser.add_argument("-b", "--base-dir", default='/tmp',
|
||||
help=_(u"Base directory in which the server"
|
||||
" is launched (default: /tmp)"))
|
||||
parser.add_argument("-c", "--conf-file",
|
||||
default='/etc/eole/creoled.conf',
|
||||
help=_(u"Configuration file of the server"
|
||||
" (default: /etc/eole/creoled.conf"))
|
||||
parser.add_argument("-d", "--daemon", action='store_true',
|
||||
help=_(u"Run the server as a daemon (default: false)"))
|
||||
parser.add_argument("-l", "--listen", action='store',
|
||||
default='127.0.0.1:8000',
|
||||
help=_(u"Listen on the specified IP:PORT"
|
||||
" (default: 127.0.0.1:8000)"))
|
||||
parser.add_argument("-m", "--mount-base", default='/',
|
||||
help=_(u"Base under which the application is mounted"
|
||||
" (default: /)"))
|
||||
parser.add_argument("-p", "--pidfile",
|
||||
default='/tmp/{0}.pid'.format(
|
||||
basename(sys.argv[0])),
|
||||
help=_(u"Base under which the application is mounted"
|
||||
" (default: /)"))
|
||||
parser.add_argument("-u", "--user", default='nobody',
|
||||
help=_(u"User of the running process"
|
||||
" (default: nobody)"))
|
||||
parser.add_argument("-g", "--group", default='nogroup',
|
||||
help=_(u"Group of the running process"
|
||||
" (default: nogroup)"))
|
||||
parser.add_argument("--umask", default='0640',
|
||||
help=_(u"Umask of the running process"
|
||||
" (default: 0644)"))
|
||||
return parser
|
||||
|
||||
def _get_conf(self, name):
|
||||
"""Map command line arguments to cherrypy configuration
|
||||
|
||||
:param name: internal name of argparse option store
|
||||
:returns: piece of cherrypy configuration
|
||||
:rtype: `dict`
|
||||
"""
|
||||
try:
|
||||
option_map = { 'listen' :
|
||||
{ 'server.socket_host' :
|
||||
self.option.listen.split(':')[0],
|
||||
'server.socket_port' :
|
||||
int(self.option.listen.split(':')[1])},
|
||||
}
|
||||
return option_map[name]
|
||||
except KeyError:
|
||||
return {}
|
||||
|
||||
def load_conf(self):
|
||||
"""Load daemon configuration
|
||||
|
||||
Take care to load the configuration in proper order and avoid
|
||||
overriding configuration file parameters with default command
|
||||
line arguments.
|
||||
|
||||
Order is:
|
||||
|
||||
- default values from command line option parser
|
||||
|
||||
- option from a configuration file
|
||||
|
||||
- command line arguments
|
||||
|
||||
"""
|
||||
# Load all default values
|
||||
config = {'engine.autoreload.on': False}
|
||||
for opt in vars(self.option):
|
||||
config.update(self._get_conf(opt))
|
||||
|
||||
cherrypy.config.update( { 'global' : config} )
|
||||
|
||||
# Load configuration file
|
||||
if os.access(self.option.conf_file, os.F_OK):
|
||||
cherrypy.config.update(self.option.conf_file)
|
||||
|
||||
# Override config file options present on the command line
|
||||
config = {}
|
||||
for opt in sys.argv[1:]:
|
||||
config.update(self._get_conf(opt))
|
||||
cherrypy.config.update( {'global' : config } )
|
||||
|
||||
def _configure_log(self):
|
||||
"""Configure the module logger
|
||||
|
||||
Avoid logging apache style time since the logger does it.
|
||||
|
||||
"""
|
||||
global log
|
||||
log_filename = None
|
||||
if self.option.daemon:
|
||||
log_filename = u'/var/log/creoled.log'
|
||||
|
||||
log = init_logging(name=u'creoled', as_root=True,
|
||||
level=self.option.log_level,
|
||||
console=not self.option.daemon,
|
||||
syslog=None,
|
||||
filename=log_filename)
|
||||
|
||||
# CherryPy does not handle logs
|
||||
cherrypy.log.error_file = None
|
||||
cherrypy.log.access_file = None
|
||||
# Do not output on screen
|
||||
cherrypy.log.screen = False
|
||||
# Hack to avoid time in log message
|
||||
cherrypy.log.time = lambda : ''
|
||||
|
||||
def run(self):
|
||||
"""Start the cherrypy server.
|
||||
"""
|
||||
engine = cherrypy.engine
|
||||
|
||||
# Load server but we are not running now
|
||||
# Do not let it tell systemd otherwise
|
||||
server = CreoleServer(running=False)
|
||||
|
||||
inotify_handler = CreoleInotifyHandler(server=server)
|
||||
|
||||
if hasattr(engine, "signal_handler"):
|
||||
engine.signal_handler.subscribe()
|
||||
# Error exit on SIGINT (Ctrl-C) #6177
|
||||
engine.signal_handler.set_handler(2, self._kill)
|
||||
|
||||
if hasattr(engine, "console_control_handler"):
|
||||
engine.console_control_handler.subscribe()
|
||||
|
||||
cherrypy.tree.mount(server, self.option.mount_base,
|
||||
config={'global' : {} })
|
||||
|
||||
# Merge configuration from built-in defaults, configuration file and command line
|
||||
self.load_conf()
|
||||
|
||||
if server.config is None:
|
||||
msg = _(u"No configuration found: do not check for container mode.")
|
||||
log.warn(msg)
|
||||
elif server.config.creole.general.mode_conteneur_actif == 'oui':
|
||||
container_ip = server.config.creole.containers.adresse_ip_br0
|
||||
container_port = cherrypy.config.get('server.socket_port')
|
||||
# Start a server for containers if ip can be bounded
|
||||
try:
|
||||
container_socket = socket.socket(socket.AF_INET,
|
||||
socket.SOCK_STREAM)
|
||||
container_socket.setsockopt(socket.SOL_SOCKET,
|
||||
socket.SO_REUSEADDR,
|
||||
1)
|
||||
container_socket.bind((container_ip, container_port))
|
||||
container_socket.close()
|
||||
except socket.error, err:
|
||||
log.error(_(u"Unable to listen for containers: {0}").format(err))
|
||||
else:
|
||||
container_server = cherrypy._cpserver.Server()
|
||||
container_server.socket_host = container_ip
|
||||
container_server.socket_port = container_port
|
||||
container_server.subscribe()
|
||||
|
||||
monitor = InotifyMonitor(engine, inotify_handler)
|
||||
monitor.subscribe()
|
||||
|
||||
monitor.watch.add_watch(_INOTIFY_EOL_DIRS, _INOTIFY_MASK, auto_add=True, rec=True)
|
||||
|
||||
if self.option.pidfile:
|
||||
cherrypy.process.plugins.PIDFile(engine,
|
||||
self.option.pidfile).subscribe()
|
||||
|
||||
if self.option.daemon:
|
||||
cherrypy.process.plugins.Daemonizer(engine).subscribe()
|
||||
|
||||
# Drop privileges
|
||||
cherrypy.process.plugins.DropPrivileges(engine,
|
||||
uid = self.option.user,
|
||||
gid = self.option.group,
|
||||
umask = self.option.umask)
|
||||
|
||||
# Let's start the CherryPy engine so that
|
||||
# everything works
|
||||
engine.start()
|
||||
|
||||
# Tell systemd that we are ready
|
||||
daemon.notify(u'READY=1')
|
||||
|
||||
# Run the engine main loop
|
||||
engine.block()
|
||||
|
||||
@staticmethod
|
||||
def _kill():
|
||||
"""Exit the server with non zero exit code
|
||||
"""
|
||||
sys.exit(1)
|
||||
|
||||
if __name__ == '__main__':
|
||||
daemon = CreoleDaemon()
|
||||
daemon.run()
|
|
@ -1,45 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from pyeole.service import manage_services
|
||||
from pyeole.decorator import deprecated
|
||||
|
||||
from .i18n import _
|
||||
|
||||
|
||||
@deprecated(_(u'Use new API “manage_services()”'))
|
||||
def instance_services(container=None):
|
||||
"""
|
||||
configure the services
|
||||
"""
|
||||
manage_services(u'configure', container=container)
|
||||
|
||||
|
||||
@deprecated(_(u'Use new API “manage_services()”'))
|
||||
def stop_services(container=None):
|
||||
"""Stop all services
|
||||
|
||||
The networking service is never stopped.
|
||||
|
||||
@param container: name of the container
|
||||
@type container: C{str}
|
||||
"""
|
||||
manage_services(u'stop', container=container)
|
||||
|
||||
|
||||
@deprecated(_(u'Use new API “manage_services()”'))
|
||||
def start_services(container=None):
|
||||
"""Start all services
|
||||
|
||||
The networking service is a special case.
|
||||
|
||||
@param container: name of the container
|
||||
@type container: C{str}
|
||||
"""
|
||||
manage_services(u'start', container=container)
|
||||
|
||||
|
||||
@deprecated(_(u'Use new API “manage_services()”'))
|
||||
def restart_services(container=None):
|
||||
"""
|
||||
restart the services
|
||||
"""
|
||||
manage_services(u'restart', container=container)
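Since every helper above is deprecated, here is a short reminder of the replacement call (a sketch; the container name 'web' is only an example):

from pyeole.service import manage_services

# configure, then restart, the services of a single container
manage_services(u'configure', container=u'web')
manage_services(u'restart', container=u'web')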
|
|
@ -1,799 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#-*- coding: utf-8 -*-
|
||||
"""
|
||||
|
||||
import utility for config.eol 2.2 or config.eol 2.3 configuration files
|
||||
to config.eol 2.4
|
||||
|
||||
usage:
|
||||
|
||||
%prog <config_file_name>
|
||||
|
||||
"""
|
||||
from ConfigParser import ConfigParser
|
||||
|
||||
from tiramisu.option import SymLinkOption, ChoiceOption
|
||||
from .eosfunc import is_empty
|
||||
from .var_loader import convert_value
|
||||
import re
|
||||
from itertools import product
|
||||
|
||||
from .i18n import _
|
||||
|
||||
# ____ logger utility ____
|
||||
# log_filename = u'/var/log/creole.log'
|
||||
# try:
|
||||
# from pyeole.log import init_logging
|
||||
# except:
|
||||
# # compatibilité pour Zéphir 2.3
|
||||
# from pyeole.log import make_logger
|
||||
# log = make_logger(u'creole3.upgrade',
|
||||
# logfile=log_filename,
|
||||
# loglevel='INFO')
|
||||
# else:
|
||||
# log = init_logging(name=u'creoleUpgrade',
|
||||
# level='info',
|
||||
# console=False,
|
||||
# syslog=None,
|
||||
# filename=log_filename)
|
||||
|
||||
import logging
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
KEYS = ['val', 'valprec', 'valdefault']
|
||||
|
||||
|
||||
def migration_23_to_tiramisu(opt, val):
|
||||
if not opt.impl_is_multi():
|
||||
if (val == [] or val == ['']) and not isinstance(opt, ChoiceOption):
|
||||
val = None
|
||||
else:
|
||||
if val == []:
|
||||
val = None
|
||||
else:
|
||||
try:
|
||||
val = convert_value(opt, val[0])
|
||||
except ValueError:
|
||||
#s'il y une erreur sur la conversion de la variable
|
||||
#met la valeur incorrect pour que la valeur soit
|
||||
#marquée en erreur dans tiramisu (donc affiché dans
|
||||
#l'interface)
|
||||
val = val[0]
|
||||
else:
|
||||
if val == ['']:
|
||||
val = []
|
||||
else:
|
||||
new_val = []
|
||||
for v in val:
|
||||
if v == '':
|
||||
new_val.append(None)
|
||||
else:
|
||||
try:
|
||||
new_val.append(convert_value(opt, v))
|
||||
except ValueError:
|
||||
#s'il y une erreur sur la conversion de la variable
|
||||
#met la valeur incorrect pour que la valeur soit
|
||||
#marquée en erreur dans tiramisu (donc affiché dans
|
||||
#l'interface)
|
||||
new_val.append(v)
|
||||
val = new_val
|
||||
return val
|
||||
|
||||
class Dico(ConfigParser):
|
||||
|
||||
def get_val(self, var, default=''):
|
||||
"""
|
||||
Return the value of a variable
|
||||
"""
|
||||
if self.has_section(var):
|
||||
return self.get(var, 'val')
|
||||
return default
|
||||
|
||||
def copy(self, old, new, keep=True):
|
||||
"""
|
||||
Copy (or rename) a variable
|
||||
to another one
|
||||
"""
|
||||
if self.has_section(old):
|
||||
if not self.has_section(new):
|
||||
self.add_section(new)
|
||||
for key in KEYS:
|
||||
value = self.get(old, key)
|
||||
self.set(new, key, value)
|
||||
if keep:
|
||||
log.info(_(u"Variable {0} has been copied in {1}").format(old, new))
|
||||
else:
|
||||
self.remove_section(old)
|
||||
log.info(_(u"Variable {0} has been renamed to {1}").format(old, new))
|
||||
|
||||
def move(self, old, new):
|
||||
"""
|
||||
Rename a variable
|
||||
to another one
|
||||
"""
|
||||
self.copy(old, new, keep=False)
|
||||
|
||||
def remove(self, old):
|
||||
if self.has_section(old):
|
||||
self.remove_section(old)
|
||||
log.info(_(u"Variable {0} has been removed").format(old))
|
||||
|
||||
def simple2multi(self, src, new):
|
||||
"""
|
||||
n variables simples => 1 multi
|
||||
"""
|
||||
res = []
|
||||
for var in src:
|
||||
if self.has_section(var):
|
||||
try:
|
||||
value = eval(self.get(var, 'val'))[0]
|
||||
if value != '':
|
||||
res.append(value)
|
||||
except:
|
||||
log.error(_(u"Source variable {0} invalid").format(var))
|
||||
if res != []:
|
||||
self.fill_var(new, res)
|
||||
|
||||
def fill_var(self, var, val, valprec=[], valdefault=[]):
|
||||
"""
|
||||
Create or update a variable
|
||||
"""
|
||||
if type(val) != list:
|
||||
val = [val]
|
||||
if not self.has_section(var):
|
||||
self.add_section(var)
|
||||
log.info(_(u"Variable updated: {0} = {1}").format(var, val))
|
||||
self.set(var, 'val', str(val))
|
||||
self.set(var, 'valprec', valprec)
|
||||
self.set(var, 'valdefault', valdefault)
|
||||
|
||||
def save(self, fichier):
|
||||
"""
|
||||
Save the result to a file
|
||||
"""
|
||||
fic = file(fichier, 'w')
|
||||
self.write(fic)
|
||||
fic.close()
|
||||
|
||||
def upgrade(config, configfile):
|
||||
"""
|
||||
Upgrade a .eol file
|
||||
from 2.2 to 2.4
|
||||
or from 2.3 to 2.4
|
||||
|
||||
:param config: tiramisu config built from the 2.4 dictionaries
|
||||
:param configfile: path of the config.eol file to upgrade
|
||||
"""
|
||||
log.info(_(u"config.eol upgrade started"))
|
||||
dico = Dico()
|
||||
dico.read(configfile)
|
||||
version = get_version(dico)
|
||||
if version == '2.2':
|
||||
upgrade22to23(dico)
|
||||
upgrade23to24(dico)
|
||||
# FIXME do stuff on 2.4 variables
|
||||
# chargement des valeurs par default depuis les dicos XML 2.4
|
||||
owner = u"upgrade"
|
||||
store_dico = export_to_store(dico, config)
|
||||
return store_dico, version
|
||||
|
||||
def export_to_store(dico, config):
|
||||
"""
|
||||
export from a dico to a store that has been updated with the
|
||||
creole 2.4 default values::
|
||||
|
||||
{"libelle_etab": {"owner": "gen_config", "val": "monchapet"},
|
||||
{"owner": "gen_config", "val": ["0.0.0.0"]}
|
||||
|
||||
:param dico: configparser dict
|
||||
:returns: config parser dico
|
||||
"""
|
||||
default_owner = u'upgrade'
|
||||
store = {}
|
||||
# modification des settings pour accéder aux options disabled
|
||||
config.cfgimpl_get_settings().remove('disabled')
|
||||
old_format = False
|
||||
for section in dico.sections():
|
||||
val = eval(dico.get_val(section))
|
||||
try:
|
||||
path = config.find_first(byname=section, type_='path', check_properties=False)
|
||||
if not path.startswith('creole.') or path.startswith('creole.containers.'):
|
||||
continue
|
||||
|
||||
opt = config.unwrap_from_path(path)
|
||||
if isinstance(opt, SymLinkOption):
|
||||
continue
|
||||
val = migration_23_to_tiramisu(opt, val)
|
||||
except AttributeError:
|
||||
log.error(_(u"Unknown variable: {0}").format(section))
|
||||
old_format = True
|
||||
if val is None or val == []:
|
||||
continue
|
||||
store[section] = {"owner": default_owner, "val": val}
|
||||
if old_format:
|
||||
store[section]['old_format'] = True
|
||||
return store
|
||||
|
||||
def upgrade22to23(dico):
|
||||
"""
|
||||
Upgrade a .eol file
|
||||
from 2.2 to 2.3
|
||||
|
||||
:param dico: configparser instance
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.2', '2.3'))
|
||||
# famille General
|
||||
dico.move('proxy', 'activer_proxy_client')
|
||||
dico.move('proxy_server', 'proxy_client_adresse')
|
||||
dico.move('proxy_port', 'proxy_client_port')
|
||||
dico.simple2multi(['serveur_maj', 'serveur_maj2'], 'serveur_maj')
|
||||
# spécifique Amon
|
||||
domaine = dico.get_val('nom_domaine_academique')
|
||||
if domaine != '':
|
||||
if '.' in domaine:
|
||||
ac, dom = eval(domaine)[0].rsplit('.', 1)
|
||||
else:
|
||||
# gère le cas particulier de sphynx ou le suffixe n'était pas
|
||||
# dans le domaine académique (.fr par défaut)
|
||||
ac = eval(domaine)[0]
|
||||
dom = 'fr'
|
||||
dico.fill_var('nom_academie', ac)
|
||||
dico.fill_var('suffixe_domaine_academique', dom)
|
||||
# rien sur Zéphir 2.2
|
||||
if dico.has_section('ip_ssh_eth0'):
|
||||
# ip/netmask facultatifs sur Scribe-2.2
|
||||
if 'oui' in dico.get_val('ssh_eth0') and dico.get_val('ip_ssh_eth0') == '[]':
|
||||
dico.fill_var('ip_ssh_eth0', '0.0.0.0')
|
||||
dico.fill_var('netmask_ssh_eth0', '0.0.0.0')
|
||||
# pas de ssh_eth0 sur Horus-2.2
|
||||
if not dico.has_section('ssh_eth0'):
|
||||
# FIXME ip_ssh_eth0 semble faculatif
|
||||
dico.fill_var('ssh_eth0', 'oui')
|
||||
# familles Interface-X
|
||||
for num in range(0, 5):
|
||||
dico.copy('ssh_eth%s' % num, 'admin_eth%s' % num)
|
||||
dico.copy('ip_ssh_eth%s' % num, 'ip_admin_eth%s' % num)
|
||||
dico.copy('netmask_ssh_eth%s' % num, 'netmask_admin_eth%s' % num)
|
||||
dico.move('agregation', 'activer_agregation')
|
||||
|
||||
# famille Services
|
||||
dico.move('cups', 'activer_cups')
|
||||
dico.move('ftp_perso', 'activer_proftpd')
|
||||
dico.move('ead_web', 'activer_ead_web')
|
||||
dico.move('apache', 'activer_apache')
|
||||
dico.move('mysql', 'activer_mysql')
|
||||
dico.move('xinet_interbase', 'activer_interbase')
|
||||
if 'oui' in dico.get_val('sso'):
|
||||
dico.fill_var('activer_sso', 'local')
|
||||
else:
|
||||
dico.fill_var('activer_sso', 'non')
|
||||
|
||||
# migration DHCP
|
||||
dhcp = dico.get_val('dhcp', None)
|
||||
if dhcp is not None:
|
||||
dico.move('dhcp', 'activer_dhcp')
|
||||
if dico.get_val('adresse_network_dhcp', None) is None:
|
||||
#migration d'un Horus 2.2
|
||||
len_dhcp = len(eval(dico.get_val('ip_basse_dhcp', "[]")))
|
||||
#recuperation des variables a migrer
|
||||
adresse_network_dhcp = eval(dico.get_val("adresse_network_eth0"))
|
||||
dico.fill_var("adresse_network_dhcp", adresse_network_dhcp * len_dhcp)
|
||||
adresse_netmask_dhcp = eval(dico.get_val("adresse_netmask_eth0"))
|
||||
dico.fill_var("adresse_netmask_dhcp", adresse_netmask_dhcp * len_dhcp)
|
||||
adresse_ip_gw_dhcp = eval(dico.get_val("adresse_ip_gw", "[]"))
|
||||
if adresse_ip_gw_dhcp != []:
|
||||
dico.fill_var("adresse_ip_gw_dhcp", adresse_ip_gw_dhcp * len_dhcp)
|
||||
nom_domaine_dhcp = eval(dico.get_val("nom_domaine_local", "[]"))
|
||||
if nom_domaine_dhcp != []:
|
||||
dico.fill_var("nom_domaine_dhcp", nom_domaine_dhcp * len_dhcp)
|
||||
adresse_ip_dns_dhcp = eval(dico.get_val("adresse_ip_dns", "[]"))
|
||||
if adresse_ip_dns_dhcp != []:
|
||||
dico.fill_var("adresse_ip_dns_dhcp", [adresse_ip_dns_dhcp[0]] * len_dhcp)
|
||||
|
||||
# famille Messagerie
|
||||
dico.move('passerelle_smtp_aca', 'passerelle_smtp')
|
||||
dico.move('spamassassin', 'activer_spamassassin')
|
||||
if 'oui' in dico.get_val('courier_imap'):
|
||||
if 'oui' in dico.get_val('courier_pop'):
|
||||
dico.fill_var('activer_courier', 'pop - imap')
|
||||
else:
|
||||
dico.fill_var('activer_courier', 'imap')
|
||||
elif 'oui' in dico.get_val('courier_pop'):
|
||||
dico.fill_var('activer_courier', 'pop')
|
||||
else:
|
||||
dico.fill_var('activer_courier', 'non')
|
||||
# Zéphir
|
||||
dico.move('serveur_smtp', 'passerelle_smtp')
|
||||
dico.move('compte_smtp', 'system_mail_from')
|
||||
if '465' in dico.get_val('port_smtp'):
|
||||
dico.fill_var('tls_smtp', 'port 465')
|
||||
|
||||
# famille Client_ldap
|
||||
dico.move('base_ldap', 'ldap_base_dn')
|
||||
serveur_ldap = dico.get_val('serveur_ldap', '[]')
|
||||
if serveur_ldap != '[]':
|
||||
dico.move('serveur_ldap', 'adresse_ip_ldap')
|
||||
if eval(serveur_ldap)[0] not in ['127.0.0.1', 'localhost']:
|
||||
dico.fill_var('activer_client_ldap', 'distant')
|
||||
|
||||
# famille Eole-sso
|
||||
dico.move('adresse_ip_sso', 'eolesso_adresse')
|
||||
dico.move('port_sso', 'eolesso_port')
|
||||
# eolesso_ldap (multi)
|
||||
dico.move('ldap_sso', 'eolesso_ldap')
|
||||
dico.move('port_ldap_sso', 'eolesso_port_ldap')
|
||||
dico.move('base_ldap_sso', 'eolesso_base_ldap')
|
||||
dico.move('sso_ldap_label', 'eolesso_ldap_label')
|
||||
dico.move('sso_ldap_reader', 'eolesso_ldap_reader')
|
||||
dico.move('sso_ldap_reader_passfile', 'eolesso_ldap_reader_passfile')
|
||||
# la "suite"
|
||||
dico.move('adresse_sso_parent', 'eolesso_adresse_parent')
|
||||
dico.move('port_sso_parent', 'eolesso_port_parent')
|
||||
dico.move('sso_pam_securid', 'eolesso_pam_securid')
|
||||
dico.move('sso_cert', 'eolesso_cert')
|
||||
dico.move('sso_ca_location', 'eolesso_ca_location')
|
||||
dico.move('sso_session_timeout', 'eolesso_session_timeout')
|
||||
dico.move('sso_css', 'eolesso_css')
|
||||
|
||||
# famille Applications web
|
||||
dico.move('phpmyadmin', 'activer_phpmyadmin')
|
||||
dico.move('posh', 'activer_envole')
|
||||
dico.move('web_domain', 'web_url')
|
||||
dico.move('web_default', 'web_redirection')
|
||||
posh_path = dico.get_val('posh_path', '[]')
|
||||
if posh_path != '[]' and eval(posh_path)[0] != '':
|
||||
dico.fill_var('alias_envole', '/' + eval(posh_path)[0])
|
||||
|
||||
# famille Bacula
|
||||
if 'oui' in "%s%s%s" % (dico.get_val('active_bacula_dir'),
|
||||
dico.get_val('active_bacula_fd'),
|
||||
dico.get_val('active_bacula_sd')):
|
||||
dico.fill_var('activer_bacula', 'oui')
|
||||
dico.move('active_bacula_dir', 'activer_bacula_dir')
|
||||
dico.move('active_bacula_sd', 'activer_bacula_sd')
|
||||
# bacula_fd n'est plus géré
|
||||
else:
|
||||
dico.fill_var('activer_bacula', 'non')
|
||||
|
||||
# famille Clamav
|
||||
dico.move('enable_clamd', 'dansguardian_clam')
|
||||
|
||||
# famille Certifs-ssl
|
||||
dico.move('ssl_serveur_name', 'ssl_server_name')
|
||||
|
||||
# famille Authentification
|
||||
dico.move('active_nufw', 'activer_nufw')
|
||||
dico.move('freeradius', 'activer_freeradius')
|
||||
|
||||
# famille Logs
|
||||
if 'Oui' in dico.get_val('activate_tls'):
|
||||
dico.fill_var('rsyslog_tls', 'oui')
|
||||
|
||||
# famille Reverse proxy
|
||||
revprox = dico.get_val('revprox_domainname', '[]')
|
||||
if revprox != '[]' and eval(revprox)[0] != '':
|
||||
dico.fill_var('revprox_activate_http', 'oui')
|
||||
|
||||
# famille réseau avancé
|
||||
route = dico.get_val('route_adresse', '[]')
|
||||
if route != '[]' and eval(route)[0] != '':
|
||||
dico.fill_var('activer_route', 'oui')
|
||||
|
||||
# famille Vpn-pki
|
||||
dico.simple2multi(['url_crl1', 'url_crl2'], 'url_crl')
|
||||
|
||||
|
||||
def upgrade23to24(dico):
|
||||
"""
|
||||
Upgrade a .eol file
|
||||
from 2.3 to 2.4
|
||||
|
||||
:param dico: configparser instance
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.3', '2.4'))
|
||||
cache_dir = dico.get_val('cache_dir', '[]')
|
||||
if cache_dir != '[]' and eval(cache_dir)[0] == '/var/spool/squid':
|
||||
dico.fill_var('cache_dir', '')
|
||||
|
||||
system_mail_to = dico.get_val('system_mail_to', '[]')
|
||||
if system_mail_to != '[]' and eval(system_mail_to)[0] == 'postmaster':
|
||||
dico.fill_var('system_mail_to', '')
|
||||
|
||||
varname = 'alias_gw_eth0'
|
||||
var = dico.get_val(varname, '[]')
|
||||
if var != '[]' and eval(var)[0] == 'aucun':
|
||||
dico.fill_var(varname, '')
|
||||
|
||||
for i in range(0, 5):
|
||||
dico.move('adresse_ip_vlan_eth{0}'.format(i), 'vlan_ip_eth{0}'.format(i))
|
||||
dico.move('adresse_netmask_vlan_eth{0}'.format(i), 'vlan_netmask_eth{0}'.format(i))
|
||||
dico.move('adresse_network_vlan_eth{0}'.format(i), 'vlan_network_eth{0}'.format(i))
|
||||
dico.move('adresse_broadcast_vlan_eth{0}'.format(i), 'vlan_broadcast_eth{0}'.format(i))
|
||||
dico.move('adresse_gw_vlan_eth{0}'.format(i), 'vlan_gw_eth{0}'.format(i))
|
||||
dico.move('id_vlan_eth{0}'.format(i), 'vlan_id_eth{0}'.format(i))
|
||||
|
||||
varname = 'vlan_gw_eth0'
|
||||
var = dico.get_val(varname, '[]')
|
||||
if var != '[]' and eval(var)[0] == 'aucun':
|
||||
dico.fill_var(varname, '')
|
||||
|
||||
dico.move('proxy_eth0_adresse', 'proxy_eth0_ip')
|
||||
dico.move('proxy_eth0_network', 'proxy_eth0_network')
|
||||
dico.move('nom_interface1', 'nom_zone_eth1')
|
||||
dico.move('era_proxy_bypass', 'era_proxy_bypass_eth1')
|
||||
dico.move('smb_adresse_ip_wins', 'smb_wins_server')
|
||||
|
||||
dico.remove('http_port')
|
||||
dico.remove('http_port_2')
|
||||
dico.remove('test_nutmaster')
|
||||
dico.remove('test_activer_routage_ipv6')
|
||||
dico.remove('test_activer_kerberos')
|
||||
dico.remove('test_activer_clam_proxy')
|
||||
dico.remove('test_activer_proxy_eth0')
|
||||
dico.remove('revprox_poshadmin')
|
||||
dico.remove('ip_client_logs_udp')
|
||||
dico.remove('adresse_ip_conteneur_dns')
|
||||
|
||||
dico.simple2multi(['test_distant_domaine1', 'test_distant_domaine2'], 'test_distant_domaine')
|
||||
dico.remove('test_distant_domaine1')
|
||||
dico.remove('test_distant_domaine2')
|
||||
dico.simple2multi(['ssl_subjectaltname_ip', 'ssl_subjectaltname_ns'], 'ssl_subjectaltname')
|
||||
dico.remove('ssl_subjectaltname_ip')
|
||||
dico.remove('ssl_subjectaltname_ns')
|
||||
|
||||
old_serveur_maj = eval(dico.get_val('serveur_maj', '[]'))
|
||||
if old_serveur_maj != []:
|
||||
serveur_maj = []
|
||||
for maj in old_serveur_maj:
|
||||
if maj == 'eoleng.ac-dijon.fr':
|
||||
maj = 'eole.ac-dijon.fr'
|
||||
if maj == 'test-eoleng.ac-dijon.fr':
|
||||
maj = 'test-eole.ac-dijon.fr'
|
||||
serveur_maj.append(maj)
|
||||
dico.fill_var('serveur_maj', serveur_maj)
|
||||
|
||||
ssl_country_name = eval(dico.get_val('ssl_country_name', '[""]'))[0].upper()
|
||||
dico.fill_var('ssl_country_name', ssl_country_name)
|
||||
|
||||
tmp_short_name = []
|
||||
tmp_long_name = []
|
||||
tmp_ip = []
|
||||
nom_domaine_local = eval(dico.get_val('nom_domaine_local', "['']"))[0]
|
||||
def _append_tmp_name(ip, long_name, short_name="NONE"):
|
||||
splitted_labels = long_name.split('.')
|
||||
if short_name == "NONE":
|
||||
short_name = splitted_labels[0]
|
||||
# ajout si non déjà défini dans Réseau avancé
|
||||
if long_name not in tmp_long_name:
|
||||
if short_name not in tmp_short_name:
|
||||
#le nom court n'existe pas dans la liste, donc l'ajoute
|
||||
tmp_short_name.append(short_name)
|
||||
else:
|
||||
if '.'.join(splitted_labels[1:]) == nom_domaine_local:
|
||||
# le nom court est déjà présent
|
||||
# privilégie le nom court pour le nom de domaine local
|
||||
tmp_short_name[tmp_short_name.index(short_name)] = None
|
||||
tmp_short_name.append(short_name)
|
||||
else:
|
||||
# ne pas doublonner le nom court
|
||||
tmp_short_name.append(None)
|
||||
if len(splitted_labels) > 1:
|
||||
tmp_long_name.append(long_name)
|
||||
else:
|
||||
# si nom court, transforme en nom long
|
||||
tmp_long_name.append(long_name + '.' + nom_domaine_local)
|
||||
tmp_ip.append(ip)
|
||||
|
||||
if eval(dico.get_val('activer_ajout_hosts', '["non"]'))[0] == 'oui':
|
||||
# récupération et passage en minuscules des
|
||||
# nom_court_hosts et nom_long_hosts existants #11473
|
||||
ips = eval(dico.get_val('adresse_ip_hosts', '[]').lower())
|
||||
long_names = eval(dico.get_val('nom_long_hosts', '[]').lower())
|
||||
for idx, short_name in enumerate(eval(dico.get_val('nom_court_hosts', '[]').lower())):
|
||||
_append_tmp_name(ips[idx], long_names[idx], short_name)
|
||||
|
||||
# Migration des variables hosts #2795
|
||||
# noms d'hôtes forcés en minuscules #9790
|
||||
nom_host_dns = eval(dico.get_val('nom_host_dns', '[]').lower())
|
||||
if not is_empty(nom_host_dns):
|
||||
ips = eval(dico.get_val('ip_host_dns'))
|
||||
# transforme les nom_host_dns en nom_court_hosts et nom_long_hosts
|
||||
# donc force activer_ajout_hosts à oui
|
||||
dico.fill_var('activer_ajout_hosts', 'oui')
|
||||
for idx, long_name in enumerate(nom_host_dns):
|
||||
_append_tmp_name(ips[idx], long_name)
|
||||
|
||||
if not is_empty(tmp_short_name):
|
||||
dico.fill_var('adresse_ip_hosts', tmp_ip)
|
||||
dico.fill_var('nom_court_hosts', tmp_short_name)
|
||||
dico.fill_var('nom_long_hosts', tmp_long_name)
|
||||
dico.remove('nom_host_dns')
|
||||
dico.remove('ip_host_dns')
|
||||
|
||||
# Ajout du point devant chaque zone #7008
|
||||
old_nom_zone_dns_cache = eval(dico.get_val('nom_zone_dns_cache', '[]'))
|
||||
if not is_empty(old_nom_zone_dns_cache):
|
||||
nom_zone_dns_cache = []
|
||||
for old in old_nom_zone_dns_cache:
|
||||
nom_zone_dns_cache.append('.' + old)
|
||||
dico.fill_var('nom_zone_dns_cache', nom_zone_dns_cache)
|
||||
|
||||
# Modification du chemin de la corbeille Samba #7463
|
||||
smb_trash_dir = eval(dico.get_val('smb_trash_dir', '["/"]'))[0]
|
||||
if not smb_trash_dir.startswith('/'):
|
||||
dico.fill_var('smb_trash_dir', 'perso/{0}'.format(smb_trash_dir))
|
||||
|
||||
# antivirus temps réel => remis à default #19833
|
||||
if dico.get_val('smb_vscan', "['non']") == "['oui']":
|
||||
dico.remove('smb_vscan')
|
||||
|
||||
# Famille Proxy parent #7823
|
||||
if not is_empty(eval(dico.get_val('nom_cache_pere', '[]'))):
|
||||
dico.fill_var('activer_cache_pere', 'oui')
|
||||
if not is_empty(eval(dico.get_val('nom_cache_pere_zone', '[]'))):
|
||||
dico.fill_var('activer_cache_pere_zone', 'oui')
|
||||
if not is_empty(eval(dico.get_val('proxy_sibling_ip', '[]'))):
|
||||
dico.fill_var('activer_proxy_sibling', 'oui')
|
||||
|
||||
# Autorisation proxy eth0 #8167
|
||||
if not is_empty(eval(dico.get_val('proxy_eth0_ip', '[]'))):
|
||||
dico.fill_var('activer_supp_proxy_eth0', 'oui')
|
||||
|
||||
# Famille Rvp #8164
|
||||
if not is_empty(eval(dico.get_val('adresse_network_zone_rvp', '[]'))):
|
||||
dico.fill_var('acces_proxy_zone_rvp', 'oui')
|
||||
|
||||
# half_closed_clients => remise à default #19813
|
||||
if dico.get_val('half_closed_clients', "['off']") == "['on']":
|
||||
dico.remove('half_closed_clients')
|
||||
|
||||
##
|
||||
## Modification de la configuration exim
|
||||
##
|
||||
# passerelle SMTP
|
||||
log.info(_(u"Migrating SMTP parameters"))
|
||||
passerelle_smtp = dico.get_val('passerelle_smtp', '[]')
|
||||
dico.move('passerelle_smtp', 'exim_relay_smtp')
|
||||
if is_empty(passerelle_smtp):
|
||||
# No SMTP gateway
|
||||
dico.fill_var('activer_exim_relay_smtp', u'non')
|
||||
|
||||
# Type de serveur SMTP
|
||||
exim_mail_type = eval(dico.get_val('exim_mail_type', '["satellite"]'))[0]
|
||||
log.info("Migration de exim_mail_type: '{0}'".format(exim_mail_type))
|
||||
dico.remove('exim_mail_type')
|
||||
if exim_mail_type == 'satellite':
|
||||
# Nothing to do
|
||||
pass
|
||||
elif exim_mail_type == 'local':
|
||||
# Local is smarthost without relay, should not happen
|
||||
dico.fill_var('exim_smarthost', u'oui')
|
||||
elif exim_mail_type == 'smarthost':
|
||||
dico.fill_var('exim_smarthost', u'oui')
|
||||
elif exim_mail_type == 'mailhub':
|
||||
dico.fill_var('exim_relay', u'oui')
|
||||
dico.fill_var('exim_relay_manual_routes', u'oui')
|
||||
elif exim_mail_type == 'internet':
|
||||
dico.fill_var('activer_exim_relay_smtp', u'non')
|
||||
dico.fill_var('exim_relay', u'oui')
|
||||
dico.fill_var('exim_relay_manual_routes', u'oui')
|
||||
else:
|
||||
log.warn(_(u'Mail configuration not recognised, not processed'))
|
||||
|
||||
# Réécriture
|
||||
mail_rewrite_domain = eval(dico.get_val('mail_rewrite_domain', '["non"]'))[0]
|
||||
dico.remove('mail_rewrite_domain')
|
||||
if mail_rewrite_domain == 'oui':
|
||||
dico.fill_var('exim_qualify_domain', 'nom de domaine local')
|
||||
|
||||
# Modèle Era utilisé (#9082)
|
||||
mapping = {'2zones-amonecole-nginx' : u'2zones-amonecole',
|
||||
'3zones-scribe-nginx' : u'3zones-dmz',
|
||||
'3zones-scribe' : u'3zones-dmz',
|
||||
'4zones-scribe-nginx' : u'4zones',
|
||||
'4zones-scribe-nufw' : u'4zones',
|
||||
'4zones-scribe' : u'4zones',
|
||||
'5zones-scribe-nginx' : u'5zones',
|
||||
'5zones-scribe' : u'5zones',
|
||||
}
|
||||
model = eval(dico.get_val('type_amon', '[""]'))[0]
|
||||
if model in mapping:
|
||||
dico.fill_var('type_amon', mapping[model])
|
||||
|
||||
# Migration des modules ecdl
|
||||
if dico.get_val('ecdl_regles_filtrage_supplementaires', 'Pas un eCDL') != 'Pas un eCDL':
|
||||
dico.move('ecdl_ldap_machine_suffix', 'ldap_machine_suffix')
|
||||
dico.move('ecdl_ldap_group_suffix', 'ldap_group_suffix')
|
||||
dico.move('ecdl_smb_share_model', 'smb_share_model')
|
||||
dico.move('ecdl_smb_vscan', 'smb_vscan')
|
||||
dico.move('ecdl_smb_ports', 'smb_ports')
|
||||
dico.move('ecdl_smb_server_string', 'smb_server_string')
|
||||
dico.move('ecdl_smb_trash', 'smb_trash')
|
||||
dico.move('ecdl_smb_trash_dir', 'smb_trash_dir')
|
||||
dico.move('ecdl_smb_trash_purge', 'smb_trash_purge')
|
||||
dico.move('ecdl_smb_quotawarn' , 'smb_quotawarn')
|
||||
dico.move('ecdl_smb_guest', 'smb_guest')
|
||||
dico.move('ecdl_smb_wins_support', 'smb_wins_support')
|
||||
dico.move('ecdl_smb_adresse_ip_wins', 'smb_wins_server')
|
||||
dico.move('ecdl_smb_dns_proxy', 'smb_dns_proxy')
|
||||
dico.move('ecdl_smb_oplocks', 'smb_oplocks')
|
||||
dico.move('ecdl_smb_dos_attributes', 'smb_dos_attributes')
|
||||
dico.move('ecdl_smb_unixextensions', 'smb_unixextensions')
|
||||
dico.move('ecdl_smb_partage_nom', 'smb_partage_nom')
|
||||
dico.move('ecdl_smb_partage_path', 'smb_partage_path')
|
||||
dico.move('ecdl_smb_partage_visibilite', 'smb_partage_visibilite')
|
||||
dico.move('ecdl_smb_partage_ecriture', 'smb_partage_ecriture')
|
||||
dico.move('ecdl_regles_filtrage_supplementaires', 'activer_regles_filtrage_port_source')
|
||||
dico.move('ecdl_smb_os_level', 'smb_os_level')
|
||||
dico.move('ecdl_smb_domain_master', 'smb_domain_master')
|
||||
dico.move('ecdl_ca_cert', 'ldap_ca_cert')
|
||||
dico.move('meddtl_suffixe_ldap_nss_base_passwd', 'ldap_nss_base_passwd_filter')
|
||||
dico.move('meddtl_suffixe_ldap_nss_base_group', 'ldap_nss_base_group_filter')
|
||||
dico.move('ecdl_ldap_timeout', 'ldap_timeout')
|
||||
dico.move('ecdl_smb_netbios_name', 'smb_netbios_name')
|
||||
dico.move('ecdl_smb_workgroup', 'smb_workgroup')
|
||||
dico.move('ecdl_smb_usershare_max_shares', 'smb_usershare_max_shares')
|
||||
dico.move('ecdl_smb_activer_partages', 'smb_activer_partages')
|
||||
dico.remove('ecdl_smb_log_level')
|
||||
# fin de migration des modules ecdl
|
||||
|
||||
# migration des modules esbl
|
||||
if dico.get_val('activer_lister_repertoires_apache', 'Pas un eSBL') != 'Pas un eSBL':
|
||||
dico.fill_var('smb_log_level', 0)
|
||||
smb_activer_ordre_resolution_nom = dico.get_val('smb_activer_ordre_resolution_nom', 'non')
|
||||
if smb_activer_ordre_resolution_nom == 'oui':
|
||||
smb_name_resolve_order = " ".join(eval(dico.get_val('smb_procede_recherche_nom')))
|
||||
dico.fill_var('smb_name_resolve_order', smb_name_resolve_order)
|
||||
smb_ad_nom_long_controleur = dico.get_val('smb_ad_nom_long_controleur', "['']")
|
||||
if smb_ad_nom_long_controleur != "['']":
|
||||
dico.fill_var('smb_ad_server', smb_ad_nom_long_controleur)
|
||||
smb_ad_realm = dico.get_val('smb_ad_realm', "['']")
|
||||
if smb_ad_realm != "['']":
|
||||
dico.fill_var('smb_realm', smb_ad_realm)
|
||||
dico.move('activer_lister_repertoires_apache', 'apache_lister_repertoires')
|
||||
|
||||
# répartition des variables pour les répertoires ftp
|
||||
ftps = {}
|
||||
for ftp_rep, ftp_anon in zip(eval(dico.get_val('acces_ftp', '[]')),
|
||||
eval(dico.get_val('acces_ftp_anonymous', '[]'))):
|
||||
ftps[ftp_anon] = ftps.get(ftp_anon, []) + [ftp_rep]
|
||||
# si len(ftps['oui']) > 1, pas de reprise automatique
|
||||
# sinon ftps['oui'] -> ftp_anonymous_directory
|
||||
# ftps['non'] -> ftp_access_directory
|
||||
|
||||
if 'oui' in ftps and len(ftps['oui']) == 1:
|
||||
dico.fill_var('ftp_anonymous_directory', ftps['oui'][0])
|
||||
dico.fill_var('activer_ftp_anonymous_access', 'oui')
|
||||
if 'non' in ftps:
|
||||
dico.fill_var('ftp_access_directory', ftps['non'])
|
||||
dico.fill_var('activer_ftp_access', 'oui')
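# --- illustrative aside (not part of the original script) ---
# The loop above groups FTP directories by their "anonymous" flag before
# dispatching them; a standalone check with hypothetical values:
dirs = ['/srv/pub', '/srv/depot', '/srv/priv']
anon = ['oui', 'non', 'non']
groups = {}
for directory, flag in zip(dirs, anon):
    groups.setdefault(flag, []).append(directory)
assert groups == {'oui': ['/srv/pub'], 'non': ['/srv/depot', '/srv/priv']}
# --- end aside ---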
|
||||
ftp_maxretrievefilesize = dico.get_val('ftp_maxretrievefilesize', '')
|
||||
if ftp_maxretrievefilesize != '':
|
||||
ftp_maxretrievefilesize = re.search(r'[0-9]+', ftp_maxretrievefilesize).group()
|
||||
dico.fill_var('ftp_maxretrievefilesize', ftp_maxretrievefilesize)
|
||||
ftp_maxstorefilesize = dico.get_val('ftp_maxstorefilesize', '')
|
||||
if ftp_maxstorefilesize != '':
|
||||
ftp_maxstorefilesize = re.search(r'[0-9]+', ftp_maxstorefilesize).group()
|
||||
dico.fill_var('ftp_maxstorefilesize', ftp_maxstorefilesize)
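# --- illustrative aside (not part of the original script) ---
# re.search(r'[0-9]+', ...).group() keeps only the first run of digits of the
# old value; hypothetical example:
import re
assert re.search(r'[0-9]+', '100 MB').group() == '100'
# --- end aside ---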
|
||||
|
||||
dico.move('activer_pare_feu', 'activer_firewall')
|
||||
# fin de migration des modules esbl
|
||||
|
||||
# migration des modules essl
|
||||
if dico.get_val('ecdl_serveurs_ip', "Pas un eSSL") != "Pas un eSSL":
|
||||
# variables ftp_max*
|
||||
ftp_maxretrievefilesize = dico.get_val('ftp_maxretrievefilesize', '')
|
||||
if ftp_maxretrievefilesize != '':
|
||||
ftp_maxretrievefilesize = re.search(r'[0-9]+', ftp_maxretrievefilesize).group()
|
||||
dico.fill_var('ftp_maxretrievefilesize', ftp_maxretrievefilesize)
|
||||
ftp_maxstorefilesize = dico.get_val('ftp_maxstorefilesize', '')
|
||||
if ftp_maxstorefilesize != '':
|
||||
ftp_maxstorefilesize = re.search(r'[0-9]+', ftp_maxstorefilesize).group()
|
||||
dico.fill_var('ftp_maxstorefilesize', ftp_maxstorefilesize)
|
||||
# variables renommées
|
||||
dico.move('sites_distants_morea_ip', 'sites_distants_ip')
|
||||
dico.move('sites_distants_morea_netmask', 'sites_distants_netmask')
|
||||
dico.move('nagios_morea_ip', 'nagios_dist_ip')
|
||||
dico.move('nagios_morea_netmask', 'nagios_dist_netmask')
|
||||
dico.move('morea_routeur_ip', 'wan_routeur_ip')
|
||||
dico.move('morea_interface', 'wan_interface')
|
||||
dico.move('surf_lan_ip', 'sites_dist_ip')
|
||||
dico.move('surf_lan_netmask', 'sites_dist_netmask')
|
||||
dico.move('morea_route_adresse', 'wan_route_adresse')
|
||||
dico.move('morea_route_netmask', 'wan_route_netmask')
|
||||
# conversions de valeurs
|
||||
variante_type_mapping = {'standard': 'production',
|
||||
'Applis Web': 'Applis_Web',
|
||||
'eSSL Morea': 'eSSL',
|
||||
'eSSL Internet': 'eSSL_Internet',
|
||||
'eSSL SPC': 'eSSL_SPC',
|
||||
'ppp': 'PPP',
|
||||
'': 'production'}
|
||||
variante_type = eval(dico.get_val('variante_type', "['']"))[0]
|
||||
dico.fill_var('variante_type', variante_type_mapping[variante_type])
|
||||
|
||||
# migration des variables dhcp
|
||||
exxl_dhcp = dico.has_section('dhcp_lease_max')
|
||||
if dico.get_val('activer_dhcp', "['non']") == "['oui']" and exxl_dhcp:
|
||||
# récupération des valeurs de la multi
|
||||
ip_basse = eval(dico.get_val('ip_basse_dhcp', '[""]'))
|
||||
ip_haute = eval(dico.get_val('ip_haute_dhcp', '[""]'))
|
||||
restriction = eval(dico.get_val('activer_dhcp_hotes_autorises', "['']"))
|
||||
lease_default = eval(dico.get_val('dhcp_lease_default', "['']"))
|
||||
lease_max = eval(dico.get_val('dhcp_lease_max', "['']"))
|
||||
# récupération des valeurs communes simples
|
||||
network = [eval(dico.get_val('adresse_network_dhcp', "['']"))[0]]
|
||||
netmask = [eval(dico.get_val('adresse_netmask_dhcp', "['']"))[0]]
|
||||
nom_domaine_dhcp = [eval(dico.get_val('nom_domaine_dhcp', "['']"))[0]]
|
||||
gateway_dhcp = [eval(dico.get_val('adresse_ip_gw_dhcp', "['']"))[0]]
|
||||
# récupération des valeurs communes multiples
|
||||
dns_dhcp = eval(dico.get_val('adresse_ip_dns_dhcp', "['']"))
|
||||
wins = eval(dico.get_val('adresse_ip_wins_dhcp', "['']"))
|
||||
wins_primaire = wins[0]
|
||||
if len(wins) > 1:
|
||||
wins_secondaire = wins[1]
|
||||
else:
|
||||
wins_secondaire = wins_primaire
|
||||
ntp_dhcp = eval(dico.get_val('adresse_ip_ntp_dhcp', "['']"))
|
||||
# création des nouvelles listes, produit cartésien
|
||||
ranges, dns_dhcp, ntp_dhcp = zip(*list(product(zip(ip_basse, ip_haute, restriction, lease_default, lease_max), dns_dhcp, ntp_dhcp)))
|
||||
dns_dhcp = list(dns_dhcp)
|
||||
ntp_dhcp = list(ntp_dhcp)
|
||||
ip_basse, ip_haute, restriction, lease_default, lease_max = [list(l) for l in zip(*ranges)]
|
||||
nb_ranges = len(ip_basse)
|
||||
nom_domaine_dhcp = nom_domaine_dhcp*nb_ranges
|
||||
gateway_dhcp = gateway_dhcp*nb_ranges
|
||||
wins_primaire = [wins_primaire]*nb_ranges
|
||||
wins_secondaire = [wins_secondaire]*nb_ranges
|
||||
network = network*nb_ranges
|
||||
netmask = netmask*nb_ranges
|
||||
# chargement des valeurs dans le dictionnaire
|
||||
dico.fill_var('adresse_network_dhcp', network)
|
||||
dico.fill_var('adresse_netmask_dhcp',netmask)
|
||||
dico.fill_var('ip_basse_dhcp', ip_basse)
|
||||
dico.fill_var('ip_haute_dhcp', ip_haute)
|
||||
dico.fill_var('nom_domaine_dhcp', nom_domaine_dhcp)
|
||||
dico.fill_var('adresse_ip_gw_dhcp', gateway_dhcp)
|
||||
dico.fill_var('adresse_ip_dns_dhcp', dns_dhcp)
|
||||
dico.fill_var('adresse_ip_wins_primaire_dhcp', wins_primaire)
|
||||
dico.fill_var('adresse_ip_wins_secondaire_dhcp', wins_secondaire)
|
||||
dico.fill_var('adresse_ip_ntp_dhcp', ntp_dhcp)
|
||||
dico.fill_var('interdire_hotes_inconnus', restriction)
|
||||
dico.fill_var('dhcp_lease_default', lease_default)
|
||||
dico.fill_var('dhcp_lease_max', lease_max)
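# --- illustrative aside (not part of the original script) ---
# Standalone sketch of the expansion above: product() turns every
# (range, dns, ntp) combination into one row of the new lists.
# Hypothetical sample values, two ranges and two DNS servers:
from itertools import product
ip_basse = ['10.0.0.10', '10.0.1.10']
ip_haute = ['10.0.0.50', '10.0.1.50']
dns_dhcp = ['10.0.0.1', '10.0.0.2']
ranges, dns = zip(*product(zip(ip_basse, ip_haute), dns_dhcp))
ip_basse, ip_haute = (list(col) for col in zip(*ranges))
assert len(ip_basse) == 4  # 2 ranges x 2 DNS servers
assert ip_basse == ['10.0.0.10', '10.0.0.10', '10.0.1.10', '10.0.1.10']
assert list(dns) == ['10.0.0.1', '10.0.0.2', '10.0.0.1', '10.0.0.2']
# --- end aside ---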
|
||||
|
||||
#envole
|
||||
if dico.get_val('activer_envole', "['non']") == "['oui']" and dico.get_val('force_envole', "['non']") == "['oui']":
|
||||
alias_envole = eval(dico.get_val('alias_envole'))[0]
|
||||
if alias_envole != '/':
|
||||
dico.fill_var('web_redirection', alias_envole)
|
||||
dico.remove('alias_envole')
|
||||
|
||||
def get_version(dico):
|
||||
"""
|
||||
recupère la version en fonction de la présence ou non
|
||||
de la variable 'serveur_maj2'
|
||||
|
||||
:param dico: ConfigParser object
|
||||
:return version: '2.2' ou '2.3'
|
||||
"""
|
||||
# ________ version du config.eol ________
|
||||
|
||||
if dico.has_section('serveur_maj2') and not \
|
||||
dico.has_section('activer_bash_completion'):
|
||||
version = '2.2'
|
||||
else:
|
||||
version = '2.3'
|
||||
return version
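# --- illustrative aside (not part of the original script) ---
# Standalone sketch of the same detection with a bare ConfigParser
# (config.eol sections are named after the variables):
try:
    from configparser import ConfigParser          # Python 3
except ImportError:
    from ConfigParser import ConfigParser          # Python 2
cfg = ConfigParser()
cfg.add_section('serveur_maj2')
version = '2.2' if (cfg.has_section('serveur_maj2')
                    and not cfg.has_section('activer_bash_completion')) else '2.3'
assert version == '2.2'
# --- end aside ---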
|
||||
|
||||
|
||||
def main(config_file):
|
||||
"""main entry point"""
|
||||
pass
|
||||
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
if len(sys.argv) != 2:
|
||||
print __doc__
|
||||
sys.exit(1)
|
||||
main(sys.argv[1])
|
|
@ -1,735 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#-*- coding: utf-8 -*-
|
||||
"""
|
||||
|
||||
Utilitaire de mise à jour des variables
|
||||
pour les versions >= 2.4.1
|
||||
|
||||
"""
|
||||
from .upgrade import log, migration_23_to_tiramisu
|
||||
from .var_loader import convert_value
|
||||
from pyeole.i18n import i18n
|
||||
from tiramisu.setting import owners
|
||||
from tiramisu.setting import undefined
|
||||
from distutils.version import StrictVersion
|
||||
from pyeole.encode import normalize
|
||||
_ = i18n('creole')
|
||||
|
||||
class Upgrade():
|
||||
"""
|
||||
Méthodes pour la mise à niveau des variables
|
||||
"""
|
||||
def __init__(self, config):
|
||||
owner = u'upgrade'
|
||||
if owner not in dir(owners):
|
||||
owners.addowner(owner)
|
||||
self.config = config
|
||||
self.owner = getattr(owners, owner)
|
||||
self.unknown_options = config.impl_get_information(u'unknown_options')
|
||||
|
||||
def get_old_value(self, variable, old_variable, default=None):
|
||||
"""
|
||||
Retourne la valeur d'une variable "disparue"
|
||||
"""
|
||||
try:
|
||||
old_obj = self.unknown_options[old_variable]
|
||||
if old_obj.get('old_format', False):
|
||||
try:
|
||||
path = self.get_path(variable)
|
||||
except AttributeError:
|
||||
return default
|
||||
opt = self.config.unwrap_from_path(path)
|
||||
val = migration_23_to_tiramisu(opt, old_obj['val'])
|
||||
else:
|
||||
val = old_obj['val']
|
||||
return val
|
||||
except KeyError:
|
||||
return default
|
||||
|
||||
def get_value(self, variable, default=None):
|
||||
"""
|
||||
Retourne la valeur d'une variable "connue"
|
||||
"""
|
||||
try:
|
||||
path = self.get_path(variable)
|
||||
except AttributeError:
|
||||
return default
|
||||
return self.config.getattr(path,
|
||||
force_permissive=True)
|
||||
|
||||
def get_unvalid_value(self, variable, default=None):
|
||||
try:
|
||||
path = self.get_path(variable)
|
||||
except AttributeError:
|
||||
return default
|
||||
try:
|
||||
return self.config.impl_get_information('orig_value_{}'.format(path))
|
||||
except ValueError:
|
||||
return default
|
||||
|
||||
def get_noncalculated_value_for_auto(self, variable):
|
||||
"""
|
||||
Retourne la valeur contenue dans le fichier config.eol dans le cas où la variable
|
||||
est calculée (auto), forcé à la valeur par défaut, ...
|
||||
"""
|
||||
try:
|
||||
path = self.get_path(variable)
|
||||
except AttributeError:
|
||||
log.error(_('get_noncalculated_value_for_auto: unknown variable {}').format(variable))
|
||||
return None
|
||||
values = self.config.cfgimpl_get_values()
|
||||
if values._contains(path):
|
||||
idx = 0
|
||||
vals = []
|
||||
while True:
|
||||
val = values._p_.getvalue(path, values._p_.getsession(), idx)
|
||||
if val is undefined:
|
||||
break
|
||||
vals.append(val)
|
||||
idx += 1
|
||||
if len(vals) > 0:
|
||||
return vals
|
||||
else:
|
||||
return None
|
||||
return None
|
||||
|
||||
def var_exists(self, variable):
|
||||
try:
|
||||
self.get_path(variable)
|
||||
return True
|
||||
except AttributeError:
|
||||
return False
|
||||
|
||||
def get_path(self, variable):
|
||||
"""
|
||||
Retourne le chemin complet d'une variable
|
||||
"""
|
||||
return self.config.find_first(byname=variable, type_='path')
|
||||
|
||||
def modify_owner(self, path, value=None, index=None):
|
||||
"""
|
||||
Modifie le propriétaire d'une variable
|
||||
"""
|
||||
option = self.config.unwrap_from_path(path)
|
||||
if option.impl_is_master_slaves('slave'):
|
||||
if index is not None:
|
||||
self.config.cfgimpl_get_values().setowner(option,
|
||||
self.owner,
|
||||
index=index)
|
||||
elif value is not None:
|
||||
for idx in xrange(len(value)):
|
||||
self.config.cfgimpl_get_values().setowner(option,
|
||||
self.owner,
|
||||
index=idx)
|
||||
else:
|
||||
raise Exception('must have value or index for slave')
|
||||
|
||||
else:
|
||||
self.config.cfgimpl_get_values().setowner(option,
|
||||
self.owner)
|
||||
|
||||
def is_default(self, variable, default=True):
|
||||
"""
|
||||
Retourne True si la valeur n'a pas été personnalisée par l'utilisateur
|
||||
"""
|
||||
try:
|
||||
path = self.get_path(variable)
|
||||
except AttributeError:
|
||||
return default
|
||||
option = self.config.unwrap_from_path(path)
|
||||
return self.config.cfgimpl_get_values().is_default_owner(option)
|
||||
|
||||
def set_value(self, variable, value):
|
||||
"""
|
||||
Modifie la valeur d'une variable
|
||||
"""
|
||||
try:
|
||||
path = self.get_path(variable)
|
||||
except AttributeError:
|
||||
log.error(_(u"Try to set value to unknown option: {0} = {1}").format(variable, value))
|
||||
else:
|
||||
try:
|
||||
self.config._setattr(path, value,
|
||||
force_permissive=True)
|
||||
self.modify_owner(path, value)
|
||||
log.info(_(u"Variable updated: {0} = {1}").format(variable, value))
|
||||
self.config.impl_del_information('error_msg_{}'.format(path), raises=False)
|
||||
self.config.impl_del_information('orig_value_{}'.format(path), raises=False)
|
||||
option = self.config.unwrap_from_path(path)
|
||||
self.config.cfgimpl_get_settings()[option].remove('load_error')
|
||||
except ValueError:
|
||||
option = self.config.unwrap_from_path(path)
|
||||
try:
|
||||
# the value could be in Creole 2.3 format #13957
|
||||
if not option.impl_is_multi() and isinstance(value, list) and len(value) == 1:
|
||||
value = value[0]
|
||||
if value in ['', ['']]:
|
||||
err_msg = _(u"empty value")
|
||||
log.error(_(u"{0} for {1}").format(err_msg, variable))
|
||||
return
|
||||
self.config._setattr(path, convert_value(option, value),
|
||||
force_permissive=True)
|
||||
self.modify_owner(path, value)
|
||||
log.info(_(u"Variable updated: {0} = {1}").format(variable, value))
|
||||
except Exception, err:
|
||||
log.error(_(u"{0} for {1}").format(err, variable))
|
||||
self.config.cfgimpl_get_settings()[option].append('load_error')
|
||||
except Exception, err:
|
||||
option = self.config.unwrap_from_path(path)
|
||||
log.error(_("{0} for {1}").format(normalize(str(err)), variable))
|
||||
self.config.cfgimpl_get_settings()[option].append('load_error')
|
||||
|
||||
def del_value(self, variable):
|
||||
try:
|
||||
path = self.get_path(variable)
|
||||
except AttributeError:
|
||||
log.error(_('Try to delete an unknown option: {0}').format(variable))
|
||||
else:
|
||||
option = self.config.unwrap_from_path(path)
|
||||
self.config.cfgimpl_get_values().__delitem__(option)
|
||||
log.info(_(u"Variable {0} reinitialized").format(variable))
|
||||
|
||||
def append_value(self, variable, value):
|
||||
"""
|
||||
Ajoute une valeur à une variable multi
|
||||
"""
|
||||
try:
|
||||
path = self.get_path(variable)
|
||||
except AttributeError:
|
||||
log.error(_('Try to append a value to an unknown option: {0} += {1}').format(variable, value))
|
||||
else:
|
||||
multi = self.config.getattr(path,
|
||||
force_permissive=True)
|
||||
multi.append(value)
|
||||
self.modify_owner(path, index=len(multi) - 1)
|
||||
|
||||
def modify_last_value(self, variable, value):
|
||||
"""
|
||||
Modifie la dernière valeur d'une variable multi
|
||||
"""
|
||||
try:
|
||||
path = self.get_path(variable)
|
||||
except AttributeError:
|
||||
log.error(_('Try to modify last value of an unknown option: {0}[-1] = {1}').format(variable, value))
|
||||
else:
|
||||
multi = self.config.getattr(path,
|
||||
force_permissive=True)
|
||||
multi[-1] = value
|
||||
self.modify_owner(path, index=len(multi) - 1)
|
||||
|
||||
def move(self, old_variable, new_variable):
|
||||
"""
|
||||
Déplace les données d'une variable "disparue"
|
||||
vers une nouvelle variable
|
||||
"""
|
||||
if old_variable in self.unknown_options:
|
||||
value = self.unknown_options[old_variable][u'val']
|
||||
path = self.get_path(new_variable)
|
||||
option = self.config.unwrap_from_path(path)
|
||||
if value in ['', ['']]:
|
||||
err_msg = _(u"empty value")
|
||||
log.error(_(u"{0} for {1}").format(err_msg, old_variable))
|
||||
return
|
||||
if option.impl_is_multi() and isinstance(value, list):
|
||||
for val in value:
|
||||
self.append_value(new_variable, val)
|
||||
else:
|
||||
self.set_value(new_variable, value)
|
||||
del(self.unknown_options[old_variable])
|
||||
log.info(_(u"Variable {0} has been renamed to {1}").format(old_variable, new_variable))
|
||||
|
||||
def copy(self, old_variable, new_variable, only_if_modified=True):
|
||||
"""
|
||||
Copie la valeur d'une variable existante vers une autre
|
||||
Si la valeur "old" est une multi et pas la "new" => copie la 1er valeur de la liste
|
||||
Si la valeur "old" n'est pas une multi et la "new" ne l'est pas => transforme la valeur en liste
|
||||
only_if_modified: si True ne copie que les valeurs qui sont modifiées
|
||||
"""
|
||||
try:
|
||||
# si les deux variables existe => migration
|
||||
old_path = self.get_path(old_variable)
|
||||
new_path = self.get_path(new_variable)
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
old_option = self.config.unwrap_from_path(old_path)
|
||||
new_option = self.config.unwrap_from_path(new_path)
|
||||
# si la nouvelle option n'est pas modifié et si la valeur est modifié ou only_if_modified est False
|
||||
if self.config.cfgimpl_get_values().is_default_owner(new_option) and \
|
||||
(not only_if_modified or
|
||||
not self.config.cfgimpl_get_values().is_default_owner(old_option)):
|
||||
old_value = self.config.getattr(old_path,
|
||||
force_permissive=True)
|
||||
if old_option.impl_is_multi() and not new_option.impl_is_multi():
|
||||
if len(old_value) != 0:
|
||||
old_value = old_value[0]
|
||||
else:
|
||||
old_value = None
|
||||
if not old_option.impl_is_multi() and new_option.impl_is_multi():
|
||||
if old_value is None:
|
||||
old_value = []
|
||||
else:
|
||||
old_value = [old_value]
|
||||
self.set_value(new_variable, old_value)
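# --- illustrative aside (not part of the original) ---
# Standalone sketch of the multi/non-multi adaptation rules described in the
# copy() docstring above (no Tiramisu objects involved):
def adapt(value, old_is_multi, new_is_multi):
    if old_is_multi and not new_is_multi:
        return value[0] if len(value) != 0 else None
    if not old_is_multi and new_is_multi:
        return [] if value is None else [value]
    return value

assert adapt(['a', 'b'], True, False) == 'a'
assert adapt([], True, False) is None
assert adapt('a', False, True) == ['a']
assert adapt(None, False, True) == []
# --- end aside ---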
|
||||
|
||||
|
||||
|
||||
class Upgrade_2_4_1(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.4.0 vers 2.4.1
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.4.0', '2.4.1'))
|
||||
|
||||
# renommage des variables "era_proxy_bypass"
|
||||
for i in range(1, 5):
|
||||
self.move('era_proxy_bypass_eth{0}'.format(i), 'proxy_bypass_network_eth{0}'.format(i))
|
||||
|
||||
# fusion des variables "proxy_bypass" et "wpad_exclude"
|
||||
if 'adresse_ip_wpad_exclude' in self.unknown_options:
|
||||
#le 1er argument sert a récupérer les propriétés des option (choiceoption, multi, ...)
|
||||
#on lui passe la variable de la 1er interface
|
||||
old_interfaces = self.get_old_value('proxy_bypass_network_eth1', 'interface_wpad_exclude')
|
||||
netmasks = self.get_old_value('proxy_bypass_netmask_eth1', 'adresse_netmask_wpad_exclude')
|
||||
for idx, value in enumerate(self.get_old_value('proxy_bypass_network_eth1', 'adresse_ip_wpad_exclude')):
|
||||
interface = old_interfaces[idx]
|
||||
if interface == 'Toutes':
|
||||
interfaces = range(1, 5)
|
||||
elif int(interface) in range(1, 5):
|
||||
interfaces = [interface]
|
||||
else:
|
||||
log.error(_(u"Invalid value : {0} in old variable {1}").format(interface, 'interface_wpad_exclude'))
|
||||
continue
|
||||
for i in interfaces:
|
||||
self.append_value('proxy_bypass_network_eth{0}'.format(i), value)
|
||||
self.modify_last_value('proxy_bypass_netmask_eth{0}'.format(i), netmasks[idx])
|
||||
del(self.unknown_options['adresse_ip_wpad_exclude'])
|
||||
del(self.unknown_options['adresse_netmask_wpad_exclude'])
|
||||
del(self.unknown_options['interface_wpad_exclude'])
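# --- illustrative aside (not part of the original) ---
# Standalone sketch of the interface fan-out used above: an old entry tagged
# 'Toutes' is replayed on eth1..eth4, otherwise only on its own interface:
def target_interfaces(interface):
    if interface == 'Toutes':
        return list(range(1, 5))
    if int(interface) in range(1, 5):
        return [int(interface)]
    raise ValueError(interface)

assert target_interfaces('Toutes') == [1, 2, 3, 4]
assert target_interfaces('2') == [2]
# --- end aside ---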
|
||||
|
||||
# passage à oui des variables "proxy_bypass_ethX" si nécessaire
|
||||
for i in range(1, 5):
|
||||
if len(self.get_value('proxy_bypass_network_eth{0}'.format(i), [])) > 0:
|
||||
self.set_value('proxy_bypass_eth{0}'.format(i), u'oui')
|
||||
|
||||
# transfert des variables nom_domaine_wpad_exclude
|
||||
if 'nom_domaine_wpad_exclude' in self.unknown_options:
|
||||
old_interfaces = self.get_old_value('proxy_bypass_domain_eth1', 'nom_interface_wpad_exclude')
|
||||
for idx, value in enumerate(self.get_old_value('proxy_bypass_domain_eth1', 'nom_domaine_wpad_exclude')):
|
||||
interface = old_interfaces[idx]
|
||||
if interface == 'Toutes':
|
||||
interfaces = range(1, 5)
|
||||
elif int(interface) in range(1, 5):
|
||||
interfaces = [interface]
|
||||
else:
|
||||
log.error(_(u"Invalid value : {0} in old variable {1}").format(interface, 'nom_interface_wpad_exclude'))
|
||||
continue
|
||||
for i in interfaces:
|
||||
self.append_value('proxy_bypass_domain_eth{0}'.format(i), value)
|
||||
del(self.unknown_options['nom_domaine_wpad_exclude'])
|
||||
del(self.unknown_options['nom_interface_wpad_exclude'])
|
||||
|
||||
# nom_serveur_scribe_dmz/ip_serveur_scribe_dmz => mandatory (#11713)
|
||||
if self.get_value('install_scribe_dmz') == u'oui':
|
||||
if self.get_value('nom_serveur_scribe_dmz') == None or self.get_value('ip_serveur_scribe_dmz') == None:
|
||||
self.set_value('install_scribe_dmz', u'non')
|
||||
|
||||
|
||||
class Upgrade_2_4_2(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.4.1 vers 2.4.2
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.4.1', '2.4.2'))
|
||||
# migration des variables eolesso vers client LDAP #10821
|
||||
self.copy('eolesso_port_ldap', 'ldap_port')
|
||||
self.copy('eolesso_ldap_reader', 'ldap_reader')
|
||||
self.copy('eolesso_ldap_reader_passfile', 'ldap_reader_passfile')
|
||||
self.copy('eolesso_ldap_match_attribute', 'ldap_match_attribute')
|
||||
self.copy('eolesso_ldap_filter_user', 'ldap_filter_user')
|
||||
self.copy('eolesso_ldap_filter_group', 'ldap_filter_group')
|
||||
self.copy('eolesso_ldap_dntree_user', 'ldap_dntree_user')
|
||||
self.copy('eolesso_ldap_dntree_group', 'ldap_dntree_group')
|
||||
self.copy('eolesso_ldap_fill_displayname', 'ldap_fill_displayname')
|
||||
self.copy('eolesso_ldap_fill_mail', 'ldap_fill_mail')
|
||||
self.copy('eolesso_ldap_fill_fonction', 'ldap_fill_fonction')
|
||||
self.copy('eolesso_ldap_fill_categorie', 'ldap_fill_categorie')
|
||||
self.copy('eolesso_ldap_fill_rne', 'ldap_fill_rne')
|
||||
self.copy('eolesso_ldap_fill_fredurne', 'ldap_fill_fredurne')
|
||||
self.copy('eolesso_ldap_fill_displaygroup', 'ldap_fill_displaygroup')
|
||||
|
||||
# migration des variables courier #10987
|
||||
courier_val = self.get_old_value('activer_recuperation_courriel', 'activer_courier')
|
||||
if courier_val is not None:
|
||||
if courier_val == 'non':
|
||||
self.set_value('activer_recuperation_courriel', 'non')
|
||||
elif not 'imap' in courier_val:
|
||||
self.set_value('activer_courier_imap', 'non')
|
||||
if 'pop' in courier_val:
|
||||
self.set_value('activer_courier_pop', 'oui')
|
||||
|
||||
|
||||
class Upgrade_2_5_0(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.4.X vers 2.5.0
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.4.X', '2.5.0'))
|
||||
|
||||
# migration des variables nut #11608
|
||||
monitor = self.get_value('nut_monitor_user')
|
||||
if monitor != []:
|
||||
self.set_value('nut_monitor', 'oui')
|
||||
|
||||
# migration des variables postgresql pour Zéphir #11974
|
||||
old_pg_shared_buffers = self.get_value('pg_shared_buffers')
|
||||
if old_pg_shared_buffers is not None:
|
||||
if int(old_pg_shared_buffers) == 3072:
|
||||
self.del_value('pg_shared_buffers')
|
||||
else:
|
||||
self.set_value('pg_shared_buffers_unit', u'kB')
|
||||
self.del_value('pg_effective_cache_size')
|
||||
|
||||
|
||||
class Upgrade_2_5_1(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.5.0 vers 2.5.1
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.5.0', '2.5.1'))
|
||||
|
||||
# migration des variables zone_forward (#11922)
|
||||
zone_forward = self.get_value('nom_zone_forward', [])
|
||||
if zone_forward != []:
|
||||
self.set_value('activer_zone_forward', 'oui')
|
||||
|
||||
# passage de bacula à bareos (#12425)
|
||||
for var in ['activer_bareos_dir', 'activer_bareos_sd',
|
||||
'bareos_dir_name', 'bareos_full_retention',
|
||||
'bareos_full_retention_unit', 'bareos_diff_retention',
|
||||
'bareos_diff_retention_unit', 'bareos_inc_retention',
|
||||
'bareos_inc_retention_unit', 'bareos_max_run_time',
|
||||
'bareos_compression', 'bareos_dir_password',
|
||||
'bareos_fd_password', 'bareos_sd_local',
|
||||
'bareos_sd_adresse', 'bareos_sd_password',
|
||||
'bareos_sd_name', 'bareos_sd_remote_dir_name',
|
||||
'bareos_sd_remote_ip', 'bareos_sd_remote_password']:
|
||||
self.move(var.replace('bareos', 'bacula'), var)
|
||||
|
||||
if self.get_value('activer_bareos_dir') == u'oui':
|
||||
#sauvegarde déjà programmé en sqlite3, ne gère pas la migration vers mysql
|
||||
self.set_value('bareos_db_type', 'sqlite3')
|
||||
|
||||
if self.get_value('ldap_ca_cert') == '/etc/certs/CA2008.pem':
|
||||
self.set_value('ldap_ca_cert', '/etc/certs/certificat.pem')
|
||||
|
||||
|
||||
class Upgrade_2_5_2(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.5.1 vers 2.5.2
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.5.1', '2.5.2'))
|
||||
|
||||
# haute dispo présente
|
||||
if self.var_exists('activer_haute_dispo'):
|
||||
# migration HD sphynx #14881
|
||||
if self.var_exists('activer_resource_arv'):
|
||||
activer_haute_dispo = self.get_value('activer_haute_dispo')
|
||||
if activer_haute_dispo == 'maitre':
|
||||
service_resource_name = self.get_noncalculated_value_for_auto('service_resource_name')
|
||||
service_resource_startdelay = self.get_noncalculated_value_for_auto('service_resource_startdelay')
|
||||
need_update = False
|
||||
startdelay_index = 1
|
||||
need_disabled_arv = False
|
||||
if service_resource_startdelay is not None:
|
||||
if service_resource_name is not None:
|
||||
need_update = 'arv_rsc' in service_resource_name
|
||||
if need_update:
|
||||
startdelay_index = service_resource_name.index('arv_rsc')
|
||||
need_disabled_arv = not need_update
|
||||
else:
|
||||
need_update = True
|
||||
self.del_value('service_resource_name')
|
||||
self.del_value('service_resource_script')
|
||||
self.del_value('service_resource_interval')
|
||||
self.del_value('service_resource_timeout')
|
||||
self.del_value('service_resource_startdelay')
|
||||
if need_update and service_resource_startdelay[startdelay_index] != 15:
|
||||
self.set_value('service_resource_arv_startdelay', service_resource_startdelay[startdelay_index])
|
||||
if need_disabled_arv:
|
||||
self.set_value('activer_resource_arv', u'non')
|
||||
#
|
||||
vip_resource_adresseip = self.get_noncalculated_value_for_auto('vip_resource_adresseip')
|
||||
self.del_value('vip_resource_name')
|
||||
self.del_value('vip_resource_if')
|
||||
self.del_value('vip_resource_adresseip')
|
||||
self.del_value('vip_resource_location')
|
||||
if vip_resource_adresseip is not None:
|
||||
if len(vip_resource_adresseip) > 0:
|
||||
self.set_value('vip_externe', vip_resource_adresseip[0])
|
||||
if len(vip_resource_adresseip) > 1:
|
||||
self.set_value('vip_interne', vip_resource_adresseip[1])
|
||||
# migration HD non Sphynx #14951
|
||||
else:
|
||||
vip_resource_if = self.get_noncalculated_value_for_auto('vip_resource_if')
|
||||
vip_netmask = []
|
||||
for vip_if in vip_resource_if:
|
||||
netmask_var = 'adresse_netmask_{0}'.format(vip_if.lower())
|
||||
vip_netmask.append(self.get_value(netmask_var))
|
||||
if len(vip_netmask) > 0:
|
||||
self.set_value('vip_resource_netmask', vip_netmask)
|
||||
service_resource_name = self.get_noncalculated_value_for_auto('service_resource_name')
|
||||
if len(service_resource_name) > 0:
|
||||
self.set_value('activer_service_resource', 'oui')
|
||||
|
||||
|
||||
class Upgrade_2_6_0(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.5.X vers 2.6.0
|
||||
"""
|
||||
|
||||
def get_eth_no(self, eth):
|
||||
"""
|
||||
Retourne le numéro X du nom de l'interface ethX
|
||||
"""
|
||||
try:
|
||||
return eth.split("eth")[1]
|
||||
except:
|
||||
log.error(_(u"Interface {0} name has not an 'ethX' format").format(eth))
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.5.X', '2.6.0'))
|
||||
|
||||
# migration des variables faisant référence au nom des interfaces ethX
|
||||
eth_vars = ['route_int', 'fw_rule_int', 'dhcrelay_server_interface', 'freerad_listen_int',
|
||||
'sw_force_ip_src', 'corosync_dial_if', 'dhcrelay_interfaces']
|
||||
for eth_var in eth_vars:
|
||||
eth_name = self.get_unvalid_value(eth_var)
|
||||
if isinstance(eth_name, list):
|
||||
eth_no = []
|
||||
for eth in eth_name:
|
||||
if eth == 'all':
|
||||
eth_no.append(eth)
|
||||
else:
|
||||
eth_no.append(self.get_eth_no(eth))
|
||||
if eth_no != [] and eth_no != eth_name:
|
||||
self.set_value(eth_var, eth_no)
|
||||
elif isinstance(eth_name, dict):
|
||||
eth_no = []
|
||||
for eth_key, eth_value in eth_name.items():
|
||||
if eth_value == 'all':
|
||||
eth_no.append(eth_value)
|
||||
else:
|
||||
eth_no.append(self.get_eth_no(eth_value))
|
||||
if eth_no != [] and eth_no != eth_name:
|
||||
self.set_value(eth_var, eth_no)
|
||||
elif eth_name is not None:
|
||||
eth_no = self.get_eth_no(eth_name)
|
||||
self.set_value(eth_var, eth_no)
|
||||
elif eth_var == 'dhcrelay_server_interface' and self.get_value('adresse_ip_dhcp_dhcrelay') is not None:
|
||||
# migration de l'ancienne valeur par défaut de dhcrelay_server_interface #18329
|
||||
self.set_value(eth_var, u'3')
|
||||
# haute dispo présente
|
||||
if self.var_exists('activer_haute_dispo'):
|
||||
# migration HD non sphynx
|
||||
if not self.var_exists('activer_resource_arv'):
|
||||
eth_name = self.get_noncalculated_value_for_auto('vip_resource_if')
|
||||
eth_no = []
|
||||
for eth in eth_name:
|
||||
eth_no.append(self.get_eth_no(eth))
|
||||
self.set_value('vip_resource_if', eth_no)
|
||||
|
||||
|
||||
class Upgrade_2_6_1(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.6.0 vers 2.6.1
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.6.0', '2.6.1'))
|
||||
|
||||
# migration des variables NTLM/SMB : multi -> non multi (#18277)
|
||||
if self.var_exists('nom_serveur_smb'):
|
||||
for varname in ('nom_serveur_smb', 'nom_domaine_smb', 'ip_serveur_smb'):
|
||||
value = self.get_unvalid_value(varname)
|
||||
if isinstance(value, list) and len(value) > 1:
|
||||
self.set_value(varname, value[0])
|
||||
|
||||
# nom_carte_ethX => multi-valuées (#18609)
|
||||
for numint in range(0, 5):
|
||||
varname = 'nom_carte_eth{}'.format(numint)
|
||||
value = self.get_unvalid_value(varname)
|
||||
if value != None:
|
||||
self.set_value(varname, [value])
|
||||
|
||||
# migration variable 'module_type' pour le module esbl ('ESBL') -> ('eSBL') (#21677)
|
||||
if self.get_value('eole_module') == u'esbl':
|
||||
self.set_value('module_type', u'eSBL')
|
||||
|
||||
|
||||
class Upgrade_2_6_2(Upgrade):
|
||||
"""
|
||||
Mise à jour d'une configuration
|
||||
de 2.6.1 vers 2.6.2
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Lancement des traitements
|
||||
"""
|
||||
log.info(_(u"Starting {0} to {1} upgrade").format('2.6.1', '2.6.2'))
|
||||
|
||||
adresse_network_dhcp = self.get_value('adresse_network_dhcp')
|
||||
if adresse_network_dhcp:
|
||||
plages = []
|
||||
for idx in xrange(len(adresse_network_dhcp)):
|
||||
plages.append(u'plage{}'.format(idx))
|
||||
self.set_value('nom_plage_dhcp', plages)
|
||||
if self.var_exists('acces_distant_backend_ead'):
|
||||
self.set_value('acces_distant_backend_ead', 'oui')
|
||||
for interface in [str(n) for n in range(5)]:
|
||||
variable = 'frontend_ead_distant_eth' + interface
|
||||
if self.var_exists(variable):
|
||||
self.set_value(variable, 'oui')
|
||||
variable = 'ip_frontend_ead_distant_eth' + interface
|
||||
if self.var_exists(variable):
|
||||
self.set_value(variable, ['0.0.0.0'])
|
||||
variable = 'netmask_frontend_ead_distant_eth' + interface
|
||||
if self.var_exists(variable):
|
||||
self.set_value(variable, ['0.0.0.0'])
|
||||
# Upgrade Seth
|
||||
# AD firewall - mix old multi variables ad_clients_ip and
|
||||
# ad_servers_ip in ad_peer_ip
|
||||
ad_servers_ip = self.get_old_value('ad_peer_ip', 'ad_servers_ip')
|
||||
ad_clients_ip = self.get_old_value('ad_peer_ip', 'ad_clients_ip')
|
||||
if ad_servers_ip or ad_clients_ip:
|
||||
self.set_value('ad_filter_network', 'oui')
|
||||
if ad_servers_ip:
|
||||
ad_servers_netmask = self.get_old_value('ad_peer_netmask', 'ad_servers_netmask')
|
||||
for ip, netmask in zip(ad_servers_ip, [nm[1] for nm in sorted(ad_servers_netmask.items())]):
|
||||
self.append_value('ad_peer_ip', ip)
|
||||
self.modify_last_value('ad_peer_netmask', netmask)
|
||||
del(self.unknown_options['ad_servers_ip'])
|
||||
del(self.unknown_options['ad_servers_netmask'])
|
||||
if ad_clients_ip:
|
||||
ad_clients_netmask = self.get_old_value('ad_peer_netmask', 'ad_clients_netmask')
|
||||
for ip, netmask in zip(ad_clients_ip, [nm[1] for nm in sorted(ad_clients_netmask.items())]):
|
||||
self.append_value('ad_peer_ip', ip)
|
||||
self.modify_last_value('ad_peer_netmask', netmask)
|
||||
del(self.unknown_options['ad_clients_ip'])
|
||||
del(self.unknown_options['ad_clients_netmask'])
|
||||
# Force SID
|
||||
force_sid = self.get_value('ad_domain_sid')
|
||||
if force_sid:
|
||||
self.set_value('ad_force_domain_sid', 'oui')
|
||||
# Squid modified variables : minutes -> seconds
|
||||
for squidvar in ['forward_timeout', 'connect_timeout', 'read_timeout', 'request_timeout', 'persistent_request_timeout']:
|
||||
squidval = self.get_value(squidvar)
|
||||
if squidval is not None and not self.is_default(squidvar):
|
||||
self.set_value(squidvar, squidval*60)
|
||||
# Exim relay : force activation only when upgrading from Scribe 2.6.1
|
||||
if self.var_exists('synchro_aaf'):
|
||||
self.set_value('exim_relay', 'oui')
|
||||
if self.get_value('activer_dhcp') == 'oui' and self.is_default('exim_relay_dhcp'):
|
||||
self.set_value('exim_relay_dhcp', 'oui')
|
||||
# Self-signed certificate modified by the user must be switched to manual
|
||||
if self.get_value('cert_type') == u'autosigné':
|
||||
cert_is_modified = False
|
||||
# set manuel to access to variable
|
||||
self.set_value('cert_type', u'manuel')
|
||||
for cert in ['server_cert', 'server_key', 'server_pem']:
|
||||
if not self.is_default(cert):
|
||||
cert_is_modified = True
|
||||
break
|
||||
if not cert_is_modified:
|
||||
self.set_value('cert_type', u'autosigné')
|
||||
# Store the self-signed certificate paths when switching to manual type
|
||||
if self.get_value('cert_type') == u'manuel':
|
||||
for cert, filename in [('server_cert', u'/etc/ssl/certs/eole.crt'), ('server_pem', u'/etc/ssl/certs/eole.pem')]:
|
||||
if self.is_default(cert):
|
||||
self.set_value(cert, filename)
|
||||
# gaspacho agent needs to go through port 8080 as in 2.6.1 and earlier
|
||||
if self.var_exists('gaspacho_https'):
|
||||
self.set_value('gaspacho_https', 'non')
|
||||
|
||||
|
||||
def upgrade2(major_version, old_release, current_release, config):
|
||||
"""
|
||||
major_version: version des scripts de migration (ex : 2.4)
|
||||
old_release: version du config.eol à migrer (ex : 2.4.0)
|
||||
current_release: version du serveur (ex : 2.5.1)
|
||||
config: objet de configuration Tiramisu
|
||||
"""
|
||||
def _get_max_release():
|
||||
"""
|
||||
Calcul du dernier numéro de release disponible pour la version majeure
|
||||
"""
|
||||
ends = 0
|
||||
for func in globals():
|
||||
if func.startswith(func_start):
|
||||
ends = max(ends, int(func.split('_')[-1]))
|
||||
return ends
|
||||
|
||||
old_version = '.'.join(old_release.split('.')[0:2])
|
||||
current_version = '.'.join(current_release.split('.')[0:2])
|
||||
func_start = 'Upgrade_' + "_".join(major_version.split('.'))
|
||||
if StrictVersion(current_version) == StrictVersion(old_version):
|
||||
# upgrade au sein d'une même version
|
||||
# ex : 2.5.1 -> 2.5.4 en 2.5
|
||||
starts = int(old_release.split('.')[-1])
|
||||
ends = int(current_release.split('.')[-1])
|
||||
elif StrictVersion(major_version) == StrictVersion(old_version):
|
||||
# upgrade "de base" vers une version supérieure
|
||||
# ex : 2.4.2 -> 2.6.1 en 2.4
|
||||
starts = int(old_release.split('.')[-1])
|
||||
ends = _get_max_release()
|
||||
elif StrictVersion(major_version) == StrictVersion(current_version):
|
||||
# upgrade "final" vers une version supérieure
|
||||
# ex : 2.4.2 -> 2.6.1 en 2.6
|
||||
starts = -1
|
||||
ends = int(current_release.split('.')[-1])
|
||||
else:
|
||||
# upgrade "intermédiaire" vers une version supérieure
|
||||
# ex : 2.4.2 -> 2.6.1 en 2.5
|
||||
starts = -1
|
||||
ends = _get_max_release()
|
||||
|
||||
for i in xrange(starts + 1, ends + 1):
|
||||
func = func_start + '_' + str(i)
|
||||
if func in globals():
|
||||
upgrade = globals()[func](config)
|
||||
upgrade.run()
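# --- illustrative aside (not part of the original) ---
# Standalone sketch of the dispatch rule implemented by upgrade2(): which
# Upgrade_X_Y_i classes are replayed for a given migration.  It compares the
# major versions as plain strings instead of StrictVersion, which is enough
# for this illustration; sample version numbers are hypothetical.
def releases_to_run(major_version, old_release, current_release, available):
    old_major = '.'.join(old_release.split('.')[:2])
    cur_major = '.'.join(current_release.split('.')[:2])
    if cur_major == old_major:
        start, end = int(old_release.split('.')[-1]), int(current_release.split('.')[-1])
    elif major_version == old_major:
        start, end = int(old_release.split('.')[-1]), max(available)
    elif major_version == cur_major:
        start, end = -1, int(current_release.split('.')[-1])
    else:
        start, end = -1, max(available)
    return [i for i in range(start + 1, end + 1) if i in available]

# 2.4.2 -> 2.6.1: the 2.5 scripts replay the whole 2.5 series ...
assert releases_to_run('2.5', '2.4.2', '2.6.1', {0, 1, 2}) == [0, 1, 2]
# ... and the 2.6 scripts stop at the target release.
assert releases_to_run('2.6', '2.4.2', '2.6.1', {0, 1, 2}) == [0, 1]
assert releases_to_run('2.4', '2.4.0', '2.6.1', {1, 2}) == [1, 2]
# --- end aside ---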
|
|
@ -1,28 +0,0 @@
|
|||
# -*- coding:utf-8 -*-
|
||||
|
||||
"""
|
||||
callbacks de validation personnalisés pour tiramisu
|
||||
|
||||
**utilisation**
|
||||
|
||||
faire des callbacks standards en cas de validation
|
||||
sur la configuration entière.
|
||||
la possibilité de validation personnalisable doit
|
||||
être utilisée *uniquement* pour des validations locales
|
||||
|
||||
**important**
|
||||
|
||||
la fonction ne doit pas lever d'exception, elle doit
|
||||
aboutir.
|
||||
|
||||
api
|
||||
:param value: premier paramètre, valeur de l'option
|
||||
les autres paramètres doivent être des
|
||||
paramètres **nommés**
|
||||
:return: True ou False suivant que l'option a été validée ou non
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
"""
|
|
@ -1,12 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
from formencode.validators import UnicodeString
|
||||
from formencode.api import Invalid
|
||||
|
||||
def valid_string(value, min=None, max=None, not_empty=True):
|
||||
try:
|
||||
UnicodeString(min=min, max=max, not_empty=not_empty
|
||||
).to_python(value)
|
||||
return True
|
||||
except Invalid:
|
||||
return False
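# --- illustrative aside (not part of the original) ---
# Standalone use of the same formencode validator as valid_string() above
# (assumes the formencode package is available, as the imports above do):
from formencode.validators import UnicodeString
from formencode.api import Invalid

try:
    UnicodeString(min=2, max=5, not_empty=True).to_python(u'abc')
    accepted = True
except Invalid:
    accepted = False
assert accepted
# --- end aside ---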
|
1750
creole/var_loader.py
|
@ -1,1750 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
try:
|
||||
from collections import OrderedDict
|
||||
except:
|
||||
from pyeole.odict import OrderedDict
|
||||
from copy import copy
|
||||
from os.path import isdir, isfile, join, basename, dirname
|
||||
from os import listdir
|
||||
|
||||
from .error import FileNotFound, ConfigError
|
||||
from .config import dtdfilename, VIRTBASE, VIRTROOT, VIRTMASTER
|
||||
from .dtd_parser import parse_dtd
|
||||
#from .lxml_parser import parse_xml_file, parse_string
|
||||
#don't touch this, for variables with eosfunc value
|
||||
#import eosfunc
|
||||
#from .utils import normalize_family
|
||||
|
||||
from .i18n import _
|
||||
|
||||
import tiramisu.option
|
||||
|
||||
from tiramisu.option import UnicodeOption, OptionDescription, PortOption, \
|
||||
IntOption, ChoiceOption, BoolOption, SymLinkOption, IPOption, \
|
||||
NetworkOption, NetmaskOption, DomainnameOption, BroadcastOption, \
|
||||
URLOption, EmailOption, FilenameOption, UsernameOption, DateOption, \
|
||||
PasswordOption, Option, Leadership
|
||||
|
||||
from tiramisu import Config
|
||||
from tiramisu.setting import groups
|
||||
#from tiramisu.error import PropertiesOptionError
|
||||
|
||||
####################################################
|
||||
# FIXME : Ajout option adresse mac
|
||||
from tiramisu import RegexpOption
|
||||
import re
|
||||
class MACOption(RegexpOption):
|
||||
__slots__ = tuple()
|
||||
_regexp = re.compile(r"^([0-9A-Fa-f]{2}[:]){5}([0-9A-Fa-f]{2})$")
|
||||
_display_name = _('mac address')
|
||||
####################################################
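# --- illustrative aside (not part of the original) ---
# Quick standalone check of the MAC-address pattern used by MACOption above:
import re
MAC_RE = re.compile(r"^([0-9A-Fa-f]{2}[:]){5}([0-9A-Fa-f]{2})$")
assert MAC_RE.match('00:11:22:aa:BB:cc') is not None
assert MAC_RE.match('00:11:22:aa:BB') is None
# --- end aside ---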
|
||||
|
||||
|
||||
CONVERT_DATA = {IntOption: int, UnicodeOption: str, PortOption: str,
|
||||
DomainnameOption: str, EmailOption: str, URLOption: str,
|
||||
IPOption: str, NetmaskOption: str, NetworkOption: str,
|
||||
BroadcastOption: str, FilenameOption: str}
|
||||
COMMON_KEY = {'container': UnicodeOption, 'container_group': UnicodeOption,
|
||||
'real_container': UnicodeOption, 'instance_mode': None,
|
||||
'exists': None, 'redefine': UnicodeOption}
|
||||
|
||||
|
||||
CONVERT_OPTION = {'number': (IntOption, None, None),
|
||||
'string': (UnicodeOption, None, None),
|
||||
'password': (PasswordOption, None, None),
|
||||
'mail': (EmailOption, None, None),
|
||||
'filename': (FilenameOption, None, None),
|
||||
'date': (DateOption, None, None),
|
||||
#restriction approchante
|
||||
'unix_user': (UsernameOption, None, None),
|
||||
'ip': (IPOption, None, {'allow_reserved': True}),
|
||||
'local_ip': (IPOption, None, {'private_only': True, 'warnings_only': True}),
|
||||
'netmask': (NetmaskOption, None, None),
|
||||
'network': (NetworkOption, None, None),
|
||||
'broadcast': (BroadcastOption, None, None),
|
||||
'netbios': (DomainnameOption, None, {'type_': 'netbios', 'warnings_only': True}),
|
||||
'domain': (DomainnameOption, None, {'type_': 'domainname', 'allow_ip': True, 'allow_without_dot': True}),
|
||||
'domain_strict': (DomainnameOption, None, {'type_': 'domainname', 'allow_ip': False}),
|
||||
'hostname': (DomainnameOption, None, {'type_': 'hostname', 'allow_ip': True}),
|
||||
'hostname_strict': (DomainnameOption, None, {'type_': 'hostname', 'allow_ip': False}),
|
||||
'web_address': (URLOption, None, {'allow_ip': True, 'allow_without_dot': True}),
|
||||
'port': (PortOption, None, {'allow_private': True}),
|
||||
'oui/non': (ChoiceOption, [u'oui', u'non'], None),
|
||||
'on/off': (ChoiceOption, [u'on', u'off'], None),
|
||||
'yes/no': (ChoiceOption, [u'yes', u'no'], None),
|
||||
'schedule': (ChoiceOption, [u'none', u'daily', u'weekly', u'monthly'], None),
|
||||
'schedulemod': (ChoiceOption, [u'pre', u'post'], None)}
|
||||
|
||||
type_option = {UnicodeOption: 'str', ChoiceOption: 'choice', IntOption: 'int',
|
||||
OptionDescription: 'optiondescription', Leadership: 'optiondescription', IPOption: 'ip',
|
||||
DomainnameOption: 'str', NetworkOption: 'ip', NetmaskOption: 'ip',
|
||||
FilenameOption: 'str', DateOption: 'str', EmailOption: 'str', URLOption: 'str',
|
||||
BroadcastOption: 'str', PortOption: 'str', UsernameOption: 'str', MACOption: 'str', # FIXME YO
|
||||
PasswordOption:'password'}
|
||||
type_option_convert = {'int': int, 'str': str, 'ip': str,
|
||||
'password': str,
|
||||
}
|
||||
|
||||
|
||||
#def force_unicode(val):
|
||||
# if val is not None and type(val) != unicode:
|
||||
# return unicode(val, 'utf-8')
|
||||
# else:
|
||||
# return val
|
||||
|
||||
def convert_value(option, value, config=None):
|
||||
_type = type_option[type(option)]
|
||||
if _type in type_option_convert:
|
||||
if value is not None:
|
||||
return type_option_convert[_type](value)
|
||||
elif _type == 'choice':
|
||||
values = option.impl_get_values(config)
|
||||
if value is None and u'' in values:
|
||||
value = u''
|
||||
if value not in values:
|
||||
raise ValueError(_("option {0}'s value should be in {1}".format(option._name, str(values))))
|
||||
return value
|
||||
|
||||
#===DUPLIQUE DANS ANNOTATOR
|
||||
#mode order is important
|
||||
modes_level = ('basic', 'normal', 'expert')
|
||||
class Mode(object):
|
||||
def __init__(self, name, level):
|
||||
self.name = name
|
||||
self.level = level
|
||||
|
||||
def __cmp__(self, other):
|
||||
return cmp(self.level, other.level)
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.level == other.level
|
||||
|
||||
def __ne__(self, other):
|
||||
return self.level != other.level
|
||||
|
||||
def __gt__(self, other):
|
||||
return other.level < self.level
|
||||
|
||||
def __ge__(self, other):
|
||||
return not self.level < other.level
|
||||
|
||||
def __le__(self, other):
|
||||
return not other.level < self.level
|
||||
|
||||
|
||||
def mode_factory():
|
||||
mode_obj = {}
|
||||
for idx in range(len(modes_level)):
|
||||
name = modes_level[idx]
|
||||
mode_obj[name] = Mode(name, idx)
|
||||
return mode_obj
|
||||
|
||||
modes = mode_factory()
|
||||
#/===
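# --- illustrative aside (not part of the original) ---
# The Mode objects above are meant to order the three levels; a standalone
# equivalent rebuilt with functools.total_ordering (an alternative shown for
# illustration, not what the original uses):
import functools

@functools.total_ordering
class _Mode(object):
    def __init__(self, name, level):
        self.name, self.level = name, level
    def __eq__(self, other):
        return self.level == other.level
    def __lt__(self, other):
        return self.level < other.level

basic, normal, expert = (_Mode(name, idx)
                         for idx, name in enumerate(('basic', 'normal', 'expert')))
assert basic < normal < expert
assert expert >= normal and basic <= basic
# --- end aside ---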
|
||||
def convert_tiramisu_value(value, obj):
|
||||
"""
|
||||
convertit les variables dans le bon type si nécessaire
|
||||
"""
|
||||
def _convert_boolean(value):
|
||||
if isinstance(value, bool):
|
||||
return value
|
||||
if value == 'True':
|
||||
return True
|
||||
elif value == 'False':
|
||||
return False
|
||||
elif value is None:
|
||||
return None
|
||||
else:
|
||||
raise Exception('unknown value {} while trying to cast {} to boolean'.format(value, obj))
|
||||
|
||||
if obj is BoolOption:
|
||||
if isinstance(value, list):
|
||||
# variable multi
|
||||
return [_convert_boolean(val) for val in value]
|
||||
else:
|
||||
return _convert_boolean(value)
|
||||
func = CONVERT_DATA.get(obj, None)
|
||||
if value == None or func == None:
|
||||
return value
|
||||
if type(value) is list:
|
||||
# variable multi
|
||||
return [func(val) for val in value]
|
||||
else:
|
||||
return func(value)
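# --- illustrative aside (not part of the original) ---
# Standalone check of the boolean conversion rule applied by _convert_boolean()
# above for 'True'/'False' strings coming from config.eol:
def to_bool(value):
    if isinstance(value, bool) or value is None:
        return value
    return {'True': True, 'False': False}[value]

assert to_bool('True') is True
assert to_bool(None) is None
assert [to_bool(val) for val in ['True', 'False']] == [True, False]
# --- end aside ---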
|
||||
|
||||
class CreoleGeneric():
|
||||
def gen_generic(self, name, paths, copy_requires=None,
|
||||
verify_exists_redefine=True):
|
||||
def _get_type(values):
|
||||
"""get type and values for ChoiceOption
|
||||
"""
|
||||
if values == None:
|
||||
return UnicodeOption, None
|
||||
elif set([True, False]) == set(values):
|
||||
return BoolOption, None
|
||||
else:
|
||||
return ChoiceOption, values
|
||||
|
||||
def build_key_type(name, pnode=''):
|
||||
#build key_type and choice_constrainte with 'needs' and 'optionals'
|
||||
#attribut
|
||||
key_type = {}
|
||||
for mode in ['needs', 'optionals']:
|
||||
for key, value in self.dtd[name][mode].items():
|
||||
#don't load COMMON_KEY and xxxlist and parentnodelist
|
||||
if key not in COMMON_KEY and key != '{0}list'.format(name) and key != '{0}list'.format(pnode):
|
||||
choice = None
|
||||
if value['type'] is not None:
|
||||
type_ = value['type']
|
||||
else:
|
||||
type_, choice = _get_type(value['values'])
|
||||
if choice != None:
|
||||
choice_constrainte[key] = choice
|
||||
key_type[key] = type_
|
||||
return key_type
|
||||
|
||||
containers = self._get_containers()
|
||||
tgeneric_vars = self.generic.get(name, [])
|
||||
generic_vars = []
|
||||
for data in tgeneric_vars:
|
||||
if data['container'] == 'all':
|
||||
# Generate per container
|
||||
for container in containers.values():
|
||||
if container['name'] in ['all', VIRTMASTER]:
|
||||
continue
|
||||
tdata = copy(data)
|
||||
tdata['container'] = container['name']
|
||||
generic_vars.append(tdata)
|
||||
else:
|
||||
generic_vars.append(data)
|
||||
#remove last 's' in name (hosts => host)
|
||||
if name[-1] == 's':
|
||||
name = name[:-1]
|
||||
#if name is a key of self.requires set requires_key to 'activate'
|
||||
if name in self.requires:
|
||||
requires_key = 'activate'
|
||||
else:
|
||||
requires_key = None
|
||||
choice_constrainte = {}
|
||||
key_type = build_key_type(name)
|
||||
#if sub node, add subkeys to key_type; be careful, all sub nodes
|
||||
#are mixed, 'node_name' is its node name
|
||||
for option in self.dtd[name]['options']:
|
||||
key_type.update(build_key_type(option, name))
|
||||
key_type['node_name'] = UnicodeOption
|
||||
key_type['level'] = UnicodeOption
|
||||
return self._gen_tiramisu_config(paths, name, generic_vars, key_type,
|
||||
choice_constrainte, requires_key,
|
||||
copy_requires=copy_requires,
|
||||
verify_exists_redefine=verify_exists_redefine)
|
||||
|
||||
def _check_instance_mode(self, data):
|
||||
"""Verify if the resource is to be instanciated
|
||||
|
||||
A resource can be tagged to be instantiated only when containers
|
||||
are enabled or disabled.
|
||||
|
||||
We check if the tagged instance mode matches the current state
|
||||
of the containers activation.
|
||||
|
||||
:param data: resource informations
|
||||
:type data: `dict`
|
||||
:return: resource instance mode match containers activation
|
||||
:rtype: `bool`
|
||||
|
||||
"""
|
||||
check = True
|
||||
if 'instance_mode' in data:
|
||||
mode = data['instance_mode']
|
||||
if self.containers_enabled and mode == 'when_no_container':
|
||||
check = False
|
||||
elif not self.containers_enabled and mode == 'when_container':
|
||||
check = False
|
||||
return check
|
||||
|
||||
def _config_list_to_dict(self, gvariables, verify_exists_redefine):
|
||||
"""
|
||||
validate variables in container context and return a dict
|
||||
(with variable's name has key)
|
||||
variables: list of variables
|
||||
"""
|
||||
def _test_new_variable(variable):
|
||||
"""
|
||||
test the variable's redefine and exists attributes
|
||||
variable: attribute of the variable
|
||||
"""
|
||||
return
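# NB: the unconditional return above short-circuits the redefine/exists checks
# that follow; they are kept but never reached.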
|
||||
if variable.get('redefine', False):
|
||||
raise ConfigError(
|
||||
_(u"{0} {1} redefined but unexistent.").format(gtype, name))
|
||||
if not variable.get('exists', True):
|
||||
raise ConfigError(_(u'{0} {1} existent.').format(gtype, name))
|
||||
|
||||
|
||||
variables = OrderedDict()
|
||||
containers = self._get_containers()
|
||||
for variable in gvariables:
|
||||
# Check if we activate the variable or not
|
||||
if not self._check_instance_mode(variable):
|
||||
continue
|
||||
name = variable['name']
|
||||
if variable.has_key('container'):
|
||||
#add container group
|
||||
variable['container_group'] = containers[variable['container']]['group']
|
||||
if self.containers_enabled:
|
||||
tcontainer = self.get_real_container_name(containers, variable['container_group'])
|
||||
variable['real_container'] = tcontainer
|
||||
else:
|
||||
variable['real_container'] = VIRTMASTER
|
||||
else:
|
||||
variable['container_group'] = variable['group']
|
||||
if self.containers_enabled:
|
||||
variable['real_container'] = variable['group']
|
||||
else:
|
||||
variable['real_container'] = VIRTMASTER
|
||||
#if variable already exist, verify if not in same container
|
||||
#if same container, verify redefine and exists attributs
|
||||
if variable.has_key('container') and name in variables:
|
||||
if verify_exists_redefine:
|
||||
is_exists = False
|
||||
for test in variables[name]:
|
||||
if test['container'] == variable['container']:
|
||||
is_exists = True
|
||||
break
|
||||
#if variable exists in same container
|
||||
if is_exists:
|
||||
if not variable.get('exists', True):
|
||||
continue
|
||||
if not variable.get('redefine', False):
|
||||
#var already exists
|
||||
raise ConfigError(_(u"Name ({0}) already used.").format(name))
|
||||
else:
|
||||
#variable exists in an other container
|
||||
_test_new_variable(variable)
|
||||
#FIXME : ajoute mais je modifie pas si exists !
|
||||
variables[name].append(variable)
|
||||
else:
|
||||
#var does not exist
|
||||
if verify_exists_redefine:
|
||||
_test_new_variable(variable)
|
||||
variables[name] = [variable]
|
||||
return variables
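
# Illustrative sketch (hypothetical resources): the shape of the returned
# OrderedDict, without the container resolution and redefine/exists checks
# performed above; declarations are grouped by name, one entry per container.
from collections import OrderedDict

def _example_group_by_name(resources):
    grouped = OrderedDict()
    for res in resources:
        grouped.setdefault(res['name'], []).append(res)
    return grouped

# _example_group_by_name([{'name': 'ntp', 'container': 'dns'},
#                         {'name': 'ntp', 'container': 'web'}])
# -> OrderedDict([('ntp', [<dns entry>, <web entry>])])
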
def _gen_tiramisu_config(self, paths, gtype, gvariables, key_type={},
|
||||
choice_constrainte={}, requires_key=None, copy_requires=None,
|
||||
verify_exists_redefine=True):
|
||||
"""
|
||||
Generate tiramisu's config for container's attributs
|
||||
|
||||
paths: paths of all Creole variables
|
||||
gtype: type of Creole attributs (file, service, ...)
|
||||
gvariables: attributs for generate tiramisu config
|
||||
key_type: type of each attribut key
|
||||
choice_constrainte:
|
||||
requires_key: apply requires for this key
|
||||
copy_requires: copy all requires for Symlink to OptionDescription
|
||||
"""
|
||||
variables = self._config_list_to_dict(gvariables, verify_exists_redefine)
|
||||
|
||||
#add common key type
|
||||
key_type.update(COMMON_KEY)
|
||||
key_type['{0}list'.format(gtype)] = UnicodeOption
|
||||
var = []
|
||||
|
||||
#parse dictionary generated by _config_list_to_dict
|
||||
for name, var_datas in variables.items():
|
||||
#parse the variable's attributes
|
||||
for var_data in var_datas:
|
||||
force_requires = []
|
||||
properties = tuple()
|
||||
if var_data.get('{0}list'.format(gtype), None) in \
|
||||
self.requires.get(gtype, {}):
|
||||
props, req = self.update_requires(
|
||||
self.requires[gtype][
|
||||
var_data['{0}list'.format(gtype)]]['list'], namespace='creole', option=True)
|
||||
if props != []:
|
||||
properties = tuple(props)
|
||||
requires = None
|
||||
else:
|
||||
requires = req
|
||||
else:
|
||||
requires = None
|
||||
options = []
|
||||
#add a tiramisu option for the given attribute
|
||||
for option_type, option_value in var_data.items():
|
||||
#if the option's type is defined in key_type
|
||||
if option_type in key_type:
|
||||
#get tiramisu's object
|
||||
option_obj = key_type[option_type]
|
||||
if isinstance(option_obj, str):
|
||||
option_obj = getattr(tiramisu.option, var_data[option_obj])
|
||||
elif option_type == 'name':
|
||||
#default option_obj
|
||||
option_obj = UnicodeOption
|
||||
#if type is set, get type
|
||||
if self.dtd[gtype]['type']:
|
||||
option_obj = self.dtd[gtype]['type']
|
||||
elif 'node_name' in var_data:
|
||||
#if no type, look up node_name and get the type from the node (this is a str, not an option)
|
||||
option_obj = getattr(tiramisu.option, var_data[self.dtd[var_data['node_name']]['type']])
|
||||
else:
|
||||
raise Exception(_(u'Unknown key {0}').format(option_type))
|
||||
option_value = convert_tiramisu_value(option_value, option_obj)
|
||||
#if value is None, don't generate tiramisu's option
|
||||
if option_obj and option_value is not None:
|
||||
#if option_type is requires_key, unset requires_key
|
||||
#and add requires for this key
|
||||
if option_type == requires_key:
|
||||
requires_key = None
|
||||
r = requires
|
||||
p = properties
|
||||
requires = None
|
||||
properties = tuple()
|
||||
else:
|
||||
r = None
|
||||
p = None
|
||||
|
||||
#gen tiramisu object
|
||||
if option_obj == ChoiceOption:
|
||||
options.append(option_obj(option_type, '',
|
||||
tuple(choice_constrainte[option_type]),
|
||||
default=option_value, requires=r,
|
||||
properties=p))
|
||||
elif option_obj == SymLinkOption:
|
||||
if r != None:
|
||||
raise Exception(
|
||||
_(u'No requires for SymLinkOption'))
|
||||
try:
|
||||
path = paths[option_value]
|
||||
except KeyError:
|
||||
raise Exception(
|
||||
_(u"SymLinkOption targetting unexistent variable: {0}.").format(option_value))
|
||||
namespace = path.split('.')[0]
|
||||
for descr in self.space:
|
||||
if descr._name == namespace:
|
||||
bopt = OptionDescription('baseconfig',
|
||||
'baseconfigdescr',
|
||||
[descr])
|
||||
opt = bopt
|
||||
for p in path.split('.'):
|
||||
opt = getattr(opt, p)
|
||||
if option_type == copy_requires:
|
||||
#aggregate all the requirements of the families/option
#so they also apply to the OptionDescription
|
||||
opt_path = path.split('.')
|
||||
for p in opt_path[:-1]:
|
||||
try:
|
||||
force_requires.extend(self.update_requires(self.requires['family'][p]['list'], 'creole', option=True)[1])
|
||||
except KeyError:
|
||||
pass
|
||||
try:
|
||||
force_requires.extend(self.update_requires(self.requires['variable'][opt_path[-1]]['list'],'creole', option=True)[1])
|
||||
not_mandatory = False
|
||||
for req_ in force_requires:
|
||||
if req_[2] == 'disabled' and req_[3] != False:
|
||||
not_mandatory = True
|
||||
if not not_mandatory and 'mandatory' in opt._properties:
|
||||
force_requires.append((opt, None, 'disabled', False, True, False))
|
||||
except KeyError:
|
||||
pass
|
||||
break
|
||||
|
||||
options.append(option_obj(option_type, opt))
|
||||
else:
|
||||
options.append(option_obj(option_type, '',
|
||||
default=option_value, requires=r, properties=p))
|
||||
|
||||
#if requires_key is not already set
|
||||
if requires_key:
|
||||
options.append(BoolOption(requires_key, '', default=True,
|
||||
requires=requires, properties=properties))
|
||||
requires = None
|
||||
properties = tuple()
|
||||
level = len(var)
|
||||
if force_requires != []:
|
||||
if requires == None:
|
||||
requires = force_requires
|
||||
else:
|
||||
requires.extend(force_requires)
|
||||
|
||||
var.append(OptionDescription(gtype + str(level),
|
||||
'', options, requires=requires, properties=properties))
|
||||
return OptionDescription('{0}s'.format(gtype), '', var)
|
||||
|
||||
def gen_container(self, paths, namespace):
|
||||
ret = []
|
||||
if 'gen_networks' in dir(self):
|
||||
ret.append(self.gen_networks(paths))
|
||||
for name in self.generic:
|
||||
func_name = 'gen_{0}'.format(name)
|
||||
if func_name in dir(self):
|
||||
ret.append(getattr(self, func_name)(paths))
|
||||
else:
|
||||
ret.append(self.gen_generic(name, paths))
|
||||
return ret
|
||||
|
||||
def _get_containers(self):
|
||||
"""
|
||||
Load container's description
|
||||
"""
|
||||
containers = OrderedDict()
|
||||
containers_id = OrderedDict()
|
||||
for container in self.generic.get('containers', []):
|
||||
name = container['name']
|
||||
if not containers.has_key(name):
|
||||
containers[name] = {'name': name, 'group': name}
|
||||
if container.has_key('id') and container['id'] is not None:
|
||||
id_ = container['id']
|
||||
if id_ in containers_id and containers_id[id_] != name:
|
||||
raise ConfigError(_(u"Two containers with the same id ({0})").format(id_))
|
||||
if name in containers_id.values() and containers_id.get(id_) != name:
|
||||
raise ConfigError(_(u"Multiple ids for the container {0}").format(name))
|
||||
containers_id[id_] = name
|
||||
containers[name]['id'] = id_
|
||||
if container.has_key('group') and container['group'] is not None:
|
||||
containers[name]['group'] = container['group']
|
||||
|
||||
for name, container in containers.items():
|
||||
group = container['group']
|
||||
if name != group and group in containers:
|
||||
containers[name]['id'] = containers[group]['id']
|
||||
return containers
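
# Illustrative sketch (hypothetical <container> declarations): a container that
# only names a group ends up sharing the id of that group's container.
_example_declared = [{'name': 'web', 'id': '20', 'group': None},
                     {'name': 'phpmyadmin', 'id': None, 'group': 'web'}]
_example_containers = {}
for _cont in _example_declared:
    _entry = _example_containers.setdefault(
        _cont['name'], {'name': _cont['name'], 'group': _cont['name']})
    if _cont['id'] is not None:
        _entry['id'] = _cont['id']
    if _cont['group'] is not None:
        _entry['group'] = _cont['group']
for _name, _cont in _example_containers.items():
    if _name != _cont['group'] and _cont['group'] in _example_containers:
        _cont['id'] = _example_containers[_cont['group']]['id']
# _example_containers['phpmyadmin'] -> {'name': 'phpmyadmin', 'group': 'web', 'id': '20'}
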
def gen_containers_creole(self, paths, namespace):
|
||||
"""
|
||||
Generate fake config.creole.containers hidden family.
|
||||
Each container has two UnicodeOption:
|
||||
container_ip_//name// and container_path_//name//
|
||||
|
||||
:paths: paths variables (for added new option in paths's dictionnary)
|
||||
"""
|
||||
if self.containers_enabled:
|
||||
ip_br0 = u'192.0.2.1'
|
||||
mask_br0 = u'255.255.255.0'
|
||||
network_br0 = u'192.0.2.0'
|
||||
bcast_br0 = u'192.0.2.255'
|
||||
else:
|
||||
ip_br0 = u'127.0.0.1'
|
||||
mask_br0 = u'255.0.0.0'
|
||||
network_br0 = u'127.0.0.0'
|
||||
bcast_br0 = u'127.255.255.255'
|
||||
|
||||
variables = []
|
||||
args = {'name': 'adresse_ip_br0', 'doc': _(u"Bridge IP address"), 'default': ip_br0, 'requires': None}
|
||||
variables.append({'optiontype': 'option', 'obj': UnicodeOption, 'args': args, 'option': None})
|
||||
args = {'name': 'adresse_netmask_br0', 'doc': _(u"Bridge IP subnet mask"), 'default': mask_br0, 'requires': None}
|
||||
variables.append({'optiontype': 'option', 'obj': UnicodeOption, 'args': args, 'option': None})
|
||||
args = {'name': 'adresse_network_br0', 'doc': _(u"Bridge IP network_br0 address"), 'default': network_br0, 'requires': None}
|
||||
variables.append({'optiontype': 'option', 'obj': UnicodeOption, 'args': args, 'option': None})
|
||||
args = {'name': 'adresse_broadcast_br0', 'doc': _(u"Bridge broadcast IP address"), 'default': bcast_br0, 'requires': None}
|
||||
variables.append({'optiontype': 'option', 'obj': UnicodeOption, 'args': args, 'option': None})
|
||||
for name in ['adresse_ip_br0', 'adresse_netmask_br0',
|
||||
'adresse_network_br0', 'adresse_broadcast_br0']:
|
||||
paths[name] = 'creole.containers.{0}'.format(name)
|
||||
|
||||
containers = self._get_containers()
|
||||
for name, container in containers.items():
|
||||
if name == 'all':
|
||||
ip = None
|
||||
path = None
|
||||
real_name = u'all'
|
||||
elif not self.containers_enabled or name == VIRTMASTER:
|
||||
path = u''
|
||||
ip = u'127.0.0.1'
|
||||
real_name = unicode(VIRTMASTER)
|
||||
else:
|
||||
tcontainer = self.get_real_container_name(containers, container['name'])
|
||||
real_name = unicode(tcontainer)
|
||||
path = unicode(join(VIRTROOT, real_name, VIRTBASE))
|
||||
#FIXME: the IP is not always this one
|
||||
ip = u"192.0.2." + container['id']
|
||||
# Variable : container_path_<conteneur>
|
||||
path_name = 'container_path_{0}'.format(name)
|
||||
args = {'name': path_name, 'doc': _(u'Path of container {0}').format(name), 'default': path, 'requires': None}
|
||||
variables.append({'optiontype': 'option', 'obj': UnicodeOption, 'args': args, 'option': None})
|
||||
paths[path_name] = 'creole.containers.{0}'.format(path_name)
|
||||
# Variable : container_ip_<conteneur>
|
||||
ip_name = 'container_ip_{0}'.format(name)
|
||||
args = {'name': ip_name, 'doc': _(u'IP address of container {0}').format(name), 'default': ip, 'requires': None}
|
||||
variables.append({'optiontype': 'option', 'obj': UnicodeOption, 'args': args, 'option': None})
|
||||
paths[ip_name] = 'creole.containers.{0}'.format(ip_name)
|
||||
# Variable : container_name_<conteneur>
|
||||
name_name = 'container_name_{0}'.format(name)
|
||||
args = {'name': name_name, 'doc': _(u'Group name of container {0}').format(name), 'default': real_name, 'requires': None}
|
||||
variables.append({'optiontype': 'option', 'obj': UnicodeOption, 'args': args, 'option': None})
|
||||
paths[name_name] = 'creole.containers.{0}'.format(name_name)
|
||||
# Variable : adresse_ip_<conteneur>
|
||||
# adresse_ip_<container> added for compat 2.3 (#5701, #5868)
|
||||
adresse_name = 'adresse_ip_{0}'.format(name)
|
||||
if adresse_name not in self.variables:
|
||||
if not self.containers_enabled:
|
||||
# hack to have "localhost" in non container mode #7183
|
||||
args = {'name': adresse_name, 'doc': _(u'Path of container {0}').format(name), 'default': u'localhost',
|
||||
'properties': ('frozen', 'force_default_on_freeze'), 'requires': None}
|
||||
variables.append({'optiontype': 'option', 'obj': UnicodeOption, 'args': args, 'option': None})
|
||||
else:
|
||||
variables.append({'optiontype': 'symlinkoption', 'obj': SymLinkOption, 'path': paths[ip_name], 'args': {'name': adresse_name}, 'option': None})
|
||||
paths[adresse_name] = 'creole.containers.{0}'.format(adresse_name)
|
||||
variables_path = []
|
||||
for var in variables:
|
||||
path = 'containers.' + var['args']['name']
|
||||
self.options[namespace][path] = var
|
||||
variables_path.append(path)
|
||||
fname = 'containers'
|
||||
self.options[namespace][fname] = {'optiontype': 'optiondescription',
|
||||
'args': {'name': fname,
|
||||
'doc': _('Containers informations'),
|
||||
'children': variables_path,
|
||||
'properties': ('hidden', 'normal'),
|
||||
'requires': None},
|
||||
'group_type': 'family',
|
||||
'informations': {'icon': 'puzzle-piece'},
|
||||
'option': None}
|
||||
return fname
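
# Illustrative sketch (hypothetical container 'web' with resolved id '20',
# assuming VIRTROOT='/var/lib/lxc' and VIRTBASE='rootfs'): how the hidden
# creole.containers variables are derived when containers are enabled.
from os.path import join as _join
_name, _cid = 'web', '20'
_example_vars = {
    'container_ip_' + _name:   u'192.0.2.' + _cid,               # bridge subnet above
    'container_path_' + _name: _join('/var/lib/lxc', _name, 'rootfs'),
    'container_name_' + _name: u'web',
}
# without containers every IP falls back to u'127.0.0.1' and the 2.3
# compatibility variable adresse_ip_<name> is frozen to u'localhost'
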
class CreoleFamily():
|
||||
"""
|
||||
charge les familles, les variables, les aides et séparateurs
|
||||
"""
|
||||
def _init_creole_family(self):
|
||||
"""
|
||||
initialise les variables pour les familles
|
||||
"""
|
||||
self.families = OrderedDict()
|
||||
#only for find old variable
|
||||
self.variables = {}
|
||||
self.helps = {'variables':{}, 'families':{}}
|
||||
self.separators = {}
|
||||
self.groups = {}
|
||||
|
||||
def populate_families(self, families, namespace):
|
||||
for family, fdata in families.items():
|
||||
nfamily = normalize_family(family)
|
||||
lvars = OrderedDict()
|
||||
for var, vdata in fdata['vars'].items():
|
||||
variable = self.get_variable(var, vdata, nfamily, namespace)
|
||||
if variable is not None:
|
||||
lvars[var] = variable
|
||||
if vdata.get('remove_check', False):
|
||||
try:
|
||||
self.valid_enum.pop(var)
|
||||
except KeyError:
|
||||
pass
|
||||
try:
|
||||
self.consistency.pop(var)
|
||||
except KeyError:
|
||||
pass
|
||||
try:
|
||||
self.check.pop(var)
|
||||
except KeyError:
|
||||
pass
|
||||
if vdata.get('remove_condition', False):
|
||||
try:
|
||||
self.requires['variable'].pop(var)
|
||||
except KeyError:
|
||||
pass
|
||||
# if the family does not exist, add it
|
||||
if not self.families.has_key(nfamily):
|
||||
# family definition
|
||||
fdata['vars'] = OrderedDict()
|
||||
self.families[nfamily] = copy(fdata)
|
||||
self.families[nfamily]['mode'] = modes_level[0]
|
||||
self.families[nfamily]['hidden'] = False
|
||||
self.families[nfamily]['doc'] = str(family.encode('utf8'))
|
||||
self.families[nfamily]['vars'].update(lvars)
|
||||
#do not reset the mode to normal when redefining
|
||||
if 'mode' in fdata and fdata['mode'] not in [modes_level[0], None]:
|
||||
self.families[nfamily]['mode'] = fdata['mode']
|
||||
if 'icon' in fdata and fdata['icon'] is not None:
|
||||
self.families[nfamily]['icon'] = fdata['icon']
|
||||
if 'hidden' in fdata:
|
||||
self.families[nfamily]['hidden'] = fdata['hidden']
|
||||
self.families[nfamily]['vars'].update(lvars)
|
||||
|
||||
|
||||
def get_variable(self, var, vdata, family, namespace):
|
||||
#if the previous definition of the variable was not to be taken into account
#exists == False => bail out immediately
#and ignore what follows
|
||||
if namespace == 'creole' and var in self.variables:
|
||||
if not vdata['exists']:
|
||||
return None
|
||||
if not vdata['redefine']:
|
||||
# we should not have the same variable twice
|
||||
raise ConfigError(_(u"Two variables with the same name ({0})").format(var))
|
||||
elif vdata['redefine']:
|
||||
raise ConfigError(_(u"Attempt to redefine unexistent variable: {0}.").format(var))
|
||||
#if it is a new variable
|
||||
if not vdata['redefine']:
|
||||
# Be sure to have defaults on new variables
|
||||
tvar = self._update_variable_attributes(var, vdata)
|
||||
#only in the redefine case
|
||||
else:
|
||||
old_family = self.variables[var]
|
||||
if old_family != family:
|
||||
tvar = self.families[old_family]['vars'][var]
|
||||
self.families[old_family]['vars'].pop(var)
|
||||
else:
|
||||
tvar = self.families[family]['vars'][var]
|
||||
if vdata['value'] != None:
|
||||
tvar['value'] = vdata['value']
|
||||
tvar = self._update_variable_attributes(var, tvar, vdata)
|
||||
self.variables[var] = family
|
||||
return tvar
|
||||
|
||||
def _update_variable_attributes(self, var, vdata, newdata=None):
|
||||
"""Update variable attributes.
|
||||
|
||||
If :data:`newdata` is ``None``, set default, update to new
|
||||
value otherwise.
|
||||
|
||||
:param var: variable name
|
||||
:type var: `str`
|
||||
:param vdata: variable attributes
|
||||
:type vdata: `dict`
|
||||
:param newdata: new variable attributes
|
||||
:type newdata: `dict`
|
||||
:return: variable attributes
|
||||
|
||||
"""
|
||||
attrs = vdata.copy()
|
||||
|
||||
if newdata and newdata['multi']:
|
||||
raise ValueError(_(u"Redefining multi attribute is not allowed"
|
||||
" for variable {0}").format(var))
|
||||
if newdata and newdata['type'] != 'string':
|
||||
raise ValueError(_(u"Redefining type attribute is not allowed"
|
||||
" for variable {0}").format(var))
|
||||
for attr in ['auto_freeze', 'auto_save', 'hidden', 'mandatory', 'redefine']:
|
||||
# Default value is False
|
||||
if attr not in vdata or vdata[attr] is None:
|
||||
attrs[attr] = False
|
||||
elif newdata is not None and attr in newdata \
|
||||
and newdata[attr] is not None \
|
||||
and vdata[attr] != newdata[attr]:
|
||||
attrs[attr] = newdata[attr]
|
||||
|
||||
if 'exists' not in vdata or vdata['exists'] is None:
|
||||
attrs['exists'] = True
|
||||
elif newdata is not None and 'exists' in newdata \
|
||||
and newdata['exists'] is not None \
|
||||
and vdata['exists'] != newdata['exists']:
|
||||
attrs['exists'] = newdata['exists']
|
||||
|
||||
if 'mode' not in vdata or vdata['mode'] is None:
|
||||
attrs['mode'] = 'normal'
|
||||
elif newdata is not None and 'mode' in newdata \
|
||||
and newdata['mode'] is not None \
|
||||
and vdata['mode'] != newdata['mode']:
|
||||
attrs['mode'] = newdata['mode']
|
||||
|
||||
if newdata is not None and 'description' in newdata \
|
||||
and newdata['description'] is not None \
|
||||
and vdata['description'] != newdata['description']:
|
||||
attrs['description'] = newdata['description']
|
||||
|
||||
if vdata['disabled'] is True or (newdata is not None and newdata['disabled'] is True):
|
||||
attrs['disabled'] = True
|
||||
|
||||
return attrs
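
# Illustrative sketch of the defaulting half of the method (first definition,
# newdata=None): unset boolean attributes become False, exists becomes True and
# mode becomes 'normal'.
def _example_apply_defaults(vdata):
    attrs = vdata.copy()
    for attr in ('auto_freeze', 'auto_save', 'hidden', 'mandatory', 'redefine'):
        if attrs.get(attr) is None:
            attrs[attr] = False
    if attrs.get('exists') is None:
        attrs['exists'] = True
    if attrs.get('mode') is None:
        attrs['mode'] = 'normal'
    return attrs
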
def populate_helps(self, helps, namespace):
|
||||
"""
|
||||
"""
|
||||
for key, values in helps['variables'].items():
|
||||
vdata = self.families[self.variables[key]]['vars'][key]
|
||||
if self.helps['variables'].has_key(key) and not vdata['redefine']:
|
||||
raise ConfigError(_(u"help already set for {0}").format(key))
|
||||
else:
|
||||
self.helps['variables'][key] = values
|
||||
for key, values in helps['families'].items():
|
||||
key = normalize_family(key)
|
||||
fdata = self.families[key]
|
||||
if self.helps['families'].has_key(key) and not fdata['redefine']:
|
||||
raise ConfigError(_(u"help already set for {0}").format(key))
|
||||
else:
|
||||
self.helps['families'][key] = values
|
||||
|
||||
def populate_separators(self, separators, namespace):
|
||||
"""
|
||||
"""
|
||||
#should be stored in the variable rather than in self.separators
|
||||
for var, value in separators.items():
|
||||
if self.separators.has_key(var):
|
||||
raise ConfigError(_(u"More than one separator for "
|
||||
"{0}").format(var))
|
||||
else:
|
||||
self.separators[var] = value
|
||||
|
||||
def populate_groups(self, groups_, namespace):
|
||||
for grp_name, grps in groups_.items():
|
||||
self.groups.setdefault(grp_name, []).extend(grps)
|
||||
|
||||
class CreoleConstraint():
|
||||
"""
|
||||
charge les contraintes
|
||||
"""
|
||||
def _init_creole_constrainte(self):
|
||||
self.valid_enum = {}
|
||||
self.mandatory = []
|
||||
self.fill = {}
|
||||
self.auto = {}
|
||||
self.check = {}
|
||||
self.consistency = {}
|
||||
|
||||
def populate_conditions(self, conditions, namespace):
|
||||
#FIXME: just the hidden_if_in|hidden_if_not_in conditions
|
||||
for var, _conditions in conditions.items():
|
||||
for condition in _conditions:
|
||||
if condition['name'] in ['hidden_if_in', 'disabled_if_in']:
|
||||
conds = [('disabled', False)]
|
||||
elif condition['name'] in ['hidden_if_not_in',
|
||||
'disabled_if_not_in']:
|
||||
conds = [('disabled', True)]
|
||||
elif condition['name'] == 'frozen_if_in':
|
||||
conds = [('frozen', False), ('hidden', False), ('force_default_on_freeze', False)]
|
||||
elif condition['name'] == 'frozen_if_not_in':
|
||||
conds = [('frozen', True), ('hidden', True), ('force_default_on_freeze', True)]
|
||||
elif condition['name'] in ['mandatory_if_in']:
|
||||
conds = [('mandatory', False)]
|
||||
elif condition['name'] in ['mandatory_if_not_in']:
|
||||
conds = [('mandatory', True)]
|
||||
else:
|
||||
raise Exception(_(u'Unknown condition type for {0}').format(
|
||||
condition['name']))
|
||||
families = condition['family']
|
||||
variables = condition['variable']
|
||||
for params in condition['param']:
|
||||
if params['type']:
|
||||
raise Exception(_(u'Unknown type {0}').format(
|
||||
params['type']))
|
||||
if params['hidden']:
|
||||
raise Exception(_(u'Unknown hidden {0}').format(
|
||||
params['hidden']))
|
||||
if params['name']:
|
||||
raise Exception(_(u'Unknown name {0}').format(
|
||||
params['name']))
|
||||
if params['optional']:
|
||||
raise Exception(_(u'Unknown optional {0}').format(
|
||||
params['optional']))
|
||||
value = params['value']
|
||||
tconditions = []
|
||||
for cond in conds:
|
||||
tconditions.append((var, value, cond[0], cond[1]))
|
||||
for variable, optional in variables:
|
||||
#if optional is not set for only one condition, always not optional
|
||||
self.requires['variable'].setdefault(variable, {'optional': True, 'list': []})
|
||||
if not optional:
|
||||
self.requires['variable'][variable]['optional'] = optional
|
||||
self.requires['variable'][variable]['list'].extend(tconditions)
|
||||
for family, optional in families:
|
||||
#FIXME optional not used
|
||||
family = normalize_family(family)
|
||||
#if optional is not set for only one condition, always not optional
|
||||
self.requires['family'].setdefault(family, {'optional': True, 'list': []})
|
||||
if not optional:
|
||||
self.requires['family'][family]['optional'] = optional
|
||||
self.requires['family'][family]['list'].extend(tconditions)
|
||||
for list_name, list_value, optional in condition['list']:
|
||||
#FIXME optional not used
|
||||
#if optional is not set for only one condition, always not optional
|
||||
self.requires[list_name].setdefault(list_value, {'optional': True, 'list': []})
|
||||
if not optional:
|
||||
self.requires[list_name][list_value]['optional'] = optional
|
||||
self.requires[list_name][list_value]['list'].extend(tconditions)
|
||||
self.fallback[var] = condition['fallback']
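
# Illustrative summary of the dispatch above: XML condition name -> the
# (property, inverse) pairs that end up in self.requires.
_EXAMPLE_CONDITION_MAP = {
    'hidden_if_in':        [('disabled', False)],
    'disabled_if_in':      [('disabled', False)],
    'hidden_if_not_in':    [('disabled', True)],
    'disabled_if_not_in':  [('disabled', True)],
    'frozen_if_in':        [('frozen', False), ('hidden', False),
                            ('force_default_on_freeze', False)],
    'frozen_if_not_in':    [('frozen', True), ('hidden', True),
                            ('force_default_on_freeze', True)],
    'mandatory_if_in':     [('mandatory', False)],
    'mandatory_if_not_in': [('mandatory', True)],
}
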
def _populate_func(self, datas, _type, namespace):
|
||||
"""
|
||||
to populate auto or fill
|
||||
"""
|
||||
data = {}
|
||||
for target, funcs in datas.items():
|
||||
if len(funcs) != 1:
|
||||
raise Exception(_(u'More than one function for target: {0}').format(target))
|
||||
func_name = funcs[0][0]
|
||||
func_params = funcs[0][1]
|
||||
func_level = funcs[0][2]
|
||||
if func_level != 'error':
|
||||
raise Exception(_(u"Can not set level to {0} for this kind of callback").format(func_level))
|
||||
params = {}
|
||||
for param in func_params:
|
||||
name = {None: ''}.get(param['name'], param['name'])
|
||||
if param['type'] == None:
|
||||
params.setdefault(name, []).append(unicode(param['value']))
|
||||
elif param['type'] == 'eole':
|
||||
check_disabled = param['hidden'] == "False"
|
||||
optional = param['optional'] == 'True'
|
||||
value = param['value']
|
||||
if '.' in value:
|
||||
ns, value = value.split('.', 1)
|
||||
if ns != namespace:
|
||||
raise Exception(_('Namespace different in param not allowed: {} - {}').format(ns, namespace))
|
||||
params.setdefault(name, []).append({'optional': optional,
|
||||
'check_disabled': check_disabled,
|
||||
'value': value})
|
||||
elif param['type'] == 'number':
|
||||
params.setdefault(name, []).append(int(param['value']))
|
||||
elif param['type'] == 'container':
|
||||
#for compatibility with 2.3 dictionaries (#6240)
# replace the container information dictionary
# with the IP of the requested container
|
||||
params.setdefault(name, []).append({'optional': False,
|
||||
'check_disabled': False,
|
||||
'value': 'container_ip_' + param['value']})
|
||||
elif param['type'] == 'context':
|
||||
params.setdefault(name, []).append((None,))
|
||||
else:
|
||||
raise Exception(_(u'Type {0} not yet implemented '
|
||||
u'for {1} for {2}').format(param['type'], _type,
|
||||
target))
|
||||
if namespace != 'creole' and '.' in target:
|
||||
#if extra and variable in extra (so with complet path)
|
||||
#don't support redefine
|
||||
vdata = {'redefine': False}
|
||||
else:
|
||||
vdata = self.families[self.variables[target]]['vars'][target]
|
||||
#6016
|
||||
if _type in ['auto', 'fills'] and vdata.get('value') is not None and \
|
||||
vdata['redefine']:
|
||||
vdata['value'] = None
|
||||
if (_type == 'check' and target in self.check.keys()) or \
|
||||
(_type != 'check' and (target in self.fill.keys() or
|
||||
target in self.auto.keys()) and not vdata['redefine']):
|
||||
raise Exception(_(u"Computing function already defined for {0}").format(
|
||||
target))
|
||||
if _type != 'check':
|
||||
if target in self.fill:
|
||||
del(self.fill[target])
|
||||
if target in self.auto:
|
||||
del(self.auto[target])
|
||||
data[target] = (func_name, params)
|
||||
return data
|
||||
|
||||
def populate_checks(self, checks, namespace):
|
||||
#FIXME: we should look into removing the old ones as before
|
||||
for var, _checks in checks.items():
|
||||
for check in _checks:
|
||||
if check[0] == 'valid_enum':
|
||||
open_values = False
|
||||
for param in check[1]:
|
||||
if param['name'] == 'checkval':
|
||||
open_values = not {'True': True,
|
||||
'False': False}.get(param['value'])
|
||||
tvalues = eval(check[1][0]['value'])
|
||||
values = []
|
||||
for value in tvalues:
|
||||
if type(value) == str:
|
||||
values.append(unicode(value, 'utf-8'))
|
||||
else:
|
||||
values.append(value)
|
||||
self.valid_enum[var] = (values, open_values)
|
||||
elif check[0] == 'obligatoire':
|
||||
self.mandatory.append(var)
|
||||
elif check[0] == 'valid_differ' and check[1][0]['type'] == 'eole':
|
||||
if len(check[1]) != 1:
|
||||
raise Exception(_(u'valid_differ length should be 1'))
|
||||
self.consistency.setdefault(var, []).append(('not_equal', check[1][0], check[2]))
|
||||
elif check[0] == 'valid_networknetmask':
|
||||
if len(check[1]) != 1:
|
||||
raise Exception(_(u'valid_networknetmask length should be 1'))
|
||||
if check[1][0]['type'] != 'eole':
|
||||
raise Exception(_(u'valid_networknetmask must have only eole variable'))
|
||||
self.consistency.setdefault(var, []).append(('network_netmask', check[1][0], check[2]))
|
||||
elif check[0] == 'valid_ipnetmask':
|
||||
if len(check[1]) != 1:
|
||||
raise Exception(_(u'valid_ipnetmask length should be 1'))
|
||||
if check[1][0]['type'] != 'eole':
|
||||
raise Exception(_(u'valid_ipnetmask must have only eole variable'))
|
||||
self.consistency.setdefault(var, []).append(('ip_netmask', check[1][0], check[2]))
|
||||
elif check[0] == 'valid_broadcast':
|
||||
if len(check[1]) != 2:
|
||||
raise Exception(_(u'valid_broadcast length should be 2'))
|
||||
error = False
|
||||
try:
|
||||
if check[1][0]['type'] != 'eole' or check[1][1]['type'] != 'eole':
|
||||
error = True
|
||||
except IndexError:
|
||||
error = True
|
||||
if error:
|
||||
raise Exception(_(u'valid_broadcast must have only eole variable'))
|
||||
self.consistency.setdefault(var, []).append(('broadcast', check[1][0], check[1][1], check[2]))
|
||||
elif check[0] == 'valid_in_network':
|
||||
if len(check[1]) != 2:
|
||||
raise Exception(_(u'valid_in_network length should be 2'))
|
||||
error = False
|
||||
try:
|
||||
if check[1][0]['type'] != 'eole' or check[1][1]['type'] != 'eole':
|
||||
error = True
|
||||
except IndexError:
|
||||
error = True
|
||||
if error:
|
||||
raise Exception(_(u'valid_in_network must have only eole variable'))
|
||||
self.consistency.setdefault(var, []).append(('in_network', check[1][0], check[1][1], check[2]))
|
||||
else:
|
||||
self.check.update(self._populate_func({var: [check]},
|
||||
'check', namespace))
|
||||
|
||||
def populate_fills(self, fills, namespace):
|
||||
self.fill.update(self._populate_func(fills, 'fill', namespace))
|
||||
|
||||
def populate_autos(self, autos, namespace):
|
||||
self.auto.update(self._populate_func(autos, 'auto', namespace))
|
||||
|
||||
class CreoleVarLoader(CreoleFamily, CreoleConstraint, CreoleGeneric):
|
||||
def __init__(self, no_auto_store=False):
|
||||
self.space = []
|
||||
self._config = None
|
||||
self.is_lint = False
|
||||
self.dtd = parse_dtd(dtdfilename)
|
||||
self.containers_enabled = None
|
||||
self.options = {}
|
||||
self.paths = {}
|
||||
self.no_auto_store = no_auto_store
|
||||
self.force_store_vars = set()
|
||||
self.actions = {}
|
||||
|
||||
def _init_creole_varloader(self):
|
||||
self.variables = OrderedDict()
|
||||
self.generic = {}
|
||||
# Generate empty trees
|
||||
for opt in self.dtd['container']['options']:
|
||||
self.generic[opt + 's'] = []
|
||||
|
||||
def read_string(self, data_dicts, namespace, test_duplicate):
|
||||
"""
|
||||
lecture d'un ensemble de dictionnaires et d'un
|
||||
configuration passés en paramètres (Zéphir)
|
||||
data_dicts : données des dictionnaires encodés en base64 et ordonnés
|
||||
"""
|
||||
self._pre_populate(namespace)
|
||||
# parse the provided dictionaries
|
||||
for dico in data_dicts:
|
||||
is_creole_constrainte = 'gen_container' in dir(self)
|
||||
parse_result = parse_string(dico, self.dtd, is_creole_constrainte, test_duplicate)
|
||||
#FIXME: consider something other than 'module'
|
||||
self._populate(parse_result, namespace, 'module')
|
||||
self._post_populate(namespace)
|
||||
# load the values from the json format
|
||||
self._gen_descr(namespace)
|
||||
|
||||
def read_dir(self, dir_config, namespace, force_test_duplicate=None):
|
||||
"""
|
||||
lecture d'un répertoire entier de dictionnaires
|
||||
"""
|
||||
self._pre_populate(namespace)
|
||||
if type(dir_config) != list:
|
||||
#if dir_config is not a list, add subdirectory 'local'
|
||||
#and 'variante'
|
||||
orig_dir = dir_config
|
||||
dir_config = [dir_config]
|
||||
for tdir in [join(orig_dir, 'local'),
|
||||
join(orig_dir, 'variante')]:
|
||||
if isdir(tdir):
|
||||
dir_config.append(tdir)
|
||||
if namespace == 'creole':
|
||||
if force_test_duplicate is not None:
|
||||
test_duplicate = force_test_duplicate
|
||||
else:
|
||||
test_duplicate = True
|
||||
else:
|
||||
test_duplicate = False
|
||||
for mydir in dir_config:
|
||||
if type(mydir) in (list, tuple):
|
||||
# directory group : collect files from each
|
||||
# directory and sort them before loading
|
||||
group_files = []
|
||||
for idx, subdir in enumerate(mydir):
|
||||
if isdir(subdir):
|
||||
for filename in listdir(subdir):
|
||||
group_files.append((filename, idx, subdir))
|
||||
else:
|
||||
group_files.append((basename(subdir), idx, dirname(subdir)))
|
||||
def sort_group(file1, file2):
|
||||
if file1[0] == file2[0]:
|
||||
# sort by initial mydir order if same name
|
||||
return file1[1].__cmp__(file2[1])
|
||||
# sort by filename
|
||||
elif file1[0] > file2[0]:
|
||||
return 1
|
||||
else:
|
||||
return -1
|
||||
group_files.sort(sort_group)
|
||||
filenames = [join(f[2], f[0]) for f in group_files]
|
||||
elif isdir(mydir):
|
||||
filenames = []
|
||||
for filename in listdir(mydir):
|
||||
filenames.append(join(mydir, filename))
|
||||
filenames.sort()
|
||||
else:
|
||||
filenames = [mydir]
|
||||
for filename in filenames:
|
||||
if filename.endswith('.xml'):
|
||||
if not isfile(filename):
|
||||
raise FileNotFound(_(u"File {0} does not exist").format(filename))
|
||||
# level indicates the level of dictionary (module, variante or local)
|
||||
level = {'local': 'local',
|
||||
'variante': 'variante'}.get(basename(dirname(filename)), 'module')
|
||||
#print filename
|
||||
#hack to detect if CreoleVarLoader or CreoleLoader is used
|
||||
is_creole_constrainte = 'gen_files' in dir(self)
|
||||
parse = parse_xml_file(filename, self.dtd, is_creole_constrainte, test_duplicate)
|
||||
self._populate(parse, namespace, level)
|
||||
self._post_populate(namespace)
|
||||
self._gen_descr(namespace)
|
||||
|
||||
def _pre_populate(self, namespace):
|
||||
# initialization before loading the data of a dictionary
|
||||
if self._config is not None:
|
||||
raise Exception(_(u'Unable to run read_dir if Config already exists.'))
|
||||
#Re init all variables
|
||||
for func in dir(self):
|
||||
if func.startswith('_init_creole_'):
|
||||
getattr(self, func)()
|
||||
# load the dictionaries
#FIXME: should be automatic ...
|
||||
self.requires = {'variable': {}, 'family': {}, 'service': {},
|
||||
'interface': {}, 'file': {}, 'filelist': {}, 'fstab': {},
|
||||
'host': {}, 'service_restriction': {}, 'service_access': {}, "action": {}}
|
||||
# this information should be a self.requires, but we need to change
|
||||
# too much code to do that (#5717)
|
||||
self.fallback = {}
|
||||
self.options[namespace] = {}
|
||||
|
||||
def _populate(self, parse, namespace, level):
|
||||
parse_keys = parse.keys()
|
||||
#families always in first place
|
||||
parse_keys.remove('families')
|
||||
parse_keys.insert(0, 'families')
|
||||
for keys in parse_keys:
|
||||
func_name = 'populate_' + keys
|
||||
if func_name in dir(self):
|
||||
try:
|
||||
getattr(self, 'populate_' + keys)(parse[keys], namespace)
|
||||
except Exception as err:
|
||||
raise ConfigError(_(u"Unable to populate {0}: {1}").format(keys, err))
|
||||
else:
|
||||
for var in parse[keys]:
|
||||
var['level'] = level
|
||||
self.generic.setdefault(keys, []).append(var)
|
||||
|
||||
def populate_families_action(self, var, namespace):
|
||||
for family_name, family in var.items():
|
||||
if family_name not in self.actions.keys():
|
||||
self.actions[family_name] = {}
|
||||
for key, value in family.items():
|
||||
if key == 'action':
|
||||
if 'actions' not in self.actions[family_name]:
|
||||
self.actions[family_name]['actions'] = []
|
||||
value['name'] = namespace
|
||||
self.actions[family_name]['actions'].append(value)
|
||||
else:
|
||||
self.actions[family_name][key] = value
|
||||
|
||||
def _post_populate(self, namespace):
|
||||
if namespace == 'creole':
|
||||
if self.families['general']['vars']['mode_conteneur_actif']['value'] == 'oui':
|
||||
self.containers_enabled = True
|
||||
else:
|
||||
self.containers_enabled = False
|
||||
|
||||
def gen_actions(self):
|
||||
objactions = []
|
||||
#name = 'actions'
|
||||
#for name_family, families in self.actions.items():
|
||||
# opts = []
|
||||
# for type_, infos in families.items():
|
||||
# if isinstance(infos, str):
|
||||
# opts.append(UnicodeOption(type_, '', unicode(infos)))
|
||||
# elif isinstance(infos, unicode):
|
||||
# opts.append(UnicodeOption(type_, '', infos))
|
||||
# elif infos == None:
|
||||
# pass
|
||||
# else:
|
||||
# for index, info in enumerate(infos):
|
||||
# optstype = []
|
||||
# for key, val in info.items():
|
||||
# if key == 'type':
|
||||
# optstype.append(ChoiceOption(key, '', ('form', 'custom', 'external'), unicode(val)))
|
||||
# elif isinstance(val, list):
|
||||
# lst = []
|
||||
# for val_ in val:
|
||||
# lst.append(unicode(val_['name']))
|
||||
# if lst != []:
|
||||
# optstype.append(UnicodeOption(key, '', default=lst, default_multi=lst[0], multi=True))
|
||||
# else:
|
||||
# optstype.append(UnicodeOption(key, '', unicode(val)))
|
||||
|
||||
# opts.append(OptionDescription(type_[:-1] + str(index), '', optstype))
|
||||
# objactions.append(OptionDescription(str(normalize_family(name_family)), name_family, opts))
|
||||
|
||||
descr = OptionDescription('actions', 'actions', objactions)
|
||||
return descr
|
||||
|
||||
def gen_paths(self, namespace):
|
||||
if namespace in self.paths:
|
||||
return self.paths[namespace]
|
||||
paths = {}
|
||||
all_slaves = {}
|
||||
for master, slaves in self.groups.items():
|
||||
for slave in slaves:
|
||||
all_slaves[slave] = master
|
||||
for fname, fdata in self.families.items():
|
||||
for vname in fdata['vars']:
|
||||
if vname in self.groups:
|
||||
paths[vname] = '{0}.{1}.{2}.{2}'.format(namespace,
|
||||
fname, vname)
|
||||
else:
|
||||
if vname in all_slaves:
|
||||
paths[vname] = '{0}.{1}.{2}.{3}'.format(
|
||||
namespace, fname, all_slaves[vname], vname)
|
||||
else:
|
||||
paths[vname] = '{0}.{1}.{2}'.format(namespace,
|
||||
fname, vname)
|
||||
self.paths[namespace] = paths
|
||||
return paths
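
# Illustrative sketch (hypothetical family and variable names): how the dotted
# paths are laid out for plain, master and slave variables.
def _example_build_path(namespace, fname, vname, groups, all_slaves):
    if vname in groups:          # master: repeated under its own name
        return '{0}.{1}.{2}.{2}'.format(namespace, fname, vname)
    if vname in all_slaves:      # slave: nested under its master
        return '{0}.{1}.{2}.{3}'.format(namespace, fname, all_slaves[vname], vname)
    return '{0}.{1}.{2}'.format(namespace, fname, vname)

# _example_build_path('creole', 'interfaces', 'eth0', {'eth0': ['vlan_id']},
#                     {'vlan_id': 'eth0'}) -> 'creole.interfaces.eth0.eth0'
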
def update_requires(self, values, namespace, option=False):
|
||||
"""
|
||||
replace variable name with paths in self.requires
|
||||
"""
|
||||
force_properties = []
|
||||
requires = []
|
||||
for value in values:
|
||||
try:
|
||||
if not '.' in value[0]:
|
||||
ns = 'creole'
|
||||
#path without namespace
|
||||
path = '.'.join(self.paths[ns][value[0]].split('.')[1:])
|
||||
else:
|
||||
ns = namespace
|
||||
path = '.'.join(value[0].split('.')[1:])
|
||||
opt = self.options[ns][path]
|
||||
except KeyError:
|
||||
if self.fallback[value[0]]:
|
||||
force_properties.append(value[2])
|
||||
continue
|
||||
else:
|
||||
raise Exception(_(u"Condition using unexistent variable {0} as parameter.").format(value[0]))
|
||||
val = value[1]
|
||||
if opt['obj'] is ChoiceOption:
|
||||
if val not in opt['args']['values']:
|
||||
if value[3]:
|
||||
force_properties.append(value[2])
|
||||
else:
|
||||
continue
|
||||
val = convert_tiramisu_value(val, opt['obj'])
|
||||
if option:
|
||||
ropt = self._get_option(ns, path)
|
||||
else:
|
||||
ropt = (ns, value[0])
|
||||
|
||||
requires.append({'option': ropt, 'expected': val, 'action': value[2], 'inverse': value[3]})
|
||||
return force_properties, requires
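
# Illustrative sketch (hypothetical variable name): a raw
# ('activer_proxy', 'oui', 'disabled', False) tuple becomes, once the name is
# resolved, a tiramisu require of this shape.
_example_require = {'option': ('creole', 'activer_proxy'),
                    'expected': u'oui',
                    'action': 'disabled',
                    'inverse': False}
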
def _populate_requires(self, namespace):
|
||||
for vname, values in self.requires['variable'].items():
|
||||
try:
|
||||
if not '.' in vname:
|
||||
ns = 'creole'
|
||||
#path without namespace
|
||||
path = '.'.join(self.paths[ns][vname].split('.')[1:])
|
||||
else:
|
||||
ns = namespace
|
||||
path = '.'.join(vname.split('.')[1:])
|
||||
opt = self.options[ns][path]
|
||||
except KeyError:
|
||||
if values['optional']:
|
||||
continue
|
||||
raise Exception(_(u"Condition targetting unexistent variable {0}").format(vname))
|
||||
props, req = self.update_requires(values['list'], namespace)
|
||||
if props != []:
|
||||
if opt['args']['requires'] is not None:
|
||||
raise Exception(_(u'requires already set for this option preventing changing properties {0}').format(vname))
|
||||
opt['args']['properties'] = tuple(list(opt['args']['properties']) + props)
|
||||
else:
|
||||
if opt['args']['requires'] is not None:
|
||||
raise Exception(_(u'requires already set for this option {0}').format(vname))
|
||||
#if force_store_value is set, remove force_default_on_freeze #7854
|
||||
if 'force_store_value' in opt['args']['properties']:
|
||||
new_rep = []
|
||||
for nreq in req:
|
||||
if nreq['action'] != 'force_default_on_freeze':
|
||||
new_rep.append(nreq)
|
||||
req = new_rep
|
||||
opt['args']['requires'] = req
|
||||
calc_properties = set()
|
||||
for r in req:
|
||||
calc_properties.add(r['action'])
|
||||
opt['args']['properties'] = tuple(set(opt['args']['properties']) - calc_properties)
|
||||
|
||||
def _get_option(self, namespace, vname):
|
||||
option = self.options[namespace][vname]
|
||||
if option['option'] is None:
|
||||
if option['optiontype'] == 'option':
|
||||
if option['args']['requires'] is not None:
|
||||
for require in option['args']['requires']:
|
||||
name = require['option'][1]
|
||||
if "." in name:
|
||||
path = name
|
||||
else:
|
||||
path = self.paths[namespace][require['option'][1]]
|
||||
path = '.'.join(path.split('.')[1:])
|
||||
require['option'] = self._get_option(require['option'][0], path)
|
||||
if 'callback_params' in option['args'] and option['args']['callback_params'] is not None:
|
||||
new_call_params = option['args']['callback_params']
|
||||
for key, callback_params in option['args']['callback_params'].items():
|
||||
new_cp = []
|
||||
for callback_param in callback_params:
|
||||
if isinstance(callback_param, tuple) and len(callback_param) == 2:
|
||||
path = callback_param[0][1]
|
||||
if '.' not in path:
|
||||
path = '.'.join(self.paths['creole'][path].split('.')[1:])
|
||||
new_cp.append((self._get_option(callback_param[0][0], path), callback_param[1]))
|
||||
else:
|
||||
new_cp.append(callback_param)
|
||||
new_call_params[key] = tuple(new_cp)
|
||||
option['args']['callback_params'] = new_call_params
|
||||
opt = option['obj'](**option['args'])
|
||||
elif option['optiontype'] == 'optiondescription':
|
||||
children = []
|
||||
for child in option['args']['children']:
|
||||
children.append(self._get_option(namespace, child))
|
||||
option['args']['children'] = children
|
||||
if option['args']['requires'] is not None:
|
||||
for require in option['args']['requires']:
|
||||
opt_name = require['option'][1]
|
||||
if '.' not in opt_name:
|
||||
path = '.'.join(self.paths['creole'][opt_name].split('.')[1:])
|
||||
require['option'] = self._get_option(require['option'][0], path)
|
||||
opt = OptionDescription(**option['args'])
|
||||
if option['group_type'] == 'master':
|
||||
opt.impl_set_group_type(groups.master)
|
||||
elif option['group_type'] == 'family':
|
||||
opt.impl_set_group_type(groups.family)
|
||||
else:
|
||||
raise Exception('Unknown group {}'.format(option['group_type']))
|
||||
elif option['optiontype'] == 'symlinkoption':
|
||||
sym_path = option['path'].split('.')
|
||||
sym_opt = self._get_option(sym_path[0], '.'.join(sym_path[1:]))
|
||||
option['args']['opt'] = sym_opt
|
||||
opt = option['obj'](**option['args'])
|
||||
else:
|
||||
raise Exception('unknown type {0}'.format(option['optiontype']))
|
||||
try:
|
||||
for key, info in self.options[namespace][vname]['informations'].items():
|
||||
opt.impl_set_information(key, info)
|
||||
except KeyError:
|
||||
pass
|
||||
self.options[namespace][vname]['option'] = opt
|
||||
return self.options[namespace][vname]['option']
|
||||
|
||||
def _gen_consistencies(self, namespace):
|
||||
for vname, params in self.consistency.items():
|
||||
path = '.'.join(self.paths[namespace][vname].split('.')[1:])
|
||||
opt = self._get_option(namespace, path)
|
||||
for param in params:
|
||||
dopt = []
|
||||
c_params = {}
|
||||
if param[-1] == 'warning':
|
||||
c_params['warnings_only'] = True
|
||||
for dvdict in param[1:-1]:
|
||||
dvname = dvdict['value']
|
||||
try:
|
||||
path = '.'.join(self.paths[namespace][dvname].split('.')[1:])
|
||||
dopt.append(self._get_option(namespace, path))
|
||||
except KeyError:
|
||||
if dvdict['optional'] != 'True':
|
||||
raise Exception(_(u"Check using unexistent variable {0} as parameter.").format(dvname))
|
||||
if dvdict['hidden'] == 'False':
|
||||
c_params['transitive'] = False
|
||||
opt.impl_add_consistency(param[0], *dopt, **c_params)
|
||||
|
||||
def _is_hidden(self, vname, vdata):
|
||||
#if the variable is hidden but not disabled
|
||||
if not vname in self.requires['variable'] and vdata['hidden']:
|
||||
return True
|
||||
return False
|
||||
|
||||
def _is_multi(self, vname, vdata, group_master):
|
||||
#if not a list
|
||||
if not vdata['multi'] and (group_master == None or
|
||||
(group_master != None and \
|
||||
vname not in self.groups[group_master])):
|
||||
return False
|
||||
return True
|
||||
|
||||
def _is_mandatory(self, vname, vdata):
|
||||
if vname in self.mandatory or vdata['mandatory']:
|
||||
return True
|
||||
return False
|
||||
|
||||
def _is_auto(self, vname):
|
||||
if vname in self.auto:
|
||||
return True
|
||||
return False
|
||||
|
||||
def _gen_func(self, path, obj, callback, callback_params, namespace):
|
||||
if callback_params is None:
|
||||
callback_params = {}
|
||||
if namespace == 'creole':
|
||||
vname = path.split('.')[-1]
|
||||
else:
|
||||
vname = path
|
||||
if vname in obj:
|
||||
callback, params = obj[vname]
|
||||
try:
|
||||
callback = getattr(eosfunc, callback)
|
||||
except AttributeError:
|
||||
raise ValueError(_(u'unknown function {0} in eosfunc').format(callback))
|
||||
for param, pvalues in params.items():
|
||||
for pvalue in pvalues:
|
||||
if type(pvalue) == dict:
|
||||
if namespace == 'creole':
|
||||
ns = 'creole'
|
||||
#it's a Tiramisu's **Option**, that is, a variable
|
||||
#optional could be None, False or True
|
||||
if pvalue['optional'] == True and \
|
||||
pvalue['value'] not in self.variables and \
|
||||
pvalue['value'] not in self.options[namespace]:
|
||||
continue
|
||||
path = '.'.join(self.paths[namespace][pvalue['value']].split('.')[1:])
|
||||
if not path in self.options[namespace]:
|
||||
if self.is_lint:
|
||||
return None, {}
|
||||
else:
|
||||
raise Exception(_(u"Variable computing function"
|
||||
u" using unknown variable "
|
||||
u"{0}").format(pvalue['value']))
|
||||
else:
|
||||
#Support extra
|
||||
try:
|
||||
# when we don't deal with the 'creole' namespace
|
||||
# the pvalues are paths, ex: schedule.bacula.day
|
||||
if namespace != 'creole' and not '.' in pvalue['value']:
|
||||
ns = 'creole'
|
||||
else:
|
||||
ns = namespace
|
||||
except KeyError:
|
||||
raise Exception(_(u"Variable computing function"
|
||||
u" using unknown variable "
|
||||
u"{0}").format(pvalue['value']))
|
||||
callback_params.setdefault(param, []).append(((ns, pvalue['value']),
|
||||
pvalue['check_disabled']))
|
||||
else:
|
||||
callback_params.setdefault(param, []).append(pvalue)
|
||||
normalize_callback_params = {}
|
||||
for callback_name, parameters in callback_params.items():
|
||||
normalize_callback_params[callback_name] = tuple(parameters)
|
||||
return callback, normalize_callback_params
|
||||
|
||||
def _gen_callback(self, namespace):
|
||||
for path, option in self.options[namespace].items():
|
||||
if option['optiontype'] != 'option':
|
||||
continue
|
||||
callback = None
|
||||
callback_params = {}
|
||||
if namespace != 'creole':
|
||||
path = namespace + '.' + path
|
||||
callback, callback_params = self._gen_func(path, self.fill, callback,
|
||||
callback_params, namespace)
|
||||
callback, callback_params = self._gen_func(path, self.auto, callback,
|
||||
callback_params, namespace)
|
||||
#no callback_params => None
|
||||
if callback_params == {}:
|
||||
callback_params = None
|
||||
if callback is not None:
|
||||
option['args']['callback'] = callback
|
||||
option['args']['callback_params'] = callback_params
|
||||
|
||||
|
||||
def _gen_check(self, namespace):
|
||||
for path, option in self.options[namespace].items():
|
||||
validator = self._gen_func(path, self.check, None, None, namespace=namespace)
|
||||
if validator[0] is not None:
|
||||
option['args']['validator'] = validator[0]
|
||||
if validator[1] is not None:
|
||||
option['args']['validator_params'] = validator[1]
|
||||
|
||||
def _gen_option(self, fname, vname, vdata, group_master, family_mode, namespace, goptions):
|
||||
"""
|
||||
generate an option with given information
|
||||
|
||||
:vname: variable name
|
||||
:vdata: variable informations load in XML file
|
||||
:group_master: name of master
|
||||
"""
|
||||
informations = {}
|
||||
#FIXME master_slaves
|
||||
if group_master is not None:
|
||||
path = '.'.join([fname, group_master, vname])
|
||||
else:
|
||||
path = '.'.join([fname, vname])
|
||||
|
||||
if namespace == 'creole':
|
||||
cname = vname
|
||||
else:
|
||||
cname = namespace + '.' + path
|
||||
has_callback = cname in self.fill or cname in self.auto
|
||||
if not has_callback:
|
||||
value = vdata['value']
|
||||
else:
|
||||
value = None
|
||||
multi = self._is_multi(vname, vdata, group_master)
|
||||
if value != None and multi and type(value) != list:
|
||||
value = [value]
|
||||
default_multi = None
|
||||
if multi and value is not None and vname != group_master:
|
||||
default_multi = value[0]
|
||||
#slaves have no value
|
||||
if value is not None and self._is_a_masterslave(vname, group_master):
|
||||
if len(value) != 1:
|
||||
# exception to the "no slave for a master without value" rule:
# some dictionaries define a default slave value,
# so one and only one value is tolerated.
|
||||
raise Exception(_(u"Slave value length can not be greater "
|
||||
u"than 1."))
|
||||
if vname != group_master:
|
||||
value = []
|
||||
if vdata['description'] is None:
|
||||
doc = vname
|
||||
else:
|
||||
doc = vdata['description']
|
||||
args = {'name': vname, 'doc': doc,
|
||||
'multi': multi}
|
||||
#args['callback'], args['callback_params'] = self._gen_callback(path, paths, namespace)
|
||||
args['properties'] = self._gen_properties(vname, value, vdata,
|
||||
has_callback, family_mode,
|
||||
default_multi, group_master,
|
||||
goptions, namespace, path)
|
||||
is_choiceoption = False
|
||||
ovalue = None
|
||||
if namespace == 'creole':
|
||||
valid_enum_path = vname
|
||||
else:
|
||||
valid_enum_path = namespace + '.' + path
|
||||
valid_enum_path = vname
|
||||
if self.valid_enum.has_key(valid_enum_path):
|
||||
valid_enum = self.valid_enum[valid_enum_path]
|
||||
ovalue = valid_enum[0][0]
|
||||
open_values = valid_enum[1]
|
||||
if open_values:
|
||||
informations['proposed_value'] = tuple(valid_enum[0])
|
||||
else:
|
||||
obj = ChoiceOption
|
||||
olist = tuple(valid_enum[0])
|
||||
forceargs = None
|
||||
is_choiceoption = True
|
||||
if not is_choiceoption:
|
||||
obj, olist, forceargs = CONVERT_OPTION.get(vdata['type'], (None, None, None))
|
||||
if olist is not None:
|
||||
ovalue = olist[0]
|
||||
if obj is None:
|
||||
raise Exception(_(u'Unknown type {0}').format(vdata['type']))
|
||||
#args['validator'], args['validator_params'] = self._gen_check(vname, namespace)
|
||||
args['default'] = convert_tiramisu_value(value, obj)
|
||||
args['default_multi'] = convert_tiramisu_value(default_multi, obj)
|
||||
if olist:
|
||||
args['values'] = tuple(olist)
|
||||
if ovalue is not None:
|
||||
#if there is a default list and no value
|
||||
if args['default'] is None and not args['multi'] and not has_callback:
|
||||
args['default'] = ovalue
|
||||
#if value but not in list
|
||||
if args['default'] != None and args['multi'] and type(args['default']) != list:
|
||||
args['default'] = [args['default']]
|
||||
if forceargs is not None:
|
||||
args.update(forceargs)
|
||||
if vname in self.helps['variables']:
|
||||
informations['help'] = self.helps['variables'][vname]
|
||||
if vname in self.separators:
|
||||
informations['separator'] = self.separators[vname]
|
||||
args['requires'] = None
|
||||
option = {'optiontype': 'option', 'obj': obj, 'args': args,
|
||||
'informations': informations, 'option': None}
|
||||
self.options[namespace][path] = option
|
||||
return path
|
||||
|
||||
def _gen_master_group(self, namespace, fname, group_master, goptions):
|
||||
path = '.'.join((fname, group_master))
|
||||
properties = []
|
||||
mode = False
|
||||
for mode in modes_level:
|
||||
if mode in self.options[namespace][goptions[0]]['args']['properties']:
|
||||
properties.append(mode)
|
||||
mode = True
|
||||
if not mode:
|
||||
properties.append(modes_level[1])
|
||||
self.options[namespace][path] = {'optiontype': 'optiondescription',
|
||||
'args': {'name': group_master,
|
||||
'doc': 'Master {0}'.format(group_master),
|
||||
'children': goptions,
|
||||
'properties': tuple(properties),
|
||||
'requires': None},
|
||||
'group_type': 'master',
|
||||
'option': None}
|
||||
return path
|
||||
|
||||
def _gen_properties(self, vname, value, vdata, has_callback, family_mode,
|
||||
default_multi, group_master, goptions, namespace, path):
|
||||
if self._is_hidden(vname, vdata) or self._is_auto(vname):
|
||||
properties = ['hidden', 'frozen']
|
||||
#7854
|
||||
if vdata['auto_save'] is False and not self.no_auto_store:
|
||||
properties.append('force_default_on_freeze')
|
||||
else:
|
||||
properties = []
|
||||
mode = vdata['mode']
|
||||
#a mandatory variable with no value falls back to the lowest mode
|
||||
if self._is_mandatory(vname, vdata):
|
||||
properties.append('mandatory')
|
||||
if value in (None, []) and vname not in self.auto and \
|
||||
vname not in self.fill:
|
||||
mode = modes_level[0]
|
||||
#non mandatory variable with a value becomes mandatory (#7141)
|
||||
elif value not in (None, []) or default_multi is not None:
|
||||
properties.append('mandatory')
|
||||
|
||||
if vdata['auto_freeze'] == True:
|
||||
if self._is_auto(vname):
|
||||
raise Exception(_('{0} is auto, so must not be auto_freeze or auto_save').format(vname))
|
||||
if not self.no_auto_store:
|
||||
properties.extend(['auto_freeze'])
|
||||
if mode != 'expert':
|
||||
mode = modes_level[0]
|
||||
self.force_store_vars.add(self.paths[namespace][vname])
|
||||
if vdata['auto_save'] is True:
|
||||
if self._is_auto(vname):
|
||||
raise Exception(_('{0} is auto, so must not be auto_freeze or auto_save').format(vname))
|
||||
if not self.no_auto_store:
|
||||
properties.append('force_store_value')
|
||||
if mode != 'expert':
|
||||
mode = modes_level[0]
|
||||
self.force_store_vars.add(self.paths[namespace][vname])
|
||||
if self._is_a_masterslave(vname, group_master) and goptions != []:
|
||||
master_mode = 'normal'
|
||||
for mod in self.options[namespace][goptions[0]]['args']['properties']:
|
||||
if mod in modes_level:
|
||||
master_mode = mod
|
||||
break
|
||||
if modes[mode] < modes[master_mode]:
|
||||
properties.append(master_mode)
|
||||
else:
|
||||
properties.append(mode)
|
||||
else:
|
||||
if modes[mode] < modes[family_mode]:
|
||||
properties.append(family_mode)
|
||||
else:
|
||||
properties.append(mode)
|
||||
if vdata.get('disabled') == True:
|
||||
properties.append('disabled')
|
||||
return tuple(properties)
|
||||
|
||||
def _is_a_masterslave(self, vname, group_master):
|
||||
return group_master != None and (vname == group_master or
|
||||
vname in self.groups[group_master])
|
||||
|
||||
def _gen_options_by_family(self, fname, fdata, namespace):
|
||||
#if var is in a group
|
||||
options = []
|
||||
family_mode = fdata['mode']
|
||||
slaves = []
|
||||
for vname, vdata in fdata['vars'].items():
|
||||
goptions = []
|
||||
if vname in self.groups:
|
||||
slaves.extend(self.groups[vname])
|
||||
goptions.append(self._gen_option(fname, vname, vdata, vname, family_mode, namespace, goptions))
|
||||
for sname in self.groups[vname]:
|
||||
sdata = fdata['vars'][sname]
|
||||
goptions.append(self._gen_option(fname, sname, sdata, vname, family_mode, namespace, goptions))
|
||||
options.append(self._gen_master_group(namespace, fname, vname, goptions))
|
||||
elif vname in slaves:
|
||||
pass
|
||||
else:
|
||||
options.append(self._gen_option(fname, vname, vdata, None, family_mode, namespace, goptions))
|
||||
#family
|
||||
fname = unicode.encode(unicode(fname), 'utf-8')
|
||||
properties = [fdata['mode']]
|
||||
if fname in self.requires['family']:
|
||||
props, req = self.update_requires(self.requires['family'][fname]['list'], namespace)
|
||||
if props != []:
|
||||
properties.extend(props)
|
||||
requires = None
|
||||
else:
|
||||
requires = req
|
||||
else:
|
||||
requires = None
|
||||
if fdata['hidden'] == True:
|
||||
#if a hidden_if_in or hidden_if_not_in condition targets this family,
#do not hide the family here
|
||||
hide = True
|
||||
for var, val, act, inv in self.requires['family'].get(fname, {'list': []})['list']:
|
||||
if act == 'disabled':
|
||||
hide = False
|
||||
break
|
||||
if hide:
|
||||
properties.append('hidden')
|
||||
|
||||
informations = {}
|
||||
if 'icon' in fdata:
|
||||
informations['icon'] = fdata['icon']
|
||||
if fname in self.helps['families']:
|
||||
informations['help'] = self.helps['families'][fname]
|
||||
family = {'optiontype': 'optiondescription',
|
||||
'args': {'name': fname, 'doc': fdata['doc'],
|
||||
'children': options, 'requires': requires,
'properties': tuple(properties)},
|
||||
'group_type': 'family',
|
||||
'informations': informations,
|
||||
'option': None}
|
||||
self.options[namespace][fname] = family
|
||||
return fname
|
||||
|
||||
def _gen_descr(self, namespace):
|
||||
is_creole_constrainte = 'gen_files' in dir(self)
|
||||
paths = self.gen_paths(namespace)
|
||||
if namespace == 'creole':
|
||||
flist = [self.gen_containers_creole(paths, namespace)]
|
||||
else:
|
||||
flist = []
|
||||
for fname in self.requires['family']:
|
||||
if fname not in self.families and not self.requires['family'][fname]['optional']:
|
||||
raise Exception(_(u'Unknown family {0} has requires').format(fname))
|
||||
for fname, fdata in self.families.items():
|
||||
flist.append(self._gen_options_by_family(fname, fdata, namespace))
|
||||
self.families = {}
|
||||
self._populate_requires(namespace)
|
||||
self._gen_callback(namespace)
|
||||
self._gen_check(namespace)
|
||||
self._gen_consistencies(namespace)
|
||||
options = []
|
||||
for fl in flist:
|
||||
options.append(self._get_option(namespace, fl))
|
||||
|
||||
self.space.append(OptionDescription(namespace, '', options))
|
||||
if namespace == 'creole' and is_creole_constrainte:
|
||||
containers = self.gen_container(paths, namespace='containers')
|
||||
self.space.append(OptionDescription('containers', '',
|
||||
containers))
|
||||
|
||||
def get_config(self):
|
||||
if self._config is None:
|
||||
if self.actions != {}:
|
||||
self.space.append(self.gen_actions())
|
||||
descr = OptionDescription('baseconfig', 'baseconfigdescr',
|
||||
self.space)
|
||||
self._config = Config(descr)
|
||||
self._config.impl_set_information('force_store_vars', self.force_store_vars)
|
||||
self._config.impl_set_information('force_store_values', list(self.force_store_vars))
|
||||
self._config.cfgimpl_get_settings().remove('hidden')
|
||||
_modes = list(modes_level)
|
||||
_modes.append('hidden')
|
||||
self._config.cfgimpl_get_settings().setpermissive(tuple(_modes))
|
||||
return self._config
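# usage sketch (names are illustrative): cfg = loader.get_config() builds the
# Config once and caches it; values are then read through the Config API,
# e.g. cfg.creole.<famille>.<variable>, depending on the loaded dictionaries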
|
||||
|
||||
def get_real_container_name(self, containers, cont):
|
||||
while containers[cont]['group'] != cont:
|
||||
cont = containers[cont]['group']
|
||||
return cont
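# illustration (hypothetical data): the 'group' links are followed until an
# entry points to itself, e.g. with
#   containers = {'web': {'group': 'reseau'}, 'reseau': {'group': 'reseau'}}
# self.get_real_container_name(containers, 'web') returns 'reseau'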
|
|
@ -1,67 +0,0 @@
|
|||
#! /usr/bin/env python
|
||||
# -*- coding: UTF-8 -*-
|
||||
|
||||
|
||||
import base64
|
||||
|
||||
KEY_LENGTH = 40
|
||||
KEYS = [
|
||||
0x50,
|
||||
0xF7,
|
||||
0x82,
|
||||
0x69,
|
||||
0xEA,
|
||||
0x2D,
|
||||
0xDD,
|
||||
0x2D,
|
||||
0x6A,
|
||||
0xB4,
|
||||
0x33,
|
||||
0x8F,
|
||||
0xD5,
|
||||
0xC7,
|
||||
0x90,
|
||||
0x9C,
|
||||
0x22,
|
||||
0x95,
|
||||
0x61,
|
||||
0xE5,
|
||||
0x65,
|
||||
0xF6,
|
||||
0xB0,
|
||||
0x4B,
|
||||
0x94,
|
||||
0x47,
|
||||
0xB0,
|
||||
0xBD,
|
||||
0x73,
|
||||
0x58,
|
||||
0x56,
|
||||
0x87,
|
||||
0x79,
|
||||
0x7B,
|
||||
0xE6,
|
||||
0xB0,
|
||||
0xD2,
|
||||
0x20,
|
||||
0x28,
|
||||
0xE1
|
||||
]
|
||||
|
||||
def bitwise(s):
|
||||
res = ''
|
||||
idx = 0
|
||||
for i in range(len(s)):
|
||||
res += chr(ord(s[i]) ^ KEYS[idx])
|
||||
idx += 1
|
||||
if idx > (KEY_LENGTH - 1):
|
||||
idx = 0
|
||||
return res
|
||||
|
||||
def wcrypt(s):
|
||||
s = bitwise(s)
|
||||
return base64.encodestring(s)[:-1]  # encodestring returns the string with a trailing '\n'; strip it
|
||||
|
||||
def wdecrypt(s):
|
||||
s = base64.decodestring(s)
|
||||
return bitwise(s)
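# usage sketch (assumes Python 2, where str is a byte string): XOR with the
# same key stream is an involution, so decoding reverses encoding exactly
#
#     >>> token = wcrypt('secret')      # obfuscated, then base64-encoded
#     >>> wdecrypt(token) == 'secret'
#     True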
|
|
@ -38,7 +38,7 @@
|
|||
<!-- root element -->
|
||||
<!-- =============== -->
|
||||
|
||||
<!ELEMENT creole (containers | files | family_action | variables | constraints | help)*>
|
||||
<!ELEMENT creole (containers | family_action | variables | constraints | help)*>
|
||||
|
||||
<!-- ============== -->
|
||||
<!-- files element -->
|
||||
|
@ -64,7 +64,6 @@
|
|||
<!ATTLIST action url_type (URLOption|SymLinkOption) "URLOption">
|
||||
<!-- for form action -->
|
||||
<!ATTLIST action save (True|False) "False">
|
||||
<!ELEMENT files ((service* | service_access* | service_restriction* | package* | file*)*)>
|
||||
|
||||
<!ELEMENT containers ((container* | all*)*)>
|
||||
|
||||
|
@ -77,7 +76,6 @@
|
|||
|
||||
<!ELEMENT service (#PCDATA)>
|
||||
<!ATTLIST service servicelist CDATA #IMPLIED >
|
||||
<!ATTLIST service instance_mode (when_container|when_no_container|always) "always">
|
||||
<!ATTLIST service method (systemd|upstart|apache|network) "systemd">
|
||||
<!ATTLIST service redefine (True|False) "False">
|
||||
|
||||
|
@ -131,7 +129,6 @@
|
|||
<!ATTLIST host ip CDATA #REQUIRED > <!--SymLinkOption-->
|
||||
<!ATTLIST host ip_type (SymLinkOption) "SymLinkOption">
|
||||
<!ATTLIST host crossed (True|False) "True" >
|
||||
<!ATTLIST host instance_mode (when_container|when_no_container|always) "always">
|
||||
<!ATTLIST host comment CDATA #IMPLIED >
|
||||
|
||||
<!ELEMENT fstab EMPTY >
|
||||
|
@ -144,10 +141,9 @@
|
|||
<!ATTLIST fstab options CDATA #IMPLIED>
|
||||
<!ATTLIST fstab checks CDATA #IMPLIED>
|
||||
<!ATTLIST fstab fstablist CDATA #IMPLIED>
|
||||
<!ATTLIST fstab instance_mode (when_container|when_no_container|always) "when_container">
|
||||
|
||||
<!ELEMENT package (#PCDATA)>
|
||||
<!ATTLIST package instance_mode (when_container|when_no_container|always) "always">
|
||||
<!ATTLIST package packagelist CDATA #IMPLIED>
|
||||
|
||||
<!ELEMENT disknod (#PCDATA)>
|
||||
|
||||
|
@ -159,7 +155,6 @@
|
|||
<!ATTLIST file group CDATA #IMPLIED >
|
||||
<!ATTLIST file filelist CDATA #IMPLIED >
|
||||
<!ATTLIST file mkdir (True|False) "False">
|
||||
<!ATTLIST file instance_mode (when_container|when_no_container|always) "always">
|
||||
<!ATTLIST file rm (True|False) "False">
|
||||
<!ATTLIST file del_comment CDATA #IMPLIED >
|
||||
<!ATTLIST file redefine (True|False) "False">
|
||||
|
|
|
@ -1,10 +0,0 @@
|
|||
#!/bin/sh
|
||||
|
||||
echo "La bibliothèque shell FonctionsEoleNg ne doit plus être utilisée." >&2
|
||||
if [ -n "${0}" ]
|
||||
then
|
||||
echo "Merci de corriger le code de '${0}'" >&2
|
||||
fi
|
||||
echo ''
|
||||
echo "Voir la documentation http://dev-eole.ac-dijon.fr/projects/eole/wiki/PrepareEOLE24" >&2
|
||||
exit 255
|
|
@ -1,100 +0,0 @@
|
|||
|
||||
/* Body color */
|
||||
body { background: #ffffff; color: #000000; }
|
||||
|
||||
/* Tables */
|
||||
table.summary, table.details, table.index
|
||||
{ background: #e8f0f8; color: #000000; }
|
||||
tr.summary, tr.details, tr.index
|
||||
{ background: #70b0f0; color: #000000;
|
||||
text-align: left; font-size: 120%; }
|
||||
tr.group { background: #c0e0f8; color: #000000;
|
||||
text-align: left; font-size: 120%;
|
||||
font-style: italic; }
|
||||
|
||||
/* Documentation page titles */
|
||||
h2.module { margin-top: 0.2em; }
|
||||
h2.class { margin-top: 0.2em; }
|
||||
|
||||
/* Headings */
|
||||
h1.heading { font-size: 140%; font-style: italic;
|
||||
font-weight: bold; }
|
||||
h2.heading { font-size: 125%; font-style: italic;
|
||||
font-weight: bold; }
|
||||
h3.heading { font-size: 110%; font-style: italic;
|
||||
font-weight: normal; }
|
||||
|
||||
/* Base tree */
|
||||
pre.base-tree { font-size: 80%; margin: 0; }
|
||||
|
||||
/* Details Sections */
|
||||
table.func-details { background: #e8f0f8; color: #000000;
|
||||
border: 2px groove #c0d0d0;
|
||||
padding: 0 1em 0 1em; margin: 0.4em 0 0 0; }
|
||||
h3.func-detail { background: transparent; color: #000000;
|
||||
margin: 0 0 1em 0; }
|
||||
|
||||
table.var-details { background: #e8f0f8; color: #000000;
|
||||
border: 2px groove #c0d0d0;
|
||||
padding: 0 1em 0 1em; margin: 0.4em 0 0 0; }
|
||||
h3.var-details { background: transparent; color: #000000;
|
||||
margin: 0 0 1em 0; }
|
||||
|
||||
/* Function signatures */
|
||||
.sig { background: transparent; color: #000000;
|
||||
font-weight: bold; }
|
||||
.sig-name { background: transparent; color: #006080; }
|
||||
.sig-arg, .sig-kwarg, .sig-vararg
|
||||
{ background: transparent; color: #008060; }
|
||||
.sig-default { background: transparent; color: #602000; }
|
||||
.summary-sig { background: transparent; color: #000000; }
|
||||
.summary-sig-name { background: transparent; color: #204080; }
|
||||
.summary-sig-arg, .summary-sig-kwarg, .summary-sig-vararg
|
||||
{ background: transparent; color: #008060; }
|
||||
|
||||
/* Doctest blocks */
|
||||
.py-src { background: transparent; color: #000000; }
|
||||
.py-prompt { background: transparent; color: #005050;
|
||||
font-weight: bold;}
|
||||
.py-string { background: transparent; color: #006030; }
|
||||
.py-comment { background: transparent; color: #003060; }
|
||||
.py-keyword { background: transparent; color: #600000; }
|
||||
.py-output { background: transparent; color: #404040; }
|
||||
pre.doctestblock { background: #f4faff; color: #000000;
|
||||
padding: .5em; margin: 1em;
|
||||
border: 1px solid #708890; }
|
||||
table pre.doctestblock
|
||||
{ background: #dce4ec; color: #000000;
|
||||
padding: .5em; margin: 1em;
|
||||
border: 1px solid #708890; }
|
||||
|
||||
/* Variable values */
|
||||
pre.variable { background: #dce4ec; color: #000000;
|
||||
padding: .5em; margin: 0;
|
||||
border: 1px solid #708890; }
|
||||
.variable-linewrap { background: transparent; color: #604000; }
|
||||
.variable-ellipsis { background: transparent; color: #604000; }
|
||||
.variable-quote { background: transparent; color: #604000; }
|
||||
.re { background: transparent; color: #000000; }
|
||||
.re-char { background: transparent; color: #006030; }
|
||||
.re-op { background: transparent; color: #600000; }
|
||||
.re-group { background: transparent; color: #003060; }
|
||||
.re-ref { background: transparent; color: #404040; }
|
||||
|
||||
/* Navigation bar */
|
||||
table.navbar { background: #a0c0ff; color: #0000ff;
|
||||
border: 2px groove #c0d0d0; }
|
||||
th.navbar { background: #a0c0ff; color: #0000ff; }
|
||||
th.navselect { background: #70b0ff; color: #000000; }
|
||||
.nomargin { margin: 0; }
|
||||
|
||||
/* Links */
|
||||
a:link { background: transparent; color: #0000ff; }
|
||||
a:visited { background: transparent; color: #204080; }
|
||||
a.navbar:link { background: transparent; color: #0000ff;
|
||||
text-decoration: none; }
|
||||
a.navbar:visited { background: transparent; color: #204080;
|
||||
text-decoration: none; }
|
||||
|
||||
/* Lists */
|
||||
ul { margin-top: 0; }
|
|
@ -1,60 +0,0 @@
|
|||
génération des certificats
|
||||
==========================
|
||||
|
||||
mode opératoire
|
||||
|
||||
|
||||
au premier lancement de ``gen_certif.py``
|
||||
------------------------------------------
|
||||
|
||||
- vérifie l'existence d'une CA ou non
|
||||
- génère la CA
|
||||
- génère les certificats par défaut (clef privée, requête de certificat)
|
||||
- signature des certificats
|
||||
|
||||
aux lancements ultérieurs
|
||||
-------------------------
|
||||
|
||||
|
||||
- vérifie l'existence d'une CA ou non
|
||||
- génère le certificat passé en argument
|
||||
|
||||
::
|
||||
|
||||
gen_certif.py (-f) [nom_certif]
|
||||
|
||||
si [nom_certif] n'est pas renseigné, régénère tous les certificats par défaut
|
||||
ainsi que la CA locale. Sinon, ne génère que [nom_certif]
|
||||
-f : force la régénération du (ou des) certificat(s) s'il(s) existe(nt)
|
||||
|
||||
|
||||
``regen``
|
||||
|
||||
attribut permettant de forcer (ou pas) la regénération
|
||||
si ``regen==True`` alors les certificats sont régénérés même s'ils existent
|
||||
si ``regen==False`` alors les certificats ne sont générés que s'ils
|
||||
n'existent pas.
|
||||
|
||||
api
|
||||
----
|
||||
|
||||
- génération d'un seul certificat :
|
||||
|
||||
``cert.gen_certif(certfile,regen=regen, copy_key=copy)``
|
||||
|
||||
|
||||
- génération de tous les certificats :
|
||||
|
||||
``cert.gen_certs(regen=regen)``
|
||||
|
||||
|
||||
|
||||
::
|
||||
|
||||
gen_certs()
|
||||
|-> gen_ca()
|
||||
|-> certif_loader()
|
||||
|-> gen_certif()
|
||||
|-> finalise_certs()
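
exemple d'utilisation de l'api (esquisse non testée ; l'import ``from creole
import cert`` et le chemin du certificat sont des hypothèses)::

    from creole import cert

    # régénère un certificat précis, même s'il existe déjà
    cert.gen_certif('/etc/ssl/certs/exemple.crt', regen=True, copy_key=False)

    # (re)génère la CA locale et tous les certificats par défaut
    cert.gen_certs(regen=False)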
|
||||
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
rm -f *.html
|
||||
rm -f api/*.html
|
|
@ -1,15 +0,0 @@
|
|||
|
||||
|
||||
process
|
||||
-------
|
||||
|
||||
- point d'entrée : `process.py` méthode *run()*
|
||||
- lecture des fichiers dictionnaires *xml*
|
||||
- lecture du fichier */etc/eole/config.eol* pour remplir l'objet
|
||||
dictionnaire
|
||||
|
||||
|
||||
mapping avec la ligne de commande
|
||||
---------------------------------
|
||||
|
||||
.. TODO
|
377
doc/default.css
377
doc/default.css
|
@ -1,377 +0,0 @@
|
|||
/*
|
||||
:Author: David Goodger
|
||||
:Contact: goodger at users.sourceforge.net
|
||||
:date: $Date: 2004/11/11 23:11:44 $
|
||||
:version: $Revision: 1.1 $
|
||||
:copyright: This stylesheet has been placed in the public domain.
|
||||
|
||||
Default cascading style sheet for the HTML output of Docutils.
|
||||
*/
|
||||
|
||||
/* "! important" is used here to override other ``margin-top`` and
|
||||
``margin-bottom`` styles that are later in the stylesheet or
|
||||
more specific. See <http://www.w3.org/TR/CSS1#the-cascade>. */
|
||||
|
||||
html, body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: Georgia, arial, sans-serif;
|
||||
padding: 3em;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 130%;
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-size: 110%;
|
||||
}
|
||||
|
||||
blockquote {
|
||||
width: 70%;
|
||||
margin: 2em auto;
|
||||
padding: 1em;
|
||||
background-color: #FFEEEE;
|
||||
border: 1px solid #EEDDDD;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.title {
|
||||
font-size: 180%;
|
||||
}
|
||||
|
||||
.subtitle {
|
||||
font-size: 100%;
|
||||
}
|
||||
|
||||
.first {
|
||||
margin-top: 0 ! important }
|
||||
|
||||
.last {
|
||||
margin-bottom: 0 ! important }
|
||||
|
||||
.hidden {
|
||||
display: none }
|
||||
|
||||
a.toc-backref {
|
||||
text-decoration: none ;
|
||||
color: black }
|
||||
|
||||
blockquote.epigraph {
|
||||
margin: 2em 5em ; }
|
||||
|
||||
dd {
|
||||
margin-bottom: 0.5em }
|
||||
|
||||
/* Uncomment (& remove this text!) to get bold-faced definition list terms
|
||||
dt {
|
||||
font-weight: bold }
|
||||
*/
|
||||
|
||||
div.abstract {
|
||||
margin: 2em 5em }
|
||||
|
||||
div.abstract p.topic-title {
|
||||
font-weight: bold ;
|
||||
text-align: center }
|
||||
|
||||
div.admonition, div.attention, div.caution, div.danger, div.error,
|
||||
div.hint, div.important, div.note, div.tip, div.warning {
|
||||
margin: 2em ;
|
||||
border: medium outset ;
|
||||
padding: 1em }
|
||||
|
||||
div.admonition p.admonition-title, div.hint p.admonition-title,
|
||||
div.important p.admonition-title, div.note p.admonition-title,
|
||||
div.tip p.admonition-title {
|
||||
font-weight: bold ;
|
||||
font-family: sans-serif }
|
||||
|
||||
div.attention p.admonition-title, div.caution p.admonition-title,
|
||||
div.danger p.admonition-title, div.error p.admonition-title,
|
||||
div.warning p.admonition-title {
|
||||
color: red ;
|
||||
font-weight: bold ;
|
||||
font-family: sans-serif }
|
||||
|
||||
div.compound .compound-first, div.compound .compound-middle {
|
||||
margin-bottom: 0.5em }
|
||||
|
||||
div.compound .compound-last, div.compound .compound-middle {
|
||||
margin-top: 0.5em }
|
||||
|
||||
div.dedication {
|
||||
margin: 2em 5em ;
|
||||
text-align: center ;
|
||||
font-style: italic }
|
||||
|
||||
div.dedication p.topic-title {
|
||||
font-weight: bold ;
|
||||
font-style: normal }
|
||||
|
||||
div.figure {
|
||||
margin-left: 2em }
|
||||
|
||||
div.footer, div.header {
|
||||
font-size: smaller }
|
||||
|
||||
div.line-block {
|
||||
display: block ;
|
||||
margin-top: 1em ;
|
||||
margin-bottom: 1em }
|
||||
|
||||
div.line-block div.line-block {
|
||||
margin-top: 0 ;
|
||||
margin-bottom: 0 ;
|
||||
margin-left: 1.5em }
|
||||
|
||||
div.sidebar {
|
||||
margin-left: 1em ;
|
||||
border: medium outset ;
|
||||
padding: 0em 1em ;
|
||||
background-color: #ffffee ;
|
||||
width: 40% ;
|
||||
float: right ;
|
||||
clear: right }
|
||||
|
||||
div.sidebar p.rubric {
|
||||
font-family: sans-serif ;
|
||||
font-size: medium }
|
||||
|
||||
div.system-messages {
|
||||
margin: 5em }
|
||||
|
||||
div.system-messages h1 {
|
||||
color: red }
|
||||
|
||||
div.system-message {
|
||||
border: medium outset ;
|
||||
padding: 1em }
|
||||
|
||||
div.system-message p.system-message-title {
|
||||
color: red ;
|
||||
font-weight: bold }
|
||||
|
||||
div.topic {
|
||||
margin: 2em }
|
||||
|
||||
h1.title {
|
||||
text-align: center }
|
||||
|
||||
h2.subtitle {
|
||||
text-align: center }
|
||||
|
||||
hr {
|
||||
width: 75% }
|
||||
|
||||
ol.simple, ul.simple {
|
||||
margin-bottom: 1em }
|
||||
|
||||
ol.arabic {
|
||||
list-style: decimal }
|
||||
|
||||
ol.loweralpha {
|
||||
list-style: lower-alpha }
|
||||
|
||||
ol.upperalpha {
|
||||
list-style: upper-alpha }
|
||||
|
||||
ol.lowerroman {
|
||||
list-style: lower-roman }
|
||||
|
||||
ol.upperroman {
|
||||
list-style: upper-roman }
|
||||
|
||||
p.attribution {
|
||||
text-align: right ;
|
||||
margin-left: 50% }
|
||||
|
||||
p.caption {
|
||||
font-style: italic }
|
||||
|
||||
p.credits {
|
||||
font-style: italic ;
|
||||
font-size: smaller }
|
||||
|
||||
p.label {
|
||||
white-space: nowrap }
|
||||
|
||||
p.rubric {
|
||||
font-weight: bold ;
|
||||
font-size: larger ;
|
||||
color: maroon ;
|
||||
text-align: center }
|
||||
|
||||
p.sidebar-title {
|
||||
font-family: sans-serif ;
|
||||
font-weight: bold ;
|
||||
font-size: larger }
|
||||
|
||||
p.sidebar-subtitle {
|
||||
font-family: sans-serif ;
|
||||
font-weight: bold }
|
||||
|
||||
p.topic-title {
|
||||
font-weight: bold }
|
||||
|
||||
pre.address {
|
||||
margin-bottom: 0 ;
|
||||
margin-top: 0 ;
|
||||
font-family: serif ;
|
||||
font-size: 100% }
|
||||
|
||||
pre.line-block {
|
||||
font-family: serif ;
|
||||
font-size: 100% }
|
||||
|
||||
.literal {
|
||||
color: #333;
|
||||
background-color: #EEE;
|
||||
}
|
||||
|
||||
pre.literal-block, pre.doctest-block {
|
||||
margin-left: 2em ;
|
||||
margin-right: 2em ;
|
||||
padding: 1em;
|
||||
color: #333;
|
||||
background-color: #EEE;}
|
||||
|
||||
span.classifier {
|
||||
font-family: sans-serif ;
|
||||
font-style: oblique }
|
||||
|
||||
span.classifier-delimiter {
|
||||
font-family: sans-serif ;
|
||||
font-weight: bold }
|
||||
|
||||
span.interpreted {
|
||||
font-family: sans-serif }
|
||||
|
||||
span.option {
|
||||
white-space: nowrap }
|
||||
|
||||
span.option-argument {
|
||||
font-style: italic }
|
||||
|
||||
span.pre {
|
||||
white-space: pre }
|
||||
|
||||
span.problematic {
|
||||
color: red }
|
||||
|
||||
table {
|
||||
margin-top: 0.5em ;
|
||||
margin-bottom: 0.5em }
|
||||
|
||||
table.citation {
|
||||
border-left: solid thin gray }
|
||||
|
||||
table.docinfo {
|
||||
margin: 2em 4em }
|
||||
|
||||
table.footnote {
|
||||
border-left: solid thin black }
|
||||
|
||||
td, th {
|
||||
padding-left: 0.5em ;
|
||||
padding-right: 0.5em ;
|
||||
vertical-align: top }
|
||||
|
||||
th.docinfo-name, th.field-name {
|
||||
font-weight: bold ;
|
||||
text-align: left ;
|
||||
white-space: nowrap }
|
||||
|
||||
h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
|
||||
font-size: 100% }
|
||||
|
||||
tt {
|
||||
background-color: #eeeeee
|
||||
}
|
||||
|
||||
ul.auto-toc {
|
||||
list-style-type: none }
|
||||
|
||||
.code-block {
|
||||
font-family: Courier New, Courier, monospace;
|
||||
font-size: 14px;
|
||||
margin: 0 2em;
|
||||
padding: 1em;
|
||||
color: #000;
|
||||
background-color: #EEE;
|
||||
border: 1px solid #DDD;
|
||||
}
|
||||
|
||||
/* Python markup *********************************************/
|
||||
/*Python keyword*/
|
||||
.p_word {
|
||||
color: #036;
|
||||
}
|
||||
/*Python identifier*/
|
||||
.p_identifier {
|
||||
color: #36C;
|
||||
}
|
||||
/*Python number*/
|
||||
.p_number {
|
||||
color: #36C;
|
||||
}
|
||||
/*other text*/
|
||||
.p_default {
|
||||
color: #036;
|
||||
}
|
||||
/*Python operator*/
|
||||
.p_operator {
|
||||
color: #036;
|
||||
}
|
||||
/*Python comment*/
|
||||
.p_commentline {
|
||||
color: #036;
|
||||
}
|
||||
/*function name*/
|
||||
.p_defname {
|
||||
color: #F63;
|
||||
font-weight: bold;
|
||||
}
|
||||
/*class name*/
|
||||
.p_classname {
|
||||
color: #F00;
|
||||
font-weight: bold;
|
||||
}
|
||||
/*string literals*/
|
||||
.p_character {
|
||||
color: green;
|
||||
}
|
||||
/*string literals*/
|
||||
.p_string {
|
||||
color: green;
|
||||
}
|
||||
/*triple-quoted strings*/
|
||||
.p_triple {
|
||||
color: green;
|
||||
}
|
||||
|
||||
/* HTML markup *********************************************/
|
||||
/*an html tag*/
|
||||
.h_tag {
|
||||
color: #36C;
|
||||
}
|
||||
/*text in a tag*/
|
||||
.h_default {
|
||||
color: #036;
|
||||
}
|
||||
/*attribute name*/
|
||||
.h_attribute {
|
||||
color: #6C3;
|
||||
}
|
||||
/*a double-quoted attribute value*/
|
||||
.h_doublestring {
|
||||
color: green;
|
||||
}
|
||||
/*attribute equals sign, for example*/
|
||||
.h_other {
|
||||
color: #036;
|
||||
}
|
||||
|
|
@ -1 +0,0 @@
|
|||
buildhtml.py --embed --stylesheet default.css --output-encoding iso-8859-1 --prune .svn --prune api/ --prune pydoctor --prune data .
|
|
@ -1,3 +0,0 @@
|
|||
cd ../creole
|
||||
epydoc --html --no-private --output ../doc/api .
|
||||
|
|
@ -1,57 +0,0 @@
|
|||
Templates créole
|
||||
================
|
||||
|
||||
comportement des templates
|
||||
--------------------------
|
||||
|
||||
Template_
|
||||
|
||||
.. _Template: api/creole.template.Template-class.html
|
||||
|
||||
validations
|
||||
-----------
|
||||
|
||||
Template.verify_
|
||||
|
||||
.. _Template.verify: api/creole.template.Template-class.html#verify
|
||||
|
||||
|
||||
|
||||
fichiers cibles
|
||||
fichiers modèle qui vont être instanciés au final (fichier destination)
|
||||
|
||||
|
||||
- le fichier source (templatisé) *doit* exister ainsi que le
|
||||
fichier de destination (le fichier de configuration effectif)
|
||||
portant le même nom :
|
||||
|
||||
- le fichier cible, c'est-à-dire le fichier de configuration
|
||||
instancié, doit être présent
|
||||
|
||||
|
||||
>>> import creole
|
||||
>>> from creole.template import Template
|
||||
|
||||
>>> try:
|
||||
... t = Template('nexistepas.txt', templatedir= '/tmp')
|
||||
... t.verify()
|
||||
... except creole.error.FileNotFound, e:
|
||||
... print e
|
||||
...
|
||||
le fichier /tmp/nexistepas.txt n'existe pas
|
||||
>>>
|
||||
|
||||
.. note:: les deux vérifications (template source et fichier
|
||||
destination) sont faites en même temps
|
||||
|
||||
- le répertoire source *doit* exister
|
||||
|
||||
>>> try:
|
||||
... t = Template('/etc/group', templatedir= '/reperoire/qui/n/existe/pas')
|
||||
... t.verify()
|
||||
... except creole.error.FileNotFound, e:
|
||||
... print e
|
||||
...
|
||||
le fichier /reperoire/qui/n/existe/pas/group n'existe pas
|
||||
>>>
|
||||
|
|
@ -1,41 +0,0 @@
|
|||
utilitaires techniques créole
|
||||
=============================
|
||||
|
||||
utilitaire de tests
|
||||
-------------------
|
||||
|
||||
|
||||
creolecat_
|
||||
|
||||
.. _creolecat: api/creole.creolecat-module.html
|
||||
|
||||
|
||||
un utilitaire de tests est mis à disposition pour ceux qui
|
||||
souhaitent tester leurs fichiers de template sans pour autant lancer
|
||||
une instanciation:
|
||||
|
||||
usage::
|
||||
|
||||
creolecat.py -x <path>/eole.xml -o <path>/test.txt testtemplate.tmpl
|
||||
|
||||
testtemplate est le fichier à instancier
|
||||
|
||||
lancer l'option --help pour plus de détails
|
||||
|
||||
utilitaire de conversion
|
||||
------------------------
|
||||
|
||||
conversion dans l'ancien langage de templating (notations *[%*)
|
||||
|
||||
pour ceux qui avaient déjà commencé leurs activités de templating pour
|
||||
créole 2 (donc avec une autre notation), un utilitaire de conversion
|
||||
est mis à disposition.
|
||||
Il est dans la lib python creole et s'appelle creole2cheetah_
|
||||
|
||||
.. _creole2cheetah: api/creole.creole2cheetah-module.html
|
||||
|
||||
usage::
|
||||
|
||||
cd creole
|
||||
[creole] ./creole2cheetah.py [nom du fichier source] > [nom du fichier destination]
|
||||
|
|
@ -1,201 +0,0 @@
|
|||
Variables créole
|
||||
================
|
||||
|
||||
typeole_
|
||||
|
||||
.. _typeole: api/creole.typeole-module.html
|
||||
|
||||
variable créole
|
||||
|
||||
instance d'un objet type eole ; à un nom de variable peuvent
|
||||
correspondre plusieurs valeurs
|
||||
|
||||
>>> from creole import typeole
|
||||
>>> var = typeole.EoleVar('mavariable')
|
||||
>>> var.val
|
||||
[]
|
||||
>>> var.set_value('valeur')
|
||||
>>> var.set_value('defaut', default=True)
|
||||
>>> var.val
|
||||
['valeur']
|
||||
>>> var.valdefault
|
||||
['defaut']
|
||||
>>> var.description = 'variable de test'
|
||||
>>> var.description
|
||||
'variable de test'
|
||||
>>>
|
||||
|
||||
il est possible de créer une variable Eole à l'aide
|
||||
d'une factory :
|
||||
|
||||
>>> var2 = typeole.type_factory('string', 'test_string', valeole=["eole"], valdefault=["def"])
|
||||
>>> var2.get_value()
|
||||
['def']
|
||||
>>>
|
||||
|
||||
des vérifications de type sont faites au moment du *set_value()*
|
||||
|
||||
collecte des variables créole
|
||||
-----------------------------
|
||||
|
||||
collecte
|
||||
|
||||
Récupération des variables qui serviront à la constitution du dictionnaire Eole
|
||||
|
||||
Les données du dictionnaire sont collectées à partir de différents fichiers dans un premier format XML.
|
||||
|
||||
sur une machine cible, une collecte des variables eole est faite avec parse_dico_::
|
||||
|
||||
from creole.parsedico import parse_dico
|
||||
parse_dico()
|
||||
|
||||
.. ce test n'est pas lancé car il peut y avoir un dico sur le poste
|
||||
de travail
|
||||
|
||||
.. _parse_dico: api/creole.parsedico-module.html
|
||||
|
||||
Le dictionnaire créole est vide. Pour le remplir, il faut
|
||||
récupérer des données depuis un fichier xml initial::
|
||||
|
||||
my_dict = EoleDict()
|
||||
my_dict.read(join('/etc/eole/','eole.xml'))
|
||||
|
||||
.. TODO: read_string(self, xml_string)
|
||||
|
||||
Utilisation du dictionnaire
|
||||
---------------------------
|
||||
|
||||
dictionnaire
|
||||
|
||||
fichier au format xml contenant :
|
||||
- une liste de fichiers
|
||||
- une liste de variables
|
||||
|
||||
famille
|
||||
|
||||
Il s'agit d'un regroupement de variables utilisé pour la saisie : on parle alors de famille de variables
|
||||
|
||||
groupe
|
||||
|
||||
Il s'agit de variables de type `liste` dont les éléments sont liés aux éléments correspondants des autres variables du groupe :
|
||||
eth[2] aura un lien avec netmask[2] et network[2].
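
petite illustration (valeurs purement fictives) du lien index par index entre
les éléments d'un groupe::

    eth     = ['eth0', 'eth1', 'eth2']
    netmask = ['255.255.255.0', '255.255.0.0', '255.255.255.128']
    network = ['10.0.0.0', '172.16.0.0', '192.168.1.0']
    # la tranche d'indice 2 forme un enregistrement cohérent
    assert (eth[2], netmask[2], network[2]) == \
           ('eth2', '255.255.255.128', '192.168.1.0')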
|
||||
|
||||
Plutôt que d'utiliser `parsedico`, construisons un dictionnaire creole EoleDict_ :
|
||||
|
||||
>>> from creole import cfgparser
|
||||
>>> from creole import typeole
|
||||
>>>
|
||||
>>> dico = cfgparser.EoleDict()
|
||||
>>> dico.variables['ip_eth'] = typeole.type_factory('string', 'ip_eth', val=['ip0', 'ip1', 'ip2'])
|
||||
>>> dico.variables['nom_etab'] = typeole.type_factory('string', 'nom_etab', val=['etab'])
|
||||
>>> dico.variables['vrai'] = typeole.type_factory('boolean', 'vrai', val=[True])
|
||||
>>> dico.variables['faux'] = typeole.type_factory('string', 'faux', val=['faux'])
|
||||
>>> dico.variables['toto'] = typeole.type_factory('string', 'toto', val=['toto'])
|
||||
|
||||
voici comment accéder aux variables créole
|
||||
|
||||
>>> assert dico.get_value('ip_eth') == ['ip0', 'ip1', 'ip2']
|
||||
>>> assert dico.get_value('nom_etab') == ['etab']
|
||||
|
||||
.. _EoleDict : api/creole.cfgparser.EoleDict-class.html
|
||||
|
||||
|
||||
variables de template
|
||||
-----------------------
|
||||
|
||||
|
||||
lorsqu'on utilise un appel de bas niveau de traitement de template,
|
||||
c'est-à-dire l'appel direct à la
|
||||
méthode process_ d'un template, il faut vérifier qu'une variable
|
||||
est bien instanciée avec le bon contexte de dictionnaire :
|
||||
|
||||
.. _process: api/creole.template.Template-class.html
|
||||
|
||||
>>> from creole.cfgparser import EoleDict
|
||||
>>> from creole import typeole
|
||||
>>> from creole.template import Template
|
||||
>>> dico = EoleDict()
|
||||
>>> dico.variables['toto'] = typeole.type_factory('string',
|
||||
... 'toto', val=['toto'], context=dico)
|
||||
>>> t = Template('data/dst/test.tmpl', templatedir= 'data/src')
|
||||
>>> t.verify()
|
||||
>>> t.process(dico)
|
||||
>>> f = open('data/dst/test.tmpl')
|
||||
>>> res = f.read()
|
||||
>>> f.close()
|
||||
>>> assert 'titi' not in res
|
||||
>>> dico.set_value('toto', 'titi')
|
||||
>>> t.process(dico)
|
||||
>>> f = open('data/dst/test.tmpl')
|
||||
>>> res = f.read()
|
||||
>>> f.close()
|
||||
>>> assert 'titi' in res
|
||||
|
||||
le contexte `dico` est passé à la variable `toto`::
|
||||
|
||||
dico.variables['toto'] = typeole.type_factory('string',
|
||||
'toto', val=['toto'], context=dico)
|
||||
|
||||
variables automatiques
|
||||
----------------------
|
||||
|
||||
variable automatique
|
||||
|
||||
variable présente dans le dictionnaire xml mais pas dans le fichier *.ini* de configuration.
|
||||
La valeur de cette variable (son appel à *.get_value()*) est soumise à une fonction de traitement
|
||||
spécifiée dans le xml, qui calcule la variable au lieu de formater sa valeur.
|
||||
|
||||
Une variable automatique simple n'est pas traitée différemment d'une variable dont la valeur est présente dans le dictionnaire et qui est soumise à une condition de vérification de sa valeur. Simplement, aucune vérification n'est effectuée et la valeur est calculée.
|
||||
|
||||
déclaration de la variable::
|
||||
|
||||
<variable name='server_mem' type='string' description='memoire du serveur' auto='True' />
|
||||
|
||||
déclaration de la fonction de remplissage::
|
||||
|
||||
<fill name='server_mem' target='server_mem' />
|
||||
|
||||
deux fonctions strictement automatiques sont implémentées: `server_mem` et `kernel_version`
|
||||
|
||||
variable semi-automatique
|
||||
|
||||
variable remplie automatiquement dans le cas d'une condition sur une autre variable ;
|
||||
si cette condition n'est pas remplie, c'est l'utilisateur qui la remplit (ou une autre fonction).
|
||||
La condition est traitée à deux niveaux : dans la fonction de remplissage et au niveau de l'affichage.
|
||||
On déclare donc deux fonctions pour ce conditionnement (une fonction fill avec la variable
|
||||
conditionnante en paramètre et une fonction condition qui conditionne l'affichage de la variable).
|
||||
Exemple : récupération des adresses eth dans le cas où l'on n'a pas de dhcp.
|
||||
|
||||
déclaration de la variable semi-auto::
|
||||
|
||||
<variable name='eth0' type='string' auto='True'/>
|
||||
|
||||
déclaration de la variable qui définit la condition::
|
||||
|
||||
<variable name='dhcp' type='boolean' description='Activation du dhcp' >
|
||||
<value>non</value>
|
||||
</variable>
|
||||
<check name='valid_enum' target='dhcp'>
|
||||
<param>['oui','non']</param>
|
||||
</check>
|
||||
|
||||
déclaration de la fonction de contrôle d'automatisme, la variable eth0 est remplie automatiquement par la fonction
|
||||
*auto_eth* si le paramètre dhcp est égal à la condition *oui*::
|
||||
|
||||
<fill name='auto_eth' target='eth0'>
|
||||
<param>eth0</param>
|
||||
<param name='condition'>oui</param>
|
||||
<param type='eole' name='parametre'>dhcp</param>
|
||||
</fill>
|
||||
|
||||
déclaration de la fonction de contrôle d'éditabilité::
|
||||
|
||||
<condition name='hidden_if_in' source='dhcp'>
|
||||
<param>oui</param>
|
||||
<target type='variable'>eth0</target>
|
||||
</condition>
|
||||
|
||||
pour l'instant sont disponibles auto_eth, auto_netmask, auto_broadcast et auto_network.
|
||||
|
||||
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
le fichier de configuration créole
|
||||
==================================
|
||||
|
||||
format xml
|
||||
----------
|
||||
|
||||
Pour plus de documentation sur le format xml du dictionnaire créole,
|
||||
se référer à la documentation de l'éditeur xml créole (*exc*)
|
|
@ -1,187 +0,0 @@
|
|||
.\"
|
||||
.\" Manual page for Maj-Auto command
|
||||
.\"
|
||||
.TH Maj-Auto 8 "September 2016" "Maj-Auto 2.6.0" "Creole command - EOLE"
|
||||
|
||||
.SH NAME
|
||||
Maj-Auto \- Automatic update for EOLE servers
|
||||
|
||||
.SH SYNOPSIS
|
||||
.SY Maj-Auto
|
||||
.OP \-l {debug,info,warning,error,critical}
|
||||
.OP \-v
|
||||
.OP \-d
|
||||
.OP \-h
|
||||
.OP \-n
|
||||
.OP \-f
|
||||
.OP \-F
|
||||
.OP \-s
|
||||
.OP \-C\ |\ \-D [{eole,envole}\ [{eole,envole}\ ...]]
|
||||
.OP \-r
|
||||
.OP \-R
|
||||
.OP \-\-download
|
||||
.OP \-S \fIEOLE_MIRROR\fR
|
||||
.OP \-U \fIUBUNTU_MIRROR\fR
|
||||
.OP \-V \fIENVOLE_MIRROR\fR
|
||||
.OP \-c
|
||||
.OP \-W
|
||||
.OP \-i
|
||||
.YS
|
||||
.SH DESCRIPTION
|
||||
.B Maj-Auto
|
||||
command allows you to manually initiate the update of all packages changed since the release of the latest stable version.
|
||||
.br
|
||||
You benefit from security updates and critical bugfixes, but not the latest improvements.
|
||||
.br
|
||||
To take advantage of feature additions to the current version of the server, use the \fBMaj-Release\fR command.
|
||||
.br
|
||||
For this, the apt-get sources file must be set up properly and the network must be operational (\fBdiagnose\fR command).
|
||||
.br
|
||||
The mirror address is:
|
||||
\fIhttp://eole.ac-dijon.fr/eole\fB/eole\fR : EOLE repository
|
||||
.RS 48
|
||||
.br
|
||||
\fB/ubuntu\fR : Ubuntu repository
|
||||
.br
|
||||
\fB/envole\fR : Envole repository
|
||||
.SH OPTIONS
|
||||
The following options are supported:
|
||||
.TP
|
||||
\fB\-h\fR, \fB\-\-help\fR
|
||||
show this help message and exit
|
||||
.TP
|
||||
\fB\-n\fR, \fB\-\-dry\-run\fR
|
||||
run in dry\-run mode (forced to True when using Query\-Auto).
|
||||
.TP
|
||||
\fB\-f\fR, \fB\-\-force\fR
|
||||
bypass Zephir authorizations.
|
||||
.TP
|
||||
\fB\-F\fR, \fB\-\-force_update\fR
|
||||
update your server without any confirmation.
|
||||
.TP
|
||||
\fB\-s\fR, \fB\-\-simulate\fR
|
||||
ask apt\-get to simulate packages installation
|
||||
.TP
|
||||
\fB\-C\fR [{eole,envole} [{eole,envole} ...]], \fB\-\-candidat\fR [{eole,envole} [{eole,envole} ...]]
|
||||
use testing packages.
|
||||
.TP
|
||||
\fB\-D\fR [{eole,envole} [{eole,envole} ...]], \fB\-\-devel\fR [{eole,envole} [{eole,envole} ...]]
|
||||
use development packages.
|
||||
.TP
|
||||
\fB\-r\fR, \fB\-\-reconfigure\fR
|
||||
run reconfigure on successful upgrade.
|
||||
.TP
|
||||
\fB\-R\fR, \fB\-\-reboot\fR
|
||||
run reconfigure on successful upgrade and reboot if
|
||||
necessary (implies \fB\-r\fR).
|
||||
.TP
|
||||
\fB\-\-download\fR
|
||||
only download packages in cache.
|
||||
.TP
|
||||
\fB\-S\fR EOLE_MIRROR, \fB\-\-eole\-mirror\fR EOLE_MIRROR
|
||||
EOLE repository server.
|
||||
.TP
|
||||
\fB\-U\fR UBUNTU_MIRROR, \fB\-\-ubuntu\-mirror\fR UBUNTU_MIRROR
|
||||
Ubuntu repository server.
|
||||
.TP
|
||||
\fB\-V\fR \fIENVOLE_MIRROR\fR, \fB\-\-envole\-mirror\fR \fIENVOLE_MIRROR\fR
|
||||
Envole repository server.
|
||||
.TP
|
||||
\fB\-c\fR, \fB\-\-cdrom\fR
|
||||
use CDROM as source.
|
||||
.TP
|
||||
\fB\-W\fR
|
||||
specific output for EAD.
|
||||
.TP
|
||||
\fB\-i\fR, \fB\-\-ignore\fR
|
||||
ignore local configuration if creoled not responding.
|
||||
.SS "logging:"
|
||||
.TP
|
||||
\fB\-l\fR {debug,info,warning,error,critical}, \fB\-\-log\-level\fR {debug,info,warning,error,critical}
|
||||
Log level
|
||||
.TP
|
||||
\fB\-v\fR, \fB\-\-verbose\fR
|
||||
Verbose mode, equivalent to -l info
|
||||
.TP
|
||||
\fB\-d\fR, \fB\-\-debug\fR
|
||||
Debug mode, equivalent to -l debug
|
||||
.SH EXAMPLES
|
||||
.TP
|
||||
Use testing packages for EOLE repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -C eole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use testing packages for Envole repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -C envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use testing packages for EOLE and Envole repositories and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -C eole envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use testing packages for all repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -C\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for EOLE repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -D eole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for Envole repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -D envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for EOLE and Envole repositories and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -D eole envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for all repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -D\fP
|
||||
.fi
|
||||
.RE
|
||||
.SH "SEE ALSO"
|
||||
.B Maj-Cd
|
||||
(8),
|
||||
.B diagnose
|
||||
(8),
|
||||
.B Query-Auto
|
||||
(8),
|
||||
.B apt-eole
|
||||
(8),
|
||||
.B apt-get
|
||||
(8),
|
||||
.B creole
|
||||
(8).
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
To report a bug, check the following address: \fIhttps://dev-eole.ac-dijon.fr/projects/eole/wiki/Accueil\fR
|
||||
|
||||
.SH "AUTHORS"
|
||||
.PP
|
||||
.B EOLE team
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
.\" Maj-Auto.8 2.5.2
|
|
@ -1,52 +0,0 @@
|
|||
.\"
|
||||
.\" Manual page for Maj-Release command
|
||||
.\"
|
||||
.TH Maj-Release 8 "December 2015" "Maj-Release 2.5.0" "Creole command - EOLE"
|
||||
|
||||
.SH NAME
|
||||
Maj-Release \- Automatic release update for EOLE servers
|
||||
|
||||
.SH DESCRIPTION
|
||||
.B Maj-Release
|
||||
command allows you to manually initiate the release update to the latest stable release.
|
||||
.br
|
||||
It is not an upgrade to a new version.
|
||||
.br
|
||||
You benefit from the latest improvements for the current server version as well as updates and security bugfixes.
|
||||
.br
|
||||
For this, the apt-get sources file must be set up properly and the network must be operational (\fBdiagnose\fP command).
|
||||
.br
|
||||
The mirror address is:
|
||||
\fIhttp://eole.ac-dijon.fr/eole\fB/eole\fR : EOLE repository
|
||||
.RS 48
|
||||
.br
|
||||
\fB/ubuntu\fR : Ubuntu repository
|
||||
.br
|
||||
\fB/envole\fR : Envole repository
|
||||
.SH "SEE ALSO"
|
||||
.B Maj-Auto
|
||||
(8),
|
||||
.B Maj-Cd
|
||||
(8),
|
||||
.B diagnose
|
||||
(8),
|
||||
.B Query-Auto
|
||||
(8),
|
||||
.B Upgrade-Auto
|
||||
(8),
|
||||
.B apt-eole
|
||||
(8),
|
||||
.B apt-get
|
||||
(8),
|
||||
.B creole
|
||||
(8).
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
To report a bug, check the following address: \fIhttps://dev-eole.ac-dijon.fr/projects/eole/wiki/Accueil\fR
|
||||
|
||||
.SH "AUTHORS"
|
||||
.PP
|
||||
.B EOLE team
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
.\" Maj-Release.8 2.5.0
|
|
@ -1,185 +0,0 @@
|
|||
.\"
|
||||
.\" Manual page for Query-Auto command
|
||||
.\"
|
||||
.TH Query-Auto 8 "September 2015" "Query-Auto 2.6.0" "Creole command - EOLE"
|
||||
|
||||
.SH NAME
|
||||
Query-Auto \- Automatic update for EOLE servers
|
||||
|
||||
.SH SYNOPSIS
|
||||
.SY Query-Auto
|
||||
.OP \-l {debug,info,warning,error,critical}
|
||||
.OP \-v
|
||||
.OP \-d
|
||||
.OP \-h
|
||||
.OP \-n
|
||||
.OP \-f
|
||||
.OP \-F
|
||||
.OP \-s
|
||||
.OP \-C\ |\ \-D [{eole,envole}\ [{eole,envole}\ ...]]
|
||||
.OP \-r
|
||||
.OP \-R
|
||||
.OP \-\-download
|
||||
.OP \-S \fIEOLE_MIRROR\fR
|
||||
.OP \-U \fIUBUNTU_MIRROR\fR
|
||||
.OP \-V \fIENVOLE_MIRROR\fR
|
||||
.OP \-c
|
||||
.OP \-W
|
||||
.OP \-i
|
||||
.YS
|
||||
.SH DESCRIPTION
|
||||
.B Query-Auto
|
||||
command allows you to manually initiate the update of all packages changed since the release of the latest stable version.
|
||||
.br
|
||||
You benefit from the latest improvements, updates and security bugfixes.
|
||||
.br
|
||||
For this, the apt-get sources file must be set up properly and the network must be operational (\fBdiagnose\fP command).
|
||||
.br
|
||||
The mirror address is:
|
||||
\fIhttp://eole.ac-dijon.fr/eole\fB/eole\fR : EOLE repository
|
||||
.RS 48
|
||||
.br
|
||||
\fB/ubuntu\fR : Ubuntu repository
|
||||
.br
|
||||
\fB/envole\fR : Envole repository
|
||||
.SH OPTIONS
|
||||
The following options are supported:
|
||||
.TP
|
||||
\fB\-h\fR, \fB\-\-help\fR
|
||||
show this help message and exit
|
||||
.TP
|
||||
\fB\-n\fR, \fB\-\-dry\-run\fR
|
||||
run in dry\-run mode (forced to True when using Query\-Auto).
|
||||
.TP
|
||||
\fB\-f\fR, \fB\-\-force\fR
|
||||
bypass Zephir authorizations.
|
||||
.TP
|
||||
\fB\-F\fR, \fB\-\-force_update\fR
|
||||
update your server without any confirmation.
|
||||
.TP
|
||||
\fB\-s\fR, \fB\-\-simulate\fR
|
||||
ask apt\-get to simulate packages installation
|
||||
.TP
|
||||
\fB\-C\fR [{eole,envole} [{eole,envole} ...]], \fB\-\-candidat\fR [{eole,envole} [{eole,envole} ...]]
|
||||
use testing packages.
|
||||
.TP
|
||||
\fB\-D\fR [{eole,envole} [{eole,envole} ...]], \fB\-\-devel\fR [{eole,envole} [{eole,envole} ...]]
|
||||
use development packages.
|
||||
.TP
|
||||
\fB\-r\fR, \fB\-\-reconfigure\fR
|
||||
run reconfigure on successful upgrade.
|
||||
.TP
|
||||
\fB\-R\fR, \fB\-\-reboot\fR
|
||||
run reconfigure on successful upgrade and reboot if
|
||||
necessary (implies \fB\-r\fR).
|
||||
.TP
|
||||
\fB\-\-download\fR
|
||||
only download packages in cache.
|
||||
.TP
|
||||
\fB\-S\fR EOLE_MIRROR, \fB\-\-eole\-mirror\fR EOLE_MIRROR
|
||||
EOLE repository server.
|
||||
.TP
|
||||
\fB\-U\fR UBUNTU_MIRROR, \fB\-\-ubuntu\-mirror\fR UBUNTU_MIRROR
|
||||
Ubuntu repository server.
|
||||
.TP
|
||||
\fB\-V\fR \fIENVOLE_MIRROR\fR, \fB\-\-envole\-mirror\fR \fIENVOLE_MIRROR\fR
|
||||
Envole repository server.
|
||||
.TP
|
||||
\fB\-c\fR, \fB\-\-cdrom\fR
|
||||
use CDROM as source.
|
||||
.TP
|
||||
\fB\-W\fR
|
||||
specific output for EAD.
|
||||
.TP
|
||||
\fB\-i\fR, \fB\-\-ignore\fR
|
||||
ignore local configuration if creoled not responding.
|
||||
.SS "logging:"
|
||||
.TP
|
||||
\fB\-l\fR {debug,info,warning,error,critical}, \fB\-\-log\-level\fR {debug,info,warning,error,critical}
|
||||
Log level
|
||||
.TP
|
||||
\fB\-v\fR, \fB\-\-verbose\fR
|
||||
Verbose mode, equivalent to -l info
|
||||
.TP
|
||||
\fB\-d\fR, \fB\-\-debug\fR
|
||||
Debug mode, equivalent to -l debug
|
||||
.SH EXAMPLES
|
||||
.TP
|
||||
Use testing packages for EOLE repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -C eole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use testing packages for Envole repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -C envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use testing packages for EOLE and Envole repositories and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -C eole envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use testing packages for all repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -C\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for EOLE repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -D eole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for Envole repository and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -D envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for EOLE and Envole repositories and keep stable ones for other repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -D eole envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Use development packages for all repositories:
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -D\fP
|
||||
.fi
|
||||
.RE
|
||||
.SH "SEE ALSO"
|
||||
.B Maj-Cd
|
||||
(8),
|
||||
.B diagnose
|
||||
(8),
|
||||
.B Maj-Auto
|
||||
(8),
|
||||
.B apt-eole
|
||||
(8),
|
||||
.B apt-get
|
||||
(8),
|
||||
.B creole
|
||||
(8).
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
To report a bug, check the following address: \fIhttps://dev-eole.ac-dijon.fr/projects/eole/wiki/Accueil\fR
|
||||
|
||||
.SH "AUTHORS"
|
||||
.PP
|
||||
.B EOLE team
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
.\" Query-Auto.8 2.5.2
|
|
@ -1,48 +0,0 @@
|
|||
.\"
|
||||
.\" Manual page for Maj-Release command
|
||||
.\"
|
||||
.TH Upgrade-Auto 8 "December 2015" "Version 2.4.2" "Creole command - EOLE"
|
||||
|
||||
.SH NAME
|
||||
Upgrade-Auto \- EOLE distribution upgrade tool
|
||||
|
||||
.SH DESCRIPTION
|
||||
.B Upgrade-Auto
|
||||
command allows you to manually initiate a module upgrade to the latest stable version.
|
||||
.br
|
||||
You benefit from the latest improvements as well as updates and security bugfixes.
|
||||
.br
|
||||
For this, the apt-get sources file must be set up properly and the network must be operational (\fBdiagnose\fP command).
|
||||
.br
|
||||
The mirror address is:
|
||||
\fIhttp://eole.ac-dijon.fr/eole\fB/eole\fR : EOLE repository
|
||||
.RS 48
|
||||
.br
|
||||
\fB/ubuntu\fR : Ubuntu repository
|
||||
.br
|
||||
\fB/envole\fR : Envole repository
|
||||
.SH "SEE ALSO"
|
||||
.B Maj-Auto
|
||||
(8),
|
||||
.B Maj-Cd
|
||||
(8),
|
||||
.B diagnose
|
||||
(8),
|
||||
.B Query-Auto
|
||||
(8),
|
||||
.B apt-eole
|
||||
(8),
|
||||
.B apt-get
|
||||
(8),
|
||||
.B creole
|
||||
(8).
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
To report a bug, check the following address: \fIhttps://dev-eole.ac-dijon.fr/projects/eole/wiki/Accueil\fR
|
||||
|
||||
.SH "AUTHORS"
|
||||
.PP
|
||||
.B EOLE team
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
.\" Upgrade-Auto.8 2.4.2
|
|
@ -1,73 +0,0 @@
|
|||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.40.4.
|
||||
.TH MAJ-AUTO "1" "October 2014" "Maj-Auto 2.4.1" "User Commands"
|
||||
.SH NAME
|
||||
Maj-Auto \- manual page for Maj-Auto 2.4.1
|
||||
.SH DESCRIPTION
|
||||
usage: Maj\-Auto|Query\-Auto [\-h] [\-c CONTAINER]
|
||||
.IP
|
||||
[\-l {debug,info,warning,error,critical}] [\-v] [\-d]
|
||||
[\-n] [\-f] [\-C | \fB\-D]\fR [\-r] [\-R] [\-\-download]
|
||||
[\-S EOLE_MIRROR] [\-U UBUNTU_MIRROR] [\-W]
|
||||
.PP
|
||||
Manage EOLE server automatic update
|
||||
.SS "optional arguments:"
|
||||
.TP
|
||||
\fB\-h\fR, \fB\-\-help\fR
|
||||
show this help message and exit
|
||||
.TP
|
||||
\fB\-n\fR, \fB\-\-dry\-run\fR
|
||||
run in dry\-run mode (force to True when using QueryAuto).
|
||||
.TP
|
||||
\fB\-f\fR, \fB\-\-force\fR
|
||||
bypass Zephir authorizations.
|
||||
.TP
|
||||
\fB\-C\fR, \fB\-\-candidat\fR
|
||||
use testing packages.
|
||||
.TP
|
||||
\fB\-D\fR, \fB\-\-devel\fR
|
||||
use development packages.
|
||||
.TP
|
||||
\fB\-r\fR, \fB\-\-reconfigure\fR
|
||||
run reconfigure on successful upgrade.
|
||||
.TP
|
||||
\fB\-R\fR, \fB\-\-reboot\fR
|
||||
run reconfigure on successful upgrade and reboot if
|
||||
necessary (implies \fB\-r\fR).
|
||||
.TP
|
||||
\fB\-\-download\fR
|
||||
only download packages in cache.
|
||||
.TP
|
||||
\fB\-S\fR EOLE_MIRROR, \fB\-\-eole\-mirror\fR EOLE_MIRROR
|
||||
EOLE repository server.
|
||||
.TP
|
||||
\fB\-U\fR UBUNTU_MIRROR, \fB\-\-ubuntu\-mirror\fR UBUNTU_MIRROR
|
||||
Ubuntu repository server.
|
||||
.TP
|
||||
\fB\-W\fR
|
||||
specific output for EAD.
|
||||
.SS "container:"
|
||||
.TP
|
||||
\fB\-c\fR CONTAINER, \fB\-\-container\fR CONTAINER
|
||||
Name of LXC container
|
||||
.SS "logging:"
|
||||
.TP
|
||||
\fB\-l\fR {debug,info,warning,error,critical}, \fB\-\-log\-level\fR {debug,info,warning,error,critical}
|
||||
Log level
|
||||
.TP
|
||||
\fB\-v\fR, \fB\-\-verbose\fR
|
||||
Verbose mode
|
||||
.TP
|
||||
\fB\-d\fR, \fB\-\-debug\fR
|
||||
Debug mode
|
||||
.SH "SEE ALSO"
|
||||
The full documentation for
|
||||
.B Maj-Auto
|
||||
is maintained as a Texinfo manual. If the
|
||||
.B info
|
||||
and
|
||||
.B Maj-Auto
|
||||
programs are properly installed at your site, the command
|
||||
.IP
|
||||
.B info Maj-Auto
|
||||
.PP
|
||||
should give you access to the complete manual.
|
|
@ -1 +0,0 @@
|
|||
Upgrade-Auto.8
|
|
@ -1,64 +0,0 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
|
||||
<creole>
|
||||
<family_action name="Tâches planifiées"
|
||||
description="Gestion des tâches planifiées"
|
||||
color="#8cd98c"
|
||||
image="icons/appointment-new.svg">
|
||||
<action type="form"
|
||||
title="Tâches planifiées"
|
||||
save="True"
|
||||
description="Paramétrer les tâches planifiées (heure, jour)"
|
||||
image="icons/x-office-calendar.svg">
|
||||
<input>Programmer</input>
|
||||
<profile>ead_admin</profile>
|
||||
<ewtapp>ead</ewtapp>
|
||||
<tag>maj</tag>
|
||||
<tag>schedule</tag>
|
||||
</action>
|
||||
</family_action>
|
||||
<variables>
|
||||
<family name="schedule" description="Heure et jour d'exécution des tâches planifiées">
|
||||
<variable description="Heure" name='hour' type='number' auto_save='True'/>
|
||||
<variable description="Minute" name='minute' type='number' auto_save='True'/>
|
||||
<variable description="Jour des tâches hebdomadaires (1 : lundi)" name='weekday' type='number' auto_save='True'/>
|
||||
<variable description="Jour des tâches mensuelles la première semaine du mois (1 : lundi)" name='monthday' type='number' auto_save='True'/>
|
||||
</family>
|
||||
</variables>
|
||||
<constraints>
|
||||
<check name='valid_enum' target='schedule.schedule.weekday'>
|
||||
<param>[1, 2, 3, 4, 5, 6, 7]</param>
|
||||
</check>
|
||||
<check name='valid_enum' target='schedule.schedule.monthday'>
|
||||
<param>[1, 2, 3, 4, 5, 6, 7]</param>
|
||||
</check>
|
||||
<check name='valid_enum' target='schedule.schedule.hour'>
|
||||
<param>[1, 2, 3, 4, 5]</param>
|
||||
</check>
|
||||
<check name='valid_enum' target='schedule.schedule.minute'>
|
||||
<param type='python'>range(0, 60)</param>
|
||||
</check>
|
||||
<fill name="random_int" target='schedule.schedule.hour'>
|
||||
<param type='number'>1</param>
|
||||
<param type='number'>5</param>
|
||||
</fill>
|
||||
<fill name="random_int" target='schedule.schedule.minute'>
|
||||
<param type='number'>0</param>
|
||||
<param type='number'>59</param>
|
||||
</fill>
|
||||
<fill name="random_int" target='schedule.schedule.weekday'>
|
||||
<param type='number'>1</param>
|
||||
<param type='number'>7</param>
|
||||
</fill>
|
||||
<fill name="random_int" target='schedule.schedule.monthday'>
|
||||
<param type='number'>1</param>
|
||||
<param type='number'>7</param>
|
||||
<param name='exclude' type='eole'>schedule.schedule.weekday</param>
|
||||
</fill>
|
||||
<check name='valid_differ' target='schedule.schedule.monthday'>
|
||||
<param type='eole'>schedule.schedule.weekday</param>
|
||||
</check>
|
||||
</constraints>
|
||||
<help/>
|
||||
</creole>
|
||||
|
|
@ -1,12 +0,0 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
|
||||
<creole>
|
||||
<variables>
|
||||
<family name='majauto'
|
||||
description="Mise à jour automatique">
|
||||
<variable name="description" type="string" hidden="True"><value>Mise à jour du serveur</value></variable>
|
||||
<variable name="day" type="schedule" description="Périodicité d'exécution"><value>weekly</value></variable>
|
||||
<variable name="mode" type="schedulemod" hidden="True"><value>post</value></variable>
|
||||
</family>
|
||||
</variables>
|
||||
</creole>
|
|
@ -1,13 +0,0 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
|
||||
<creole>
|
||||
<variables>
|
||||
<family name='z_rebootauto'
|
||||
description="Redémarrage automatique"
|
||||
hidden="True">
|
||||
<variable name="description" type="string" hidden="True"><value>Redémarrage du serveur</value></variable>
|
||||
<variable name="day" type="schedule" description="Périodicité d'exécution"><value>none</value></variable>
|
||||
<variable name="mode" type="schedulemod" hidden="True"><value>post</value></variable>
|
||||
</family>
|
||||
</variables>
|
||||
</creole>
|
|
@ -1,13 +0,0 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
|
||||
<creole>
|
||||
<variables>
|
||||
<family name="z_shutdownauto"
|
||||
description="Arrêt automatique"
|
||||
hidden="True">
|
||||
<variable name="description" type="string" hidden="True"><value>Arrêt du serveur</value></variable>
|
||||
<variable name="day" type="schedule" description="Périodicité d'exécution"><value>none</value></variable>
|
||||
<variable name="mode" type="schedulemod" hidden="True"><value>post</value></variable>
|
||||
</family>
|
||||
</variables>
|
||||
</creole>
|
|
@ -1,13 +0,0 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
|
||||
<creole>
|
||||
<variables>
|
||||
<family name="y_reconfigureauto"
|
||||
description="Reconfigure automatique"
|
||||
hidden="True">
|
||||
<variable name="description" type="string" hidden="True"><value>Reconfigure du serveur</value></variable>
|
||||
<variable name="day" type="schedule" description="Périodicité d'exécution"><value>none</value></variable>
|
||||
<variable name="mode" type="schedulemod" hidden="True"><value>post</value></variable>
|
||||
</family>
|
||||
</variables>
|
||||
</creole>
|
|
@ -1,3 +0,0 @@
|
|||
cron:
|
||||
eole.file:
|
||||
- name: /etc/cron.d/schedule
|
|
@ -1,3 +0,0 @@
|
|||
include:
|
||||
- schedule.cron
|
||||
- schedule.manage
|
|
@ -1,3 +0,0 @@
|
|||
schedule:
|
||||
cmd.run:
|
||||
- name: /usr/share/eole/sbin/manage_schedule --apply
|
|
@ -1,109 +0,0 @@
|
|||
.\"
|
||||
.\" Page de manuel de la commande CreoleGet.
|
||||
.\"
|
||||
.TH CreoleGet 8 "04 Avril 2017" "Version 2.6.1" "Commande Creole - EOLE"
|
||||
|
||||
.SH NOM
|
||||
CreoleGet \- Récupération de la valeur d'une variable Creole
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B CreoleGet
|
||||
[
|
||||
.B OPTIONS
|
||||
]
|
||||
.B VARIABLE
|
||||
[
|
||||
.B DEFAULT
|
||||
]
|
||||
|
||||
.br
|
||||
.B CreoleGet
|
||||
[
|
||||
.B OPTIONS
|
||||
]
|
||||
.B --groups
|
||||
|
||||
.br
|
||||
.B CreoleGet
|
||||
[
|
||||
.B OPTIONS
|
||||
]
|
||||
.B --list
|
||||
|
||||
.br
|
||||
.B CreoleGet
|
||||
[
|
||||
.B OPTIONS
|
||||
]
|
||||
.B --reload
|
||||
|
||||
.br
|
||||
.B CreoleGet
|
||||
[
|
||||
.B OPTIONS
|
||||
]
|
||||
.B --reload-eol
|
||||
|
||||
.br
|
||||
.B CreoleGet
|
||||
.B \-h
|
||||
|
||||
.SH DESCRIPTION
|
||||
.B CreoleGet
|
||||
est un utilitaire très pratique pour récupérer la valeur d'une
|
||||
variable Creole
|
||||
|
||||
.SH ARGUMENTS
|
||||
.TP
|
||||
\fBVARIABLE\fP
|
||||
nom de la variable à lire
|
||||
.TP
|
||||
\fBDEFAULT\fP
|
||||
valeur à renvoyer en cas d'erreur (variable inconnue ou désactivée)
|
||||
|
||||
.SH OPTIONS
|
||||
Les options suivantes sont supportées:
|
||||
.TP
|
||||
\fB-d\fP
|
||||
active le mode de débogage
|
||||
.TP
|
||||
\fB-l\fP
|
||||
paramétrage du niveau de log (debug|info|warning|error|critical)
|
||||
.TP
|
||||
\fB-v\fP
|
||||
active le mode verbeux
|
||||
.TP
|
||||
\fB-h\fP
|
||||
Affiche de l'aide
|
||||
|
||||
.SH ACTIONS
|
||||
.TP
|
||||
\fB--groups\fP
|
||||
liste les groupes de conteneurs
|
||||
|
||||
.TP
|
||||
\fB--list\fP
|
||||
liste l'ensemble des variables creole
|
||||
|
||||
.TP
|
||||
\fB--reload\fP
|
||||
recharge toute la configuration creole (dictionnaires et valeurs)
|
||||
|
||||
.TP
|
||||
\fB--reload-eol\fP
|
||||
recharge les valeurs de configuration creole
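.P
Exemples d'utilisation (les noms de variables sont donnés à titre purement indicatif et dépendent des dictionnaires installés) :
.nf
CreoleGet nom_machine
CreoleGet variable_eventuellement_absente "valeur_par_defaut"
.fi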
|
||||
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
Pour signaler un bogue, consultez l'adresse suivante \fIhttp://dev-eole.ac-dijon.fr/projects/creole\fR
|
||||
|
||||
.SH "AUTEURS"
|
||||
.PP
|
||||
.B Equipe EOLE
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
|
||||
.SH "VOIR AUSSI"
|
||||
.B creole
|
||||
(8).
|
||||
|
|
@ -1,57 +0,0 @@
|
|||
.\"
|
||||
.\" Page de manuel de la commande CreoleLint.
|
||||
.\"
|
||||
.TH CreoleLint 8 "11 octobre 2013" "Version 2.4" "Commande Creole - EOLE"
|
||||
|
||||
.SH NOM
|
||||
CreoleLint \- Outil de validation des dictionnaires et templates Creole
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B CreoleLint
|
||||
[
|
||||
.B \-t TMPL DIR
|
||||
] [
|
||||
.B \-l info|warning|error
|
||||
] [
|
||||
.B \-n LINT_NAME
|
||||
] [
|
||||
.B \-d
|
||||
] [
|
||||
.B \-h
|
||||
]
|
||||
|
||||
.SH DESCRIPTION
|
||||
.B CreoleLint
|
||||
est un utilitaire très pratique pour valider la syntaxe du dictionnaire et des templates. L'outil effectue une série de tests dans le but de détecter les erreurs les plus fréquentes.
|
||||
.SH OPTIONS
|
||||
Les options suivantes sont supportées:
|
||||
.TP
|
||||
\fB-t\fP
|
||||
répertoire des templates
|
||||
.TP
|
||||
\fB-l\fP
|
||||
niveau des messages (info, warning ou error)
|
||||
.TP
|
||||
\fB-n\fP
|
||||
n'exécuter qu'un lint
|
||||
.TP
|
||||
\fB-d\fP
|
||||
dico-only, ne lance le lint que sur les dictionnaires (et pas sur les templates, donc)
|
||||
.TP
|
||||
\fB-h\fP
|
||||
Affiche de l'aide
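.P
Exemple d'utilisation (le répertoire de templates est donné à titre indicatif) :
.nf
CreoleLint -d
CreoleLint -t /usr/share/eole/creole/distrib -l error
.fi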
|
||||
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
Pour signaler un bogue, consultez l'adresse suivante \fIhttp://dev-eole.ac-dijon.fr/projects/creole\fR
|
||||
|
||||
.SH "AUTEURS"
|
||||
.PP
|
||||
.B Equipe EOLE
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
|
||||
.SH "VOIR AUSSI"
|
||||
.B creole
|
||||
(8).
|
||||
.\" Maj-Cd.8 1.0
|
|
@ -1,67 +0,0 @@
|
|||
.\"
|
||||
.\" Page de manuel de la commande CreoleService.
|
||||
.\"
|
||||
.TH CreoleService 8 "4 septembre 2013" "Version 1.0" "Commande Creole - EOLE"
|
||||
|
||||
.SH NOM
|
||||
CreoleService \- Outil de gestion des services définis dans Creole
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B CreoleService
|
||||
[
|
||||
.B \-c CONTAINER
|
||||
] [
|
||||
.B \-d
|
||||
] [
|
||||
.B \-l LEVEL
|
||||
] [
|
||||
.B \-v
|
||||
]
|
||||
.B SERVICE
|
||||
.B ACTION
|
||||
|
||||
.br
|
||||
.B CreoleService
|
||||
.B \-h
|
||||
|
||||
.SH DESCRIPTION
|
||||
.B CreoleService
|
||||
est un utilitaire très pratique pour démarrer, arrêter et redémarrer les services définis dans Creole.
|
||||
.SH OPTIONS
|
||||
Les options suivantes sont supportées:
|
||||
.TP
|
||||
\fB-c\fP
|
||||
conteneur dans lequel se trouve le service
|
||||
.TP
|
||||
\fB-d\fP
|
||||
active le mode de débogage
|
||||
.TP
|
||||
\fB-l\fP
|
||||
paramétrage du niveau de log (debug|info|warning|error|critical)
|
||||
.TP
|
||||
\fB-v\fP
|
||||
active le mode verbeux
|
||||
.TP
|
||||
\fBservice\fP
|
||||
nom du service sur lequel effectuer l'action
|
||||
.TP
|
||||
\fBaction\fP
|
||||
action à réaliser sur le service (start|stop|restart|reload|status)
|
||||
.TP
|
||||
\fB-h\fP
|
||||
Affiche de l'aide
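.P
Exemple d'utilisation (les noms du service et du conteneur sont donnés à titre indicatif) :
.nf
CreoleService ntp restart
CreoleService -c reseau exim4 status
.fi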
|
||||
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
Pour signaler un bogue, consultez l'adresse suivante \fIhttp://dev-eole.ac-dijon.fr/projects/creole\fR
|
||||
|
||||
.SH "AUTEURS"
|
||||
.PP
|
||||
.B Equipe EOLE
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
|
||||
.SH "VOIR AUSSI"
|
||||
.B creole
|
||||
(8).
|
||||
.\" Maj-Cd.8 1.0
|
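A usage sketch based on the synopsis above; the service and container names are purely illustrative:

    # restart a service declared in the Creole dictionaries
    CreoleService ntp restart
    # query a service living in a given container (container mode)
    CreoleService -c web apache2 status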
|
@ -1,69 +0,0 @@
|
|||
.\"
|
||||
.\" Page de manuel de la commande CreoleSet.
|
||||
.\"
|
||||
.TH CreoleSet 8 "18 novembre 2013" "Version 1.0" "Commande Creole - EOLE"
|
||||
|
||||
.SH NOM
|
||||
CreoleSet \- Modification de la valeur d'une variable Creole
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B CreoleSet
|
||||
[
|
||||
.B \-d
|
||||
] [
|
||||
.B \-l LEVEL
|
||||
] [
|
||||
.B \-v
|
||||
] [
|
||||
.B \-\-default
|
||||
]
|
||||
.B VARIABLE
|
||||
[
|
||||
.B VALUE
|
||||
]
|
||||
|
||||
.br
|
||||
.B CreoleSet
|
||||
.B \-h
|
||||
|
||||
.SH DESCRIPTION
|
||||
.B CreoleSet
|
||||
est un utilitaire très pratique pour modifier la valeur d'une variable Creole
|
||||
.SH OPTIONS
|
||||
Les options suivantes sont supportées:
|
||||
.TP
|
||||
\fB-d\fP
|
||||
active le mode de débogage
|
||||
.TP
|
||||
\fB-l\fP
|
||||
paramétrage du niveau de log (debug|info|warning|error|critical)
|
||||
.TP
|
||||
\fB-v\fP
|
||||
active le mode verbeux
|
||||
.TP
|
||||
\fB--default\fP
|
||||
réinitialiser la variable à sa valeur par défaut
|
||||
.TP
|
||||
\fBvariable\fP
|
||||
nom de la variable à modifier
|
||||
.TP
|
||||
\fBvalue\fP
|
||||
valeur à injecter
|
||||
.TP
|
||||
\fB-h\fP
|
||||
Affiche l'aide
|
||||
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
Pour signaler un bogue, consultez l'adresse suivante \fIhttp://dev-eole.ac-dijon.fr/projects/creole\fR
|
||||
|
||||
.SH "AUTEURS"
|
||||
.PP
|
||||
.B Equipe EOLE
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
|
||||
.SH "VOIR AUSSI"
|
||||
.B creole
|
||||
(8).
|
||||
.\" CreoleSet.8 1.0
|
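A usage sketch; nom_machine is a Creole variable used in the dictionary example further down in this diff, and the value is illustrative:

    # set a variable, then put it back to its default value
    CreoleSet nom_machine monserveur
    CreoleSet --default nom_machine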
|
@ -1,183 +0,0 @@
|
|||
.\"
|
||||
.\" Page de manuel de la commande Maj-Auto.
|
||||
.\"
|
||||
.TH Maj-Auto 8 "septembre 2016" "Version 2.6.0" "Commande Creole - EOLE"
|
||||
|
||||
.SH NOM
|
||||
Maj-Auto \- Mise à jour automatisée des modules EOLE
|
||||
|
||||
.SH SYNOPSIS
|
||||
.SY Maj-Auto
|
||||
.OP \-l {debug,info,warning,error,critical}
|
||||
.OP \-v
|
||||
.OP \-d
|
||||
.OP \-h
|
||||
.OP \-n
|
||||
.OP \-f
|
||||
.OP \-F
|
||||
.OP \-s
|
||||
.OP \-C\ |\ \-D [{eole,envole}\ [{eole,envole}\ ...]]
|
||||
.OP \-r
|
||||
.OP \-R
|
||||
.OP \-\-download
|
||||
.OP \-S \fIEOLE_MIRROR\fR
|
||||
.OP \-U \fIUBUNTU_MIRROR\fR
|
||||
.OP \-V \fIENVOLE_MIRROR\fR
|
||||
.OP \-c
|
||||
.OP \-W
|
||||
.OP \-i
|
||||
.YS
|
||||
.SH DESCRIPTION
|
||||
La commande
|
||||
.B Maj-Auto
|
||||
vous permet de lancer manuellement la mise à jour des paquets correctifs de la release actuelle du serveur.
|
||||
.br
|
||||
Vous profitez ainsi des mises à jour de sécurité et des corrections de bogues critiques, sans ajout de fonctionnalité.
|
||||
|
||||
.P
|
||||
Pour profiter des ajouts de fonctionnalité de la version actuelle du serveur, utilisez la commande \fBMaj-Release\fR.
|
||||
|
||||
.SH OPTIONS
|
||||
Les options suivantes sont supportées :
|
||||
.TP
|
||||
\fB-h\fR, \fB--help\fR
|
||||
afficher ce message d'aide et quitter.
|
||||
.TP
|
||||
\fB\-n\fR, \fB\-\-dry\-run\fR
|
||||
exécuter en mode simulation (option forcée en
|
||||
invoquant Query\-Auto).
|
||||
.TP
|
||||
\fB\-f\fR, \fB\-\-force\fR
|
||||
passer outre les autorisations Zéphir.
|
||||
|
||||
.TP
|
||||
\fB\-F\fR, \fB\-\-force\-update\fR
|
||||
mettre à jour le serveur sans confirmation
|
||||
.TP
|
||||
\fB\-s\fR, \fB\-\-simulate\fR
|
||||
demande à apt\-get de simuler l'installation des paquets
|
||||
.TP
|
||||
\fB\-C\fR [{eole,envole} [{eole,envole} ...]], \fB\-\-candidat\fR [{eole,envole} [{eole,envole} ...]]
|
||||
utiliser les paquets candidats.
|
||||
.TP
|
||||
\fB\-D\fR [{eole,envole} [{eole,envole} ...]], \fB\-\-devel\fR [{eole,envole} [{eole,envole} ...]]
|
||||
utiliser les paquets en développement.
|
||||
.TP
|
||||
\fB\-r\fR, \fB\-\-reconfigure\fR
|
||||
exécuter reconfigure après une mise à jour réussie.
|
||||
.TP
|
||||
\fB\-R\fR, \fB\-\-reboot\fR
|
||||
exécuter reconfigure après une mise à jour réussie et
|
||||
redémarrer si nécessaire (implique l'option \fB\-r\fR).
|
||||
.TP
|
||||
\fB\-\-download\fR
|
||||
procéder uniquement au téléchargement des paquets en
|
||||
cache.
|
||||
.TP
|
||||
\fB\-S\fR \fIEOLE_MIRROR\fR, \fB\-\-eole\-mirror\fR \fIEOLE_MIRROR\fR
|
||||
adresse des dépôts EOLE.
|
||||
.TP
|
||||
\fB\-U\fR \fIUBUNTU_MIRROR\fR, \fB\-\-ubuntu\-mirror\fR \fIUBUNTU_MIRROR\fR
|
||||
adresse des dépôts Ubuntu.
|
||||
.TP
|
||||
\fB\-V\fR \fIENVOLE_MIRROR\fR, \fB\-\-envole\-mirror\fR \fIENVOLE_MIRROR\fR
|
||||
adresse des dépôts Envole.
|
||||
.TP
|
||||
\fB\-c\fR, \fB\-\-cdrom\fR
|
||||
utiliser une source de type CDROM
|
||||
.TP
|
||||
\fB\-W\fR
|
||||
sortie formatée pour l'EAD.
|
||||
.TP
|
||||
\fB\-i\fR, \fB\-\-ignore\fR
|
||||
ignore la configuration locale si creoled ne répond pas.
|
||||
.SS "logging:"
|
||||
.TP
|
||||
\fB\-l\fR {debug,info,warning,error,critical}, \fB\-\-log\-level\fR {debug,info,warning,error,critical}
|
||||
Niveau de log
|
||||
.TP
|
||||
\fB\-v\fR, \fB\-\-verbose\fR
|
||||
Mode verbeux, équivalent à \fB\-l\fR info
|
||||
.TP
|
||||
\fB\-d\fR, \fB\-\-debug\fR
|
||||
Mode debug, équivalent à \fB\-l\fR debug
|
||||
.SH EXEMPLES
|
||||
.TP
|
||||
Pour utiliser le dépôt EOLE des paquets candidats et conserver les autres dépôts en stable :
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -C eole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Pour utiliser le dépôt Envole des paquets candidats et les autres en stable :
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -C envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Pour utiliser les dépôts Envole et EOLE des paquets candidats :
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -C eole envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Pour utiliser tous les dépôts de paquets candidats :
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -C\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Pour utiliser le dépôt EOLE des paquets de développement et conserver les autres dépôts en stable :
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -D eole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Pour utiliser le dépôt Envole des paquets de développement et conserver les autres dépôts en stable :
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -D envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Pour utiliser les dépôts Envole et EOLE des paquets de développement :
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -D eole envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Pour utiliser tous les dépôts de paquets de développement :
|
||||
.RS
|
||||
.nf
|
||||
\fBMaj-Auto -D\fP
|
||||
.fi
|
||||
.RE
|
||||
.SH "VOIR AUSSI"
|
||||
.B Maj-Cd
|
||||
(8),
|
||||
.B diagnose
|
||||
(8),
|
||||
.B Query-Auto
|
||||
(8),
|
||||
.B apt-eole
|
||||
(8),
|
||||
.B apt-get
|
||||
(8),
|
||||
.B creole
|
||||
(8).
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
Pour signaler un bogue, consultez l'adresse suivante \fIhttps://dev-eole.ac-dijon.fr/projects/eole/wiki/Accueil\fR
|
||||
|
||||
.SH "AUTEURS"
|
||||
.PP
|
||||
.B Equipe EOLE
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
.\" Maj-Auto.8 2.5.2
|
|
@ -1,55 +0,0 @@
|
|||
.\"
|
||||
.\" Page de manuel de la commande Maj-Cd.
|
||||
.\"
|
||||
.TH Maj-Cd 8 "03 novembre 2009" "Version 1.0" "Commande Creole - EOLE"
|
||||
|
||||
.SH NOM
|
||||
Maj-Cd \- Mise à jour des modules EOLE
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B Maj-Cd
|
||||
[
|
||||
.B \-s
|
||||
]
|
||||
|
||||
.SH DESCRIPTION
|
||||
La commande
|
||||
.B Maj-Cd
|
||||
vous permet de lancer manuellement la mise à jour de tous les paquets modifiés depuis la sortie de la dernière version stable.
|
||||
.br
|
||||
Vous profitez ainsi des dernières améliorations, des mises à jour de sécurité et des corrections de bogues.
|
||||
.br
|
||||
Pour cela le fichier des sources de apt-get doit être correctement configuré et le réseau fonctionnel (commande \fBdiagnose\fP).
|
||||
.br
|
||||
L'adresse du dépôt est \fIhttp://eoleng.ac-dijon.fr/eoleng\fP
|
||||
.SH OPTIONS
|
||||
Les options suivantes sont supportées:
|
||||
.TP
|
||||
\fB-s\fP
|
||||
Equivalent à \fBapt-get -s\fP (simulation).
|
||||
|
||||
.SH FICHIER
|
||||
.PP
|
||||
\fI/etc/apt/sources\&.list\fR
|
||||
.RS 4
|
||||
Emplacements où aller chercher les paquets\&. Élément de configuration\ \&:
|
||||
Dir::Etc::SourceList\&.
|
||||
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
Pour signaler un bogue, consultez l'adresse suivante \fIhttp://eole.orion.education.fr/signalement/\fR
|
||||
|
||||
.SH "AUTEURS"
|
||||
.PP
|
||||
.B Equipe EOLE
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
|
||||
.SH "VOIR AUSSI"
|
||||
.B Maj-Auto
|
||||
(8),
|
||||
.B apt-get
|
||||
(8),
|
||||
.B creole
|
||||
(8).
|
||||
.\" Maj-Cd.8 1.0
|
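A sketch of the single documented option:

    # simulate the update without installing anything (same as apt-get -s)
    Maj-Cd -s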
|
@ -1,64 +0,0 @@
|
|||
.\"
|
||||
.\" Page de manuel de la commande Maj-Release.
|
||||
.\"
|
||||
.TH Maj-Release 8 "Décembre 2015" "Version 2.5.0" "Commande Creole - EOLE"
|
||||
|
||||
.SH NOM
|
||||
Maj-Release \- Mise à jour automatisée de la version mineure des modules EOLE
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B Maj-Release
|
||||
[\fB-h\fR]
|
||||
[\fB\-\-release\fR \fIRELEASE\fR]
|
||||
[\fB\-\-force\fR]
|
||||
|
||||
.SH DESCRIPTION
|
||||
La commande
|
||||
.B Maj-Release
|
||||
vous permet de lancer manuellement la mise à jour vers une version mineure plus récente.
|
||||
.br
|
||||
Il ne s'agit pas d'un changement de version majeure.
|
||||
|
||||
.P
|
||||
Vous profitez ainsi des dernières améliorations de la version actuelle
|
||||
du serveur ainsi que des mises à jour de sécurité et des corrections
|
||||
de bogues.
|
||||
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
\fB\-h | \-\-help\fR
|
||||
afficher un message d’aide et quitter.
|
||||
|
||||
.TP
|
||||
\fB\-\-release\fR
|
||||
numéro de version cible
|
||||
|
||||
.TP
|
||||
\fB\-\-force\fR
|
||||
ne pas demander de confirmation.
|
||||
|
||||
.SH "VOIR AUSSI"
|
||||
.B Maj-Auto
|
||||
(8),
|
||||
.B diagnose
|
||||
(8),
|
||||
.B Query-Auto
|
||||
(8),
|
||||
.B Upgrade-Auto
|
||||
(8),
|
||||
.B apt-eole
|
||||
(8),
|
||||
.B apt-get
|
||||
(8),
|
||||
.B creole
|
||||
(8).
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
Pour signaler un bogue, consultez l'adresse suivante \fIhttps://dev-eole.ac-dijon.fr/projects/eole/wiki/Accueil\fR
|
||||
|
||||
.SH "AUTEURS"
|
||||
.PP
|
||||
.B Equipe EOLE
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
.\" Maj-Release.8 2.5.0
|
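A usage sketch based on the options above; the target release number is hypothetical:

    # upgrade to a given minor release without asking for confirmation
    Maj-Release --release 2.6.2 --force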
|
@ -1,188 +0,0 @@
|
|||
.\"
|
||||
.\" Page de manuel de la commande Query-Auto.
|
||||
.\"
|
||||
.TH Query-Auto 8 "septembre 2016" "Version 2.6.0" "Commande Creole - EOLE"
|
||||
|
||||
.SH NOM
|
||||
Query-Auto \- Simulation de la mise à jour des modules EOLE
|
||||
|
||||
.SH SYNOPSIS
|
||||
.SY Query-Auto
|
||||
.OP \-l {debug,info,warning,error,critical}
|
||||
.OP \-v
|
||||
.OP \-d
|
||||
.OP \-h
|
||||
.OP \-n
|
||||
.OP \-f
|
||||
.OP \-F
|
||||
.OP \-s
|
||||
.OP \-C\ |\ \-D [{eole,envole}\ [{eole,envole}\ ...]]
|
||||
.OP \-r
|
||||
.OP \-R
|
||||
.OP \-\-download
|
||||
.OP \-S \fIEOLE_MIRROR\fR
|
||||
.OP \-U \fIUBUNTU_MIRROR\fR
|
||||
.OP \-V \fIENVOLE_MIRROR\fR
|
||||
.OP \-c
|
||||
.OP \-W
|
||||
.OP \-i
|
||||
.YS
|
||||
.SH DESCRIPTION
|
||||
La commande
|
||||
.B Query-Auto
|
||||
vous permet de lister, sans les installer, les mises à jour disponibles pour tous les paquets modifiés depuis la sortie de la dernière version stable (équivalent de \fBMaj-Auto\fP en mode simulation).
|
||||
.br
|
||||
Vous pouvez ainsi vérifier quelles améliorations, mises à jour de sécurité et corrections de bogues sont disponibles.
|
||||
.br
|
||||
Pour cela le fichier des sources de apt-get doit être correctement configuré et le réseau fonctionnel (commande \fBdiagnose\fP).
|
||||
.br
|
||||
L'adresse du miroir EOLE est :
|
||||
\fIhttp://eole.ac-dijon.fr\fP\fB/eole\fR : dépôt EOLE
|
||||
.RS 54
|
||||
.br
|
||||
\fB/ubuntu\fR : dépôt Ubuntu
|
||||
.br
|
||||
\fB/envole\fR : dépôt Envole
|
||||
.SH OPTIONS
|
||||
Les options suivantes sont supportées :
|
||||
.TP
|
||||
\fB-h\fR, \fB--help\fR
|
||||
afficher ce message d'aide et quitter.
|
||||
.TP
|
||||
\fB\-n\fR, \fB\-\-dry\-run\fR
|
||||
exécuter en mode simulation (option forcée en
|
||||
invoquant Query\-Auto).
|
||||
.TP
|
||||
\fB\-f\fR, \fB\-\-force\fR
|
||||
passer outre les autorisations Zéphir.
|
||||
.TP
|
||||
\fB\-F\fR, \fB\-\-force\-update\fR
|
||||
mettre à jour le serveur sans confirmation
|
||||
.TP
|
||||
\fB\-s\fR, \fB\-\-simulate\fR
|
||||
demande à apt\-get de simuler l'installation des paquets
|
||||
.TP
|
||||
\fB\-C\fR [{eole,envole} [{eole,envole} ...]], \fB\-\-candidat\fR [{eole,envole} [{eole,envole} ...]]
|
||||
utiliser les paquets candidats.
|
||||
.TP
|
||||
\fB\-D\fR [{eole,envole} [{eole,envole} ...]], \fB\-\-devel\fR [{eole,envole} [{eole,envole} ...]]
|
||||
utiliser les paquets en développement.
|
||||
.TP
|
||||
\fB\-r\fR, \fB\-\-reconfigure\fR
|
||||
exécuter reconfigure après une mise à jour réussie.
|
||||
.TP
|
||||
\fB\-R\fR, \fB\-\-reboot\fR
|
||||
exécuter reconfigure après une mise à jour réussie et
|
||||
redémarrer si nécessaire (implique l'option \fB\-r\fR).
|
||||
.TP
|
||||
\fB\-\-download\fR
|
||||
procéder uniquement au téléchargement des paquets en
|
||||
cache.
|
||||
.TP
|
||||
\fB\-S\fR \fIEOLE_MIRROR\fR, \fB\-\-eole\-mirror\fR \fIEOLE_MIRROR\fR
|
||||
adresse des dépôts EOLE.
|
||||
.TP
|
||||
\fB\-U\fR \fIUBUNTU_MIRROR\fR, \fB\-\-ubuntu\-mirror\fR \fIUBUNTU_MIRROR\fR
|
||||
adresse des dépôts Ubuntu.
|
||||
.TP
|
||||
\fB\-V\fR \fIENVOLE_MIRROR\fR, \fB\-\-envole\-mirror\fR \fIENVOLE_MIRROR\fR
|
||||
adresse des dépôts Envole.
|
||||
.TP
|
||||
\fB\-c\fR, \fB\-\-cdrom\fR
|
||||
utiliser une source de type CDROM
|
||||
.TP
|
||||
\fB\-W\fR
|
||||
sortie formatée pour l'EAD.
|
||||
.TP
|
||||
\fB\-i\fR, \fB\-\-ignore\fR
|
||||
ignore la configuration locale si creoled ne répond pas.
|
||||
.SS "logging:"
|
||||
.TP
|
||||
\fB\-l\fR {debug,info,warning,error,critical}, \fB\-\-log\-level\fR {debug,info,warning,error,critical}
|
||||
Niveau de log
|
||||
.TP
|
||||
\fB\-v\fR, \fB\-\-verbose\fR
|
||||
Mode verbeux, équivalent à \fB\-l\fR info
|
||||
.TP
|
||||
\fB\-d\fR, \fB\-\-debug\fR
|
||||
Mode debug, équivalent à \fB\-l\fR debug
|
||||
.SH EXEMPLES
|
||||
.TP
|
||||
Pour utiliser le dépôt EOLE des paquets candidats et conserver les autres dépôts en stable :
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -C eole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Pour utiliser le dépôt Envole des paquets candidats et les autres en stable :
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -C envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Pour utiliser les dépôts Envole et EOLE des paquets candidats :
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -C eole envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Pour utiliser tous les dépôts de paquets candidats :
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -C\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Pour utiliser le dépôt EOLE des paquets de développement et conserver les autres dépôts en stable :
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -D eole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Pour utiliser le dépôt Envole des paquets de développement et conserver les autres dépôts en stable :
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -D envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Pour utiliser les dépôts Envole et EOLE des paquets de développement :
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -D eole envole\fP
|
||||
.fi
|
||||
.RE
|
||||
.TP
|
||||
Pour utiliser tous les dépôts de paquets de développement :
|
||||
.RS
|
||||
.nf
|
||||
\fBQuery-Auto -D\fP
|
||||
.fi
|
||||
.RE
|
||||
.SH "VOIR AUSSI"
|
||||
.B Query-Cd
|
||||
(8),
|
||||
.B diagnose
|
||||
(8),
|
||||
.B Maj-Auto
|
||||
(8),
|
||||
.B apt-eole
|
||||
(8),
|
||||
.B apt-get
|
||||
(8),
|
||||
.B creole
|
||||
(8).
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
Pour signaler un bogue, consultez l'adresse suivante \fIhttps://dev-eole.ac-dijon.fr/projects/eole/wiki/Accueil\fR
|
||||
|
||||
.SH "AUTEURS"
|
||||
.PP
|
||||
.B Equipe EOLE
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
.\" Query-Auto.8 2.5.2
|
|
@ -1,110 +0,0 @@
|
|||
.\"
|
||||
.\" Page de manuel de la commande Maj-Release.
|
||||
.\"
|
||||
.TH Upgrade-Auto 8 "Décembre 2015" "Version 2.4.2" "Commande Creole - EOLE"
|
||||
|
||||
.SH NOM
|
||||
Upgrade-Auto \- Outil de montée de version majeure de la distribution EOLE
|
||||
|
||||
.SH SYNOPSIS
|
||||
.SY Upgrade-Auto
|
||||
.OP \fB-h\fR
|
||||
.OP \fB\-\-release\fR \fIRELEASE\fR
|
||||
.OP \fB\-\-limit-rate\fR
|
||||
.OP \fB\-\-download\fR
|
||||
.OP \fB\-\-iso\fR \fIPATH\fR | \fB\-\-cdrom\fR
|
||||
.OP \fB\-\-force\fR
|
||||
.YS
|
||||
|
||||
.SH DESCRIPTION
|
||||
La commande
|
||||
.B Upgrade-Auto
|
||||
vous permet de lancer manuellement la migration d'un module vers les
|
||||
dernières versions stables.
|
||||
|
||||
.P
|
||||
Vous profitez ainsi des dernières améliorations ainsi que des mises à
|
||||
jour de sécurité et des corrections de bogues.
|
||||
|
||||
.P
|
||||
Pour cela le serveur doit être à jour dans la version courante
|
||||
(\fBMaj-Auto\fR) et le réseau fonctionnel (commande \fBdiagnose\fP).
|
||||
|
||||
.P
|
||||
La procédure de mise à jour est la suivante :
|
||||
.nr step 1 1
|
||||
.IP \n[step] 2
|
||||
demande le numéro de version cible et confirmation à l’utilisateur
|
||||
|
||||
.IP \n+[step]
|
||||
télécharge les fichiers \fISHA256SUMS\fR et \fISHA256SUMS.gpg\fR
|
||||
|
||||
.IP \n+[step]
|
||||
vérifie la signature GPG du fichier \fISHA256SUMS\fR
|
||||
|
||||
.IP \n+[step]
|
||||
télécharge l’image ISO
|
||||
|
||||
.IP \n+[step]
|
||||
valide la somme de contrôle de l’image ISO
|
||||
|
||||
.IP \n+[step]
|
||||
copie des paquets \fI.deb\fR de l’image ISO dans le cache APT ou
|
||||
APT-CACHER-NG s’il est installé.
|
||||
|
||||
.IP \n+[step]
|
||||
exécute la procédure de mise à jour en réseau.
|
||||
|
||||
.P
|
||||
Vous pouvez limiter la taille du téléchargement en utilisant une image
|
||||
ISO stockée sur une clef USB ou un cédérom. Dans ce cas, seuls les
|
||||
paquets plus récents que ceux présents sur l’image ISO seront
|
||||
téléchargés.
|
||||
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
\fB\-h | \-\-help\fR
|
||||
afficher un message d’aide et quitter.
|
||||
|
||||
.TP
|
||||
\fB\-\-release\fR
|
||||
numéro de version cible
|
||||
|
||||
.TP
|
||||
\fB\-\-download\fR
|
||||
ne procéder qu’au téléchargement de l’image ISO de la version
|
||||
cible. La migration n’est pas effectuée.
|
||||
|
||||
.TP
|
||||
\fB\-\-limit\-rate\fR \fIBANDWIDTH\fR
|
||||
limite de bande passante à utiliser pour le téléchargement. Cette
|
||||
option est passée directement à wget. La valeur « 0 » désactive la
|
||||
limitation. Par défaut \fI120k\fR.
|
||||
|
||||
.TP
|
||||
\fB\-\-iso\fR \fIPATH\fR
|
||||
chemin vers l’image ISO. Cette option copie l’image ISO indiquée.
|
||||
|
||||
.TP
|
||||
\fB\-\-cdrom\fR
|
||||
copie l’image ISO depuis un lecteur cédérom.
|
||||
|
||||
.TP
|
||||
\fB\-f | \-\-force\fR
|
||||
ne pas demander de confirmation.
|
||||
|
||||
.SH "VOIR AUSSI"
|
||||
.B Maj-Auto
|
||||
(8),
|
||||
.B diagnose
|
||||
(8).
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
Pour signaler un bogue, consultez l'adresse suivante \fIhttps://dev-eole.ac-dijon.fr/projects/eole/wiki/Accueil\fR
|
||||
|
||||
.SH "AUTEURS"
|
||||
.PP
|
||||
.B Equipe EOLE
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
.\" Upgrade-Auto.8 2.4.2
|
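A usage sketch based on the options above; the ISO path is hypothetical:

    # only download the ISO image of the target release, without migrating
    Upgrade-Auto --download
    # migrate using an ISO image already copied onto a USB key (hypothetical path)
    Upgrade-Auto --iso /media/usb/eole-2.6.iso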
|
@ -1,69 +0,0 @@
|
|||
.\"
|
||||
.\" Page de manuel de Creole.
|
||||
.\"
|
||||
.TH Creole 8 "03 novembre 2009" "Version 1.0" "Creole - EOLE"
|
||||
|
||||
.SH NOM
|
||||
Creole
|
||||
|
||||
.SH DESCRIPTION
|
||||
Creole est un ensemble de programmes, permettant la configuration et la mise à jour des serveurs.
|
||||
Ce mécanisme automatisé inclut une procédure de patch ainsi que la possibilité de rajouter des spécifications :
|
||||
|
||||
* ajout de variable
|
||||
* modification de fichier templatisé
|
||||
* templatisation de nouveau fichier
|
||||
|
||||
Les modules EOLE sont livrés avec un ensemble de fichiers de configuration placés dans \fI/usr/share/eole/creole/distrib\fP et copiés vers leur emplacement de destination à chaque \fBinstance\fP/\fBreconfigure\fP (Ex.:\fI/usr/share/eole/creole/distrib/my.cnf\fP vers \fI/etc/mysql/my.cnf\fP).
|
||||
.PP
|
||||
Le compte rendu d'exécution des commandes Creole est dans le fichier \fI/var/log/creole.log\fR
|
||||
|
||||
.SH "LISTE DES COMMANDES"
|
||||
.TP
|
||||
\fBMaj-Auto\fP
|
||||
Lance les mises à jour du module à partir des dépôts.
|
||||
.TP
|
||||
\fBMaj-Cd\fP
|
||||
Lance les mises à jour du module à partir d'un CD (pratique pour les connexions à bas débit).
|
||||
.TP
|
||||
\fBQuery-Auto\fP
|
||||
Donne la liste des paquets disponibles sur les dépôts sans les installer.
|
||||
.TP
|
||||
\fBinstance\fP
|
||||
Lance l'instanciation du module après l'avoir configuré avec \fBgen_config\fP.
|
||||
.TP
|
||||
\fBgen_config\fP
|
||||
Application graphique et textuelle permettant de configurer et de reconfigurer votre module.
|
||||
.TP
|
||||
\fBreconfigure\fP
|
||||
Applique les changements de configuration du module effectués dans \fBgen_config\fP.
|
||||
.TP
|
||||
\fBdiagnose\fP
|
||||
Outil de diagnostic du réseau et de l'état des services.
|
||||
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
Pour signaler un bogue, consultez l'adresse suivante \fIhttp://eole.orion.education.fr/signalement/\fR
|
||||
|
||||
.SH "AUTEURS"
|
||||
.PP
|
||||
.B Equipe EOLE
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
|
||||
.SH "VOIR AUSSI"
|
||||
.B instance
|
||||
(8),
|
||||
.B Maj-Auto
|
||||
(8),
|
||||
.B Maj-Cd
|
||||
(8),
|
||||
.B reconfigure
|
||||
(8),
|
||||
.B gen_config
|
||||
(8),
|
||||
.B Query-Auto
|
||||
(8),
|
||||
.B diagnose
|
||||
(8).
|
||||
.\" Creole.8 1.0
|
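A sketch of the life cycle these commands describe, in the order given above (initial configuration, later changes, regular updates):

    gen_config                  # fill in the configuration
    instance /root/zephir.eol   # first and only instanciation
    # later, after changing values with gen_config:
    reconfigure
    # regular updates, followed by reconfigure:
    Maj-Auto -r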
|
@ -1,46 +0,0 @@
|
|||
.\"
|
||||
.\" Page de manuel de la commande diagnose.
|
||||
.\"
|
||||
.TH diagnose 8 "03 novembre 2009" "Version 1.0" "Commande Creole - EOLE"
|
||||
|
||||
.SH NOM
|
||||
diagnose \- Outil de diagnostic de la configuration d'un module EOLE
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B diagnose
|
||||
[
|
||||
.B \-L
|
||||
]
|
||||
|
||||
.SH DESCRIPTION
|
||||
La commande
|
||||
.B diagnose
|
||||
permet de valider que la configuration injectée via les étapes d'adaptation et d'instanciation est correcte et fonctionnelle.
|
||||
.br
|
||||
Elle permet de tester rapidement l'état du réseau et des différents services de votre module EOLE.
|
||||
.br
|
||||
Si des erreurs apparaissent, il faut revoir le fichier de configuration \fI/etc/eole/config.eol\fR, et reconfigurer le serveur.
|
||||
.SH OPTIONS
|
||||
L'option suivante est supportée:
|
||||
.TP
|
||||
\fB-L\fP
|
||||
Active le mode étendu qui permet d'obtenir plus d'informations (détail des disques, état des mises à jour, dernières actions réalisées sur le serveur).
|
||||
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
Pour signaler un bogue, consultez l'adresse suivante \fIhttp://eole.orion.education.fr/signalement/\fR
|
||||
|
||||
.SH "AUTEURS"
|
||||
.PP
|
||||
.B Equipe EOLE
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
|
||||
.SH "VOIR AUSSI"
|
||||
.B gen_config
|
||||
(8),
|
||||
.B reconfigure
|
||||
(8),
|
||||
.B creole
|
||||
(8).
|
||||
.\" diagnose.8 1.0
|
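The two documented invocations:

    # quick check of network and services
    diagnose
    # extended report (disks, update state, last actions)
    diagnose -L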
|
@ -1,77 +0,0 @@
|
|||
.\"
|
||||
.\" Page de manuel de la commande instance.
|
||||
.\"
|
||||
.TH instance 8 "03 novembre 2009" "Version 1.0" "Commande Creole - EOLE"
|
||||
|
||||
.SH NOM
|
||||
instance \- Outil d'instanciation d'un module EOLE
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B instance
|
||||
[\fInomDeFichier.eol\fP]
|
||||
|
||||
.SH DESCRIPTION
|
||||
Les modules EOLE sont livrés avec un ensemble de templates.
|
||||
.br
|
||||
L'instanciation consiste à remplacer les variables par les valeurs renseignées dans le fichier \fI/root/zephir.eol\fR et
|
||||
à copier les fichiers vers leur emplacement cible.
|
||||
.br
Vous devez donc d'abord renseigner le fichier zephir.eol à l'aide de la commande \fBgen_config\fP.
|
||||
|
||||
.B Attention !
|
||||
.br
|
||||
Cette opération n'est à réaliser qu'une seule fois sur un module après son installation.
|
||||
.br
|
||||
Pourquoi n'utiliser instance qu'une seule fois:
|
||||
|
||||
* instance permet de le re-générer, ce qui efface tous les comptes utilisateurs et les stations intégrées au domaine.
|
||||
* les scripts pre/post instance/reconf sont différents
|
||||
* risque de désynchronisation du SID
|
||||
* valeurs par défaut non mises à jour
|
||||
* reconfigure est automatique, il ne pose pas de question
|
||||
|
||||
.br
|
||||
Si des modifications sont apportées par la suite par \fBgen_config\fP dans le fichier de configuration il faut utiliser la commande \fBreconfigure\fP et non la commande \fBinstance\fP.
|
||||
|
||||
.br
|
||||
Voici les étapes gérées par la commande \fBinstance\fP:
|
||||
|
||||
* initialise les mots de passe root, scribe et admin
|
||||
* génère un nouveau SID
|
||||
* génère l'annuaire et les bases MySql si inexistants
|
||||
* lance les scripts pre/postinstance
|
||||
* programme une mise à jour automatique hebdomadaire
|
||||
* copie, patch et renseigne les templates
|
||||
* (re)lance les services
|
||||
|
||||
.br
|
||||
Le compte rendu d'exécution est dans le fichier \fI/var/log/creole.log\fR
|
||||
|
||||
.br
|
||||
Une fois l'instanciation faite, le fichier \fInomDeFichier.eol\fP peut être supprimé car c'est le fichier \fI/etc/eole/config.eol\fR qui servira lors des \fBreconfigure\fP.
|
||||
|
||||
.SH OPTION
|
||||
L'argument suivant est supporté:
|
||||
.TP
|
||||
\fBnomDeFichier.eol\fP
|
||||
spécifier le chemin du \fIfichier.eol\fR nécessaire à l'instanciation. Par défaut, le fichier utilisé est \fI/root/zephir.eol\fR
|
||||
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
Pour signaler un bogue, consultez l'adresse suivante \fIhttp://eole.orion.education.fr/signalement/\fR
|
||||
|
||||
.SH "AUTEURS"
|
||||
.PP
|
||||
.B Equipe EOLE
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
|
||||
.SH "VOIR AUSSI"
|
||||
.B diagnose
|
||||
(8),
|
||||
.B gen_config
|
||||
(8),
|
||||
.B reconfigure
|
||||
(8),
|
||||
.B creole
|
||||
(8).
|
||||
.\" instance.8 1.0
|
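A sketch of the documented invocation; /root/zephir.eol is the default file mentioned above:

    # instanciate the module once, right after installation
    instance /root/zephir.eol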
|
@ -1 +0,0 @@
|
|||
Maj-Auto.8
|
|
@ -1 +0,0 @@
|
|||
Maj-Cd.8
|
|
@ -1 +0,0 @@
|
|||
Query-Auto.8
|
|
@ -1,68 +0,0 @@
|
|||
.\"
|
||||
.\" Page de manuel de la commande reconfigure.
|
||||
.\"
|
||||
.TH reconfigure 8 "03 novembre 2009" "Version 1.0" "Commande Creole - EOLE"
|
||||
|
||||
.SH NOM
|
||||
reconfigure \- Ré-applique la configuration (après une mise à jour par exemple) ou des modifications de configuration (changement d'IP par ex.).
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B reconfigure
|
||||
[\fB-f\fP]
|
||||
[\fB-a\fP]
|
||||
[\fB-i\fP]
|
||||
|
||||
.SH DESCRIPTION
|
||||
La commande \fBreconfigure\fP sert à ré-appliquer la configuration sur une machine déjà configurée (instanciée).
|
||||
.br
|
||||
Elle est également utilisée lors de l'application de patch. \fBreconfigure\fP lit le fichier \fI/etc/eole/config.eol\fP.
|
||||
.br
|
||||
\fBreconfigure\fP doit être lancé après chaque modification effectuée par \fBgen_config\fP afin d'appliquer les changements.
|
||||
|
||||
Voici les étapes gérées par la commande \fBreconfigure\fP:
|
||||
|
||||
* ré-applique le SID trouvé dans l'annuaire
|
||||
* installe/supprime des paquets (utilisé pour les noyaux)
|
||||
* exécute les scripts pre/postreconf
|
||||
* met à jour les valeurs par défaut des dictionnaires
|
||||
* recrée "admin" s'il n'a pas été trouvé
|
||||
* copie, patch et renseigne les templates
|
||||
* contrôle la version du noyau en fonctionnement et demande un redémarrage si ce n'est pas la dernière version (reboot auto si Maj par EAD)
|
||||
* relance les services
|
||||
|
||||
Lors d'une mise à jour via l'EAD, \fBreconfigure\fP est lancé automatiquement.
|
||||
|
||||
Si la mise à jour a été effectuée sur la console ou via SSH avec la commande \fBMaj-Auto\fP un message orange indique s'il est nécessaire de lancer \fBreconfigure\fP.
|
||||
|
||||
.SH OPTIONS
|
||||
Les arguments suivants sont supportés:
|
||||
.TP
|
||||
\fB-f\fP
|
||||
outrepasser le blocage par Zéphir
|
||||
.TP
|
||||
\fB-a\fP
|
||||
redémarrage automatique si nécessaire
|
||||
.TP
|
||||
\fB-i\fP
|
||||
lancer le processus en mode interactif
|
||||
|
||||
.SH "BOGUES"
|
||||
.PP
|
||||
Pour signaler un bogue, consultez l'adresse suivante \fIhttp://eole.orion.education.fr/signalement/\fR
|
||||
|
||||
.SH "AUTEURS"
|
||||
.PP
|
||||
.B Equipe EOLE
|
||||
.br
|
||||
\fIhttp://eole.orion.education.fr\fP
|
||||
|
||||
.SH "VOIR AUSSI"
|
||||
.B instance
|
||||
(8),
|
||||
.B Maj-Auto
|
||||
(8),
|
||||
.B gen_config
|
||||
(8),
|
||||
.B creole
|
||||
(8).
|
||||
.\" reconfigure.8 1.0
|
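A sketch of the documented options:

    # apply configuration changes, rebooting automatically if a newer kernel requires it
    reconfigure -a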
|
@ -1 +0,0 @@
|
|||
Upgrade-Auto.8
|
|
@ -1,12 +0,0 @@
|
|||
[Unit]
|
||||
Description=EOLE Creole server
|
||||
After=local-fs.target network.target
|
||||
|
||||
[Service]
|
||||
Type=notify
|
||||
EnvironmentFile=-/etc/default/locale
|
||||
ExecStart=/usr/sbin/creoled --pidfile /run/creoled.pid
|
||||
PIDFile=/run/creoled.pid
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
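A sketch of how this unit would typically be managed, assuming it is installed as creoled.service; CreoleGet eole_version is the liveness check used later in this diff:

    systemctl enable creoled
    systemctl start creoled
    systemctl status creoled
    # the daemon answers Creole variable lookups, e.g.:
    CreoleGet eole_version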
|
@ -1,344 +0,0 @@
|
|||
. /usr/lib/eole/ihm.sh
|
||||
|
||||
len_pf=26
|
||||
|
||||
Inactif() {
|
||||
printf ". %${len_pf}s => " "$1"
|
||||
EchoOrange "Désactivé"
|
||||
}
|
||||
|
||||
NoConfig() {
|
||||
printf ". %${len_pf}s => " "$1"
|
||||
EchoOrange "Non configuré"
|
||||
}
|
||||
|
||||
TestIP() {
|
||||
len=$((len_pf-7))
|
||||
printf "%-9s %${len}s => " "$2:" "$1"
|
||||
/usr/bin/fping -t50 -r2 $1 > /dev/null 2>&1
|
||||
if [ $? -eq 0 ]
|
||||
then
|
||||
EchoVert "Ok"
|
||||
else
|
||||
EchoRouge "Erreur"
|
||||
fi
|
||||
}
|
||||
|
||||
TestIP2() {
|
||||
# présentation normale ;)
|
||||
printf ". %${len_pf}s => " "$2"
|
||||
/usr/bin/fping -t50 -r2 $1 > /dev/null 2>&1
|
||||
if [ $? -eq 0 ]
|
||||
then
|
||||
EchoVert "Ok"
|
||||
else
|
||||
EchoRouge "Erreur"
|
||||
fi
|
||||
}
|
||||
|
||||
TestARP() {
|
||||
if [ -z "$3" ]; then
|
||||
# recherche de l'interface reseau + test
|
||||
interface=`ip route get $1 2> /dev/null|sed -ne '/ via /! s/^.* dev \([^ ]\+\) \+src \+.*$/\\1/p;Q'`
|
||||
[ "$interface" = "" ] && EchoRouge " Erreur interne impossible de determiner l'interface"
|
||||
/sbin/ifconfig $interface > /dev/null 2>&1
|
||||
[ ! $? = 0 ] && EchoRouge " Erreur interne impossible de déterminer l'interface"
|
||||
else
|
||||
interface=$3
|
||||
fi
|
||||
|
||||
# utilisation d'arping
|
||||
printf ". %${len_pf}s => " "$2"
|
||||
/usr/bin/arping -w 1 -c 1 -f $1 -I $interface > /dev/null 2>&1
|
||||
if [ $? -eq 0 ]
|
||||
then
|
||||
EchoVert "Ok"
|
||||
else
|
||||
EchoRouge "Erreur"
|
||||
fi
|
||||
}
|
||||
|
||||
TestService() {
|
||||
printf ". %${len_pf}s => " "$1"
|
||||
CMD="/usr/bin/tcpcheck 2 $2 2> /dev/null | grep -q \" alive\""
|
||||
if [ ! "$3" = "" ]; then
|
||||
CreoleRun "$CMD" "$3"
|
||||
ret=$?
|
||||
else
|
||||
CreoleRun "$CMD"
|
||||
ret=$?
|
||||
fi
|
||||
if [ $ret -eq 0 ]
|
||||
then
|
||||
EchoVert "Ok"
|
||||
return 0
|
||||
else
|
||||
EchoRouge "Erreur"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
TestUDP() {
|
||||
printf ". %${len_pf}s => " "$1"
|
||||
CMD="netstat -unl | grep -q \":$2 \""
|
||||
if [ ! "$3" = "" ]; then
|
||||
CreoleRun "$CMD" "$3"
|
||||
ret=$?
|
||||
else
|
||||
CreoleRun "$CMD"
|
||||
ret=$?
|
||||
fi
|
||||
if [ $ret -eq 0 ]
|
||||
then
|
||||
EchoVert "Ok"
|
||||
return 0
|
||||
else
|
||||
EchoRouge "Erreur"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
TestPid() {
|
||||
printf ". %${len_pf}s => " "$1"
|
||||
pidof "$2" > /dev/null
|
||||
if [ $? -eq 0 ];then
|
||||
EchoVert "Ok"
|
||||
return 0
|
||||
else
|
||||
EchoRouge "Erreur"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
TestDns() {
|
||||
printf ". %${len_pf}s => " "DNS $1"
|
||||
/usr/bin/host -W2 -tA $2 $1 > /dev/null 2>&1
|
||||
if [ $? -eq 0 ];then
|
||||
EchoVert "Ok"
|
||||
return 0
|
||||
else
|
||||
EchoRouge "Erreur"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
TestNTP() {
|
||||
printf ". %${len_pf}s => " "Statut NTP"
|
||||
/usr/bin/ntpstat > /dev/null 2>&1
|
||||
if [ $? -eq 0 ]
|
||||
then
|
||||
EchoVert "Synchronisé"
|
||||
elif [ $? -eq 1 ]
|
||||
then
|
||||
EchoRouge "Désynchronisé"
|
||||
for ntp in $1;do
|
||||
printf ". %${len_pf}s => " "NTP $ntp"
|
||||
/usr/sbin/ntpdate -q $ntp > /dev/null 2>&1
|
||||
if [ $? -eq 0 ]; then
|
||||
EchoVert "Ok"
|
||||
else
|
||||
EchoRouge "Erreur"
|
||||
fi
|
||||
done
|
||||
else
|
||||
EchoRouge "Erreur"
|
||||
fi
|
||||
}
|
||||
|
||||
TestHTTPPage() {
|
||||
printf ". %${len_pf}s => " "$1"
|
||||
/usr/bin/httping -g $2 -c 1 > /dev/null 2>&1
|
||||
if [ $? -eq 0 ]
|
||||
then
|
||||
EchoVert "Ok"
|
||||
else
|
||||
EchoRouge "Erreur"
|
||||
fi
|
||||
}
|
||||
|
||||
TestWeb() {
|
||||
WGET_OPTIONS="-t2 --connect-timeout=3 --delete-after"
|
||||
WARN_MSG=""
|
||||
PROXIES_TESTS="PROXY:"
|
||||
if [ "$(CreoleGet activer_proxy_client)" == "oui" ]
|
||||
then
|
||||
PROXIES_TESTS="PROXY:http://$(CreoleGet proxy_client_adresse):$(CreoleGet proxy_client_port)"
|
||||
fi
|
||||
|
||||
for PROXY_LOOP in $PROXIES_TESTS
|
||||
do
|
||||
PROXY=$(echo $PROXY_LOOP | sed -e 's/^PROXY://')
|
||||
export http_proxy=$PROXY
|
||||
if [ -n "$PROXY" ]
|
||||
then
|
||||
Proxy=` echo $http_proxy | sed -e 's!http://!!' `
|
||||
TestService "Serveur Proxy" $Proxy
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
#EchoRouge "Erreur"
|
||||
#return 1
|
||||
export http_proxy=
|
||||
fi
|
||||
fi
|
||||
# Test 1er site
|
||||
wget -q $WGET_OPTIONS $2
|
||||
if [ $? -eq 0 ]
|
||||
then
|
||||
printf ". %${len_pf}s => " "$1"
|
||||
EchoVert "Ok"
|
||||
return 0
|
||||
fi
|
||||
if [ "$3" != "" ]
|
||||
then
|
||||
# Test second site
|
||||
msg=$(wget -nv $WGET_OPTIONS $3 2>&1)
|
||||
ret=$?
|
||||
if [ $ret -eq 0 ]
|
||||
then
|
||||
printf ". %${len_pf}s => " "$1"
|
||||
EchoVert "Ok"
|
||||
return 0
|
||||
elif [ $ret -eq 8 ]
|
||||
then
|
||||
WARN_MSG="$msg"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
printf ". %${len_pf}s => " "$1"
|
||||
if [ -n "$WARN_MSG" ]
|
||||
then
|
||||
EchoOrange "$WARN_MSG"
|
||||
else
|
||||
EchoRouge "Erreur"
|
||||
fi
|
||||
return 1
|
||||
}
|
||||
|
||||
TestCerts() {
|
||||
CERTFILE=$1
|
||||
|
||||
# CODEERROR and MESSAGE not used now
|
||||
# ref #22341
|
||||
#CODEERROR=$2
|
||||
#man verify for CODEERROR
|
||||
#MESSAGE=$3
|
||||
#
|
||||
|
||||
if [ -z "$4" ]; then
|
||||
echo "$CERTFILE" | grep -q '^/etc/ipsec.d/'
|
||||
[ $? = 0 ] && CAFILE=/etc/ipsec.d/cacerts/CertifCa.pem || CAFILE=/etc/ssl/certs/ca.crt
|
||||
else
|
||||
# Fichier de CA spécifié, on désactive l'utilisation
|
||||
# des autres autorités installées sur le système.
|
||||
CAFILE=$4
|
||||
fi
|
||||
FAKE_CAPATH="/usr/lib/ssl/no_certs"
|
||||
TMPFILE=$(mktemp)
|
||||
mkdir -p "$FAKE_CAPATH"
|
||||
if [[ -d ${CAFILE} ]]
|
||||
then
|
||||
cat ${CAFILE}/* > ${TMPFILE}
|
||||
CAFILE=${TMPFILE}
|
||||
fi
|
||||
ssl_cmd="/usr/bin/openssl verify -CAfile $CAFILE -CApath $FAKE_CAPATH -purpose any $CERTFILE"
|
||||
printf ". %${len_pf}s => " `basename $CERTFILE`
|
||||
if [ -e $CAFILE ]; then
|
||||
if [ -e $CERTFILE ]; then
|
||||
if [ ! -s $CAFILE ] || [ ! -s $CERTFILE ]; then
|
||||
EchoRouge "fichier vide"
|
||||
else
|
||||
sslmsg="`$ssl_cmd 2>&1`"
|
||||
ERR_MSG=$(printf "$sslmsg" |grep "^error [0-9]* at ")
|
||||
RETCODE=$?
|
||||
# suppression du répertoire temporaire
|
||||
rm -rf "$FAKE_CAPATH"
|
||||
if [ $RETCODE -eq 0 ]; then
|
||||
#EchoRouge "Erreur openssl"
|
||||
ERR_MSG=$(cut -d':' -f2 <<< $ERR_MSG)
|
||||
EchoRouge "Erreur : $ERR_MSG"
|
||||
return 1
|
||||
else
|
||||
EchoVert "Ok"
|
||||
printf ". %${len_pf}s => " "DNS reconnus"
|
||||
openssl x509 -in $CERTFILE -noout -text \
|
||||
| sed -n -e '/X509v3 Subject Alternative Name/{n;p;}' \
|
||||
| sed -e 's/^ *//' -e 's/DNS://g' -e 's/,//g' \
|
||||
-e 's/IP Address:[0-9]\+\.[0-9]\+\.[0-9]\+\.[0-9]\+//g'
|
||||
return 0
|
||||
fi
|
||||
fi
|
||||
else
|
||||
EchoRouge "$CERTFILE inexistant"
|
||||
fi
|
||||
else
|
||||
EchoRouge "$CAFILE inexistant"
|
||||
fi
|
||||
}
|
||||
|
||||
TestConnexion() {
|
||||
connexions=( `/bin/netstat -a --$2 -p -n | grep $1 | grep $3` )
|
||||
retour=1
|
||||
if [ "$2" = "tcp" ]; then
|
||||
sens=$4
|
||||
|
||||
if [ "$sens" = "reception" ]; then
|
||||
pos=3
|
||||
incr=2
|
||||
cond='LISTEN'
|
||||
elif [ "$sens" = "envoi" ]; then
|
||||
pos=4
|
||||
incr=1
|
||||
cond='ESTABLISHED'
|
||||
fi
|
||||
for indice in `seq $pos 7 ${#connexions[*]}`; do
|
||||
if [[ ${connexions[$indice]} == *$3* ]]; then
|
||||
if [[ ${connexions[$(( $indice + $incr ))]} == $cond ]]; then
|
||||
retour=0
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
elif [ "$2" = "udp" ]; then
|
||||
if [ ${#connexions[*]} -gt 1 ]; then
|
||||
retour=0
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ $retour == 0 ]]; then
|
||||
return 0
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
#
|
||||
# @NAME : TestMailQ
|
||||
# @AIM : Tester si la file d'attente du mailer a des messages "frozen"
|
||||
# @PARAMS : None
|
||||
#
|
||||
function TestMailQ()
|
||||
{
|
||||
ckqueue="exiqgrep -i"
|
||||
# Nombre de messages en file d'attente
|
||||
printf ". %${len_pf}s => " "File d'attente"
|
||||
mailq=$(CreoleRun "${ckqueue} 2>&1 | wc -l" mail)
|
||||
if [[ ${mailq} -eq 0 ]]
|
||||
then
|
||||
EchoVert "0 message"
|
||||
else
|
||||
EchoOrange "${mailq} message(s)"
|
||||
fi
|
||||
|
||||
frozen="exiqgrep -z -i"
|
||||
# Nombre de messages "Frozen"
|
||||
printf ". %${len_pf}s => " "Messages \"Frozen\""
|
||||
mailfrz=$(CreoleRun "${frozen} 2>&1 | wc -l" mail)
|
||||
if [[ ${mailfrz} -eq 0 ]]
|
||||
then
|
||||
EchoVert "0 message"
|
||||
else
|
||||
EchoOrange "${mailfrz} message(s)"
|
||||
fi
|
||||
}
|
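A sketch of how these helpers are called once the library is sourced (its installed path is not shown in this diff); hosts and labels are illustrative:

    TestIP2 192.168.0.1 "Passerelle"
    TestDns 192.168.0.1 www.ac-dijon.fr
    TestService "Serveur ssh" localhost:22
    TestNTP "ntp.ac-dijon.fr"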
132
lib/eole/ihm.sh
|
@ -1,132 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
TPUT=/usr/bin/tput
|
||||
#test si TPUT est utilisable
|
||||
if [ ! "$TERM" = "" ] && $TPUT hpa 60 >/dev/null 2>&1 && $TPUT setaf 1 >/dev/null 2>&1; then
|
||||
FANCYTTY=1
|
||||
else
|
||||
FANCYTTY=0
|
||||
fi
|
||||
|
||||
Pause() {
|
||||
if [ "$ModeTxt" == "yes" ];then
|
||||
echo
|
||||
elif [ "$ModeEad" == "yes" ];then
|
||||
echo "<br>"
|
||||
else
|
||||
[ $FANCYTTY = 1 ] && $TPUT setaf 6
|
||||
echo " Appuyez sur Entrée pour continuer ..."
|
||||
[ $FANCYTTY = 1 ] && $TPUT sgr0
|
||||
read BiDon
|
||||
fi
|
||||
}
|
||||
|
||||
Echo() {
|
||||
if [ "$ModeEad" != "yes" ];
|
||||
then
|
||||
echo "$1"
|
||||
else
|
||||
echo "$1<br>"
|
||||
fi
|
||||
}
|
||||
|
||||
EchoColor() {
|
||||
if [ "$ModeTxt" = "yes" ];then
|
||||
echo "$1"
|
||||
elif [ "$ModeEad" = "yes" ];then
|
||||
echo "<FONT color=\"$3\"> $1 </FONT><br>"
|
||||
else
|
||||
[ "$FANCYTTY" = 1 ] && $TPUT setaf $2
|
||||
echo "$1"
|
||||
[ "$FANCYTTY" = 1 ] && $TPUT sgr0
|
||||
fi
|
||||
}
|
||||
|
||||
EchoRouge() {
|
||||
EchoColor "$1" "1" "red"
|
||||
}
|
||||
|
||||
EchoVert() {
|
||||
EchoColor "$1" "2" "green"
|
||||
}
|
||||
|
||||
EchoOrange() {
|
||||
EchoColor "$1" "3" "orange"
|
||||
}
|
||||
|
||||
EchoBleu() {
|
||||
EchoColor "$1" "4" "blue"
|
||||
}
|
||||
|
||||
EchoMagenta() {
|
||||
EchoColor "$1" "5" "magenta"
|
||||
}
|
||||
|
||||
EchoCyan() {
|
||||
EchoColor "$1" "6" "cyan"
|
||||
}
|
||||
|
||||
EchoBlanc() {
|
||||
EchoColor "$1" "7" "white"
|
||||
}
|
||||
|
||||
EchoGras() {
|
||||
if [ "$ModeTxt" == "yes" ];then
|
||||
echo "$1"
|
||||
elif [ "$ModeEad" == "yes" ];then
|
||||
echo "<b> $1 </b><br>"
|
||||
else
|
||||
[ $FANCYTTY = 1 ] && $TPUT bold
|
||||
echo "$1"
|
||||
[ $FANCYTTY = 1 ] && $TPUT sgr0
|
||||
fi
|
||||
}
|
||||
|
||||
Clear() {
|
||||
if [ "$ModeEad" != "yes" -a "$ModeTxt" != "yes" ];then
|
||||
clear
|
||||
fi
|
||||
}
|
||||
|
||||
Question_ouinon() {
|
||||
#attention, il faut synchroniser les modifications avec /usr/share/pyshared/pyeole/ihm.py
|
||||
question=$1
|
||||
[ "$2" = "" ] && interactive='True' || interactive=$2
|
||||
[ "$3" = "" ] && default="non" || default=$3
|
||||
[ "$4" = "" ] && level="info" || level=$4
|
||||
[ "$5" = "" ] && default_uninteractive=$default || default_uninteractive=$5
|
||||
[ ! "$interactive" = "True" ] && [ ! "$interactive" = "False" ] && echo "Question_ouinon : interactive doit être True ou False" && exit 1
|
||||
[ ! "$default" = "oui" ] && [ ! "$default" = "non" ] && echo "Question_ouinon : default doit etre oui ou non" && exit 1
|
||||
[ ! "$default_uninteractive" = "oui" ] && [ ! "$default_uninteractive" = "non" ] && echo "Question_ouinon : default_uninteractive doit etre oui ou non" && exit 1
|
||||
[ ! "$level" = "info" ] && [ ! "$level" = "warn" ] && [ ! "$level" = "err" ] && echo "Question_ouinon : level doit etre info, warn ou err" && exit 1
|
||||
#non interactive
|
||||
if [ "$interactive" = "False" ]; then
|
||||
Rep=$default_uninteractive
|
||||
else
|
||||
question="$question [oui/non]"
|
||||
if [ $level = "info" ]; then
|
||||
echo "$question"
|
||||
elif [ $level = "warn" ]; then
|
||||
EchoOrange "$question"
|
||||
else
|
||||
EchoRouge "$question"
|
||||
fi
|
||||
echo -n "[$default] : "
|
||||
read Rep
|
||||
#passe en minuscule
|
||||
Rep=`echo $Rep | tr A-Z a-z`
|
||||
fi
|
||||
if [ "$default" = "non" ]; then
|
||||
if [ "$Rep" = "oui" -o "$Rep" = "o" -o "$Rep" = "yes" -o "$Rep" = "y" ];then
|
||||
return 0
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
else
|
||||
if [ "$Rep" = "non" -o "$Rep" = "n" -o "$Rep" = "no" ];then
|
||||
return 1
|
||||
else
|
||||
return 0
|
||||
fi
|
||||
fi
|
||||
}
|
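A sketch of how these display helpers are used by the scripts in this diff; the messages are illustrative:

    . /usr/lib/eole/ihm.sh
    EchoVert "Tout est Ok"
    # interactive question, default answer "non", warning level
    if Question_ouinon "Voulez-vous continuer ?" "True" "non" "warn"; then
        Echo "on continue"
    fi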
|
@ -1,64 +0,0 @@
|
|||
. /usr/lib/eole/ihm.sh
|
||||
|
||||
TestFile()
|
||||
{
|
||||
[ -f "$1" ] && return 0
|
||||
EchoRouge "* Erreur : fichier $1 absent"
|
||||
echo
|
||||
return 1
|
||||
}
|
||||
|
||||
TestDir()
|
||||
{
|
||||
[ -d "$1" ] && return 0
|
||||
EchoRouge "* Erreur : répertoire $1 absent"
|
||||
echo
|
||||
return 1
|
||||
}
|
||||
|
||||
#
|
||||
# @NAME : TestCreoled
|
||||
# @AIM : Tester si Creoled fonctionne
|
||||
# @PARAMS : None
|
||||
#
|
||||
function TestCreoled()
|
||||
{
|
||||
var_name='eole_version'
|
||||
if ! CreoleGet ${var_name} > /dev/null
|
||||
then
|
||||
EchoRouge "* Erreur : Creoled n'est pas actif"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
function wait_true_retcode()
|
||||
{
|
||||
# teste une commande en boucle jusqu'à ret_code=0
|
||||
# ou $tries boucles
|
||||
# exemple : wait_true_retcode "texte" "commande param1 param2"
|
||||
tries=120
|
||||
ret_code=1
|
||||
i=0
|
||||
echo -n $1
|
||||
until [ $ret_code -eq 0 ] || [ $i -ge $tries ]
|
||||
do
|
||||
i=`expr $i + 1`
|
||||
sleep 1
|
||||
eval $2
|
||||
ret_code=$?
|
||||
echo -n "."
|
||||
done
|
||||
echo
|
||||
if [ $i -eq $tries ]
|
||||
then
|
||||
EchoRouge "L'action '$1' n'a pas aboutie."
|
||||
exit $ret_code
|
||||
fi
|
||||
}
|
||||
|
||||
function only_root()
|
||||
{
|
||||
[ ! $(id -u) = 0 ] && echo "$0 ne peut être exécuté qu'en root" && exit 1
|
||||
}
|
||||
|
||||
|
|
@ -1,28 +0,0 @@
|
|||
# Les Fonctions Zephir
|
||||
# Trois paramètres
|
||||
# Code Etat (ex INIT;FIN)
|
||||
# Message (Libellé)
|
||||
# Type de procédure (ex. MAJ, RECONFIGURE, INSTANCE)
|
||||
|
||||
ProgZephir=${RepZephir="/usr/share/zephir/scripts"}
|
||||
|
||||
Zlog(){
|
||||
$RepZephir/Zlog.py "$1" "$2" "$3"
|
||||
}
|
||||
|
||||
Zephir() {
|
||||
$RepZephir/Zephir.py "$1" "$2" "$3"
|
||||
}
|
||||
|
||||
# Verifie si une fonction est bloquée par Zephir
|
||||
# Une erreur retourne OK pour ne pas bloquer la procédure
|
||||
Init() {
|
||||
ProgZephir=$RepZephir/Zinit.py
|
||||
if [ -x $ProgZephir ];then
|
||||
$ProgZephir $1
|
||||
return $?
|
||||
else
|
||||
echo "Avertissement : $ProgZephir non trouvé"
|
||||
return 0
|
||||
fi
|
||||
}
|
|
@ -1,101 +0,0 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<creole>
|
||||
<!-- Fichiers et services gérés par Creole -->
|
||||
<files>
|
||||
<!-- Déclaration d'un fichier dans une liste des fichiers qui vont
|
||||
utiliser les variables et être générés par Creole -->
|
||||
<file filelist='demo' name='/etc/demo/fichier.conf' mkdir='True' rm='True'/>
|
||||
</files>
|
||||
<!-- Conteneurs et configurations spécifiques liées aux conteneurs EOLE -->
|
||||
<containers>
|
||||
<!-- Déclaration d'un conteneur membre d'un groupe de conteneurs -->
|
||||
<container name='nom_conteneur' group='group_conteneurs'>
|
||||
<!-- Les balises packages servent à lister les paquets à installer dans le conteneur-->
|
||||
<!-- Si le module n'est pas en mode conteneur les paquets seront installés quand même sur le serveur-->
|
||||
<package>nom-du-paquet-a-installer-dans-le-conteneur</package>
|
||||
|
||||
<!-- Service à gérer dans le conteneur -->
|
||||
<!-- Si la machine n'est pas en mode conteneur le service sera géré sur le serveur -->
|
||||
<service servicelist='demoservices'>nom-du-service-a-lancer-dans-le-conteneur</service>
|
||||
</container>
|
||||
</containers>
|
||||
|
||||
<!-- Déclaration des variables de configurations -->
|
||||
<variables>
|
||||
<!-- Familles de variables, correspondent aux onglets dans GenConfig -->
|
||||
<family name='famille_demo'>
|
||||
<!-- Déclaration d'une variable de type "oui/non" avec "oui" comme valeur par défaut -->
|
||||
<variable name='activer_demo' type='oui/non' description='Activer le service de demonstration'>
|
||||
<value>oui</value>
|
||||
</variable>
|
||||
|
||||
<!-- Déclaration d'une variable de type "string" sans valeur par défaut -->
|
||||
<variable name="chaine" type='string' description='Variable de type String'/>
|
||||
|
||||
<!-- Déclaration d'une variable de type "IP" sans valeur par défaut -->
|
||||
<variable name="adresse_ip" type='ip' description='Variable de type adresse IP'/>
|
||||
|
||||
<!-- Déclaration d'une variable de type "string" avec "je suis une chaine" comme valeur par défaut -->
|
||||
<variable name="chaine2" type='string' description='Variable de type String avec une valeur par défaut'>
|
||||
<value>je suis une chaine</value>
|
||||
</variable>
|
||||
|
||||
<!-- Variable avec des valeurs multiples "multi" -->
|
||||
<variable name='ma_multi' type='string' multi="true" description="Une variable avec plusieurs valeurs"/>
|
||||
|
||||
<!-- Variable cachée par une autre -->
|
||||
<variable name='varquicache' type='oui/non' description="Répondez oui pour voir la variable que je cache">
|
||||
<value>non</value>
|
||||
</variable>
|
||||
<variable name='variablecachee' type='string' description='Je ne suis plus cachée'/>
|
||||
<!-- Attention : pour cacher effectivement la variable, il faut déclarer une condition frozen_if_in -->
|
||||
|
||||
<!-- Groupe de variables -->
|
||||
<!-- variable "maitre" du groupe -->
|
||||
<variable name='ma_master' type='string' description='Je suis une variable maitre' multi="True"/>
|
||||
<variable name='ma_slave' type='string' description='Je suis une variable esclave obligatoire' mandatory="True"/>
|
||||
<!-- Attention : il faut déclarer un groupe dans les conditions pour que le groupe soit vraiment un groupe -->
|
||||
|
||||
</family>
|
||||
</variables>
|
||||
|
||||
<!-- Déclaration des contraintes sur le traitement des fichiers -->
|
||||
<constraints>
|
||||
<!-- Condition de traitement :
|
||||
ici les cibles de la contrainte dans les balises "target" seront désactivées si la valeur de
|
||||
la variable activer_demo est égale à "non" -->
|
||||
<condition name='disabled_if_in' source='activer_demo'>
|
||||
<!-- Valeur de la variable source pour que la condition soit validée -->
|
||||
<param>non</param>
|
||||
|
||||
<!-- Cible de type famille, conduit à la désactivation de toutes les variables de la famille -->
|
||||
<target type='family'>famille_demo</target>
|
||||
|
||||
<!-- Cible de type filelist, les fichiers de la filelist ne seront pas générés si la condition est vérifiée -->
|
||||
<target type='filelist'>demo</target>
|
||||
|
||||
<!-- Cible de type servicelist, les services de la servicelist ne seront plus gérés par Creole -->
|
||||
<target type='servicelist'>demoservices</target>
|
||||
</condition>
|
||||
|
||||
<!-- Condition frozen_if_in qui cache effectivement la variable "variablecachee" si "varquicache" est égale à "non" -->
|
||||
<condition name='frozen_if_in' source='varquicache'>
|
||||
<param>non</param>
|
||||
<target type='variable'>variablecachee</target>
|
||||
</condition>
|
||||
|
||||
<!-- Remplir automatiquement une variable avec la valeur d'une autre -->
|
||||
<fill name='calc_val' target='chaine'>
|
||||
<param type='eole' name='valeur'>nom_machine</param>
|
||||
</fill>
|
||||
|
||||
<!-- Déclaration du groupe de variable avec la variable maitre et esclave -->
|
||||
<group master='ma_master'>
|
||||
<slave>ma_slave</slave>
|
||||
</group>
|
||||
</constraints>
|
||||
<!-- Balise d'aide pour afficher des info-bulles dans GenConfig lorsqu'on survole les variables -->
|
||||
<help>
|
||||
<variable name='activer_demo'>Message d'aide du type : Activer un service type pour les besoins de démonstration</variable>
|
||||
</help>
|
||||
</creole>
|
|
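After writing such a dictionary, a plausible check-and-apply sequence with the tools documented above (dictionary-only lint, then reconfiguration):

    # validate the dictionaries only
    CreoleLint -d
    # apply the new variables and templates
    reconfigure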
@ -1,10 +0,0 @@
|
|||
/var/log/creole.log
|
||||
/var/log/rsyslog/local/creoled/*.log
|
||||
/var/log/reconfigure.log {
|
||||
rotate 4
|
||||
weekly
|
||||
missingok
|
||||
notifempty
|
||||
compress
|
||||
delaycompress
|
||||
}
|
|
@ -1,5 +0,0 @@
|
|||
/var/log/schedule.log {
|
||||
rotate 10
|
||||
notifempty
|
||||
weekly
|
||||
}
|
|
@ -1,4 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
printf "
|
||||
"
|
3
po_list
|
@ -1,3 +0,0 @@
|
|||
creole creole/*.py bin/Maj-Auto bin/Query-Auto bin/Maj-Release
|
||||
update-manager upgrade/Upgrade-Auto
|
||||
eole-schedule schedule/schedule
|
|
@ -1,9 +0,0 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
from creole.server import CreoleDaemon
|
||||
from pyeole.ihm import only_root
|
||||
|
||||
only_root()
|
||||
|
||||
d = CreoleDaemon()
|
||||
d.run()
|
|
@ -1,6 +0,0 @@
|
|||
# Configuration commune aux scripts schedule
|
||||
|
||||
SAVDIR=/home/backup
|
||||
# pour que l'affichage de [ ok ] soit ok
|
||||
export TERM='dumb'
|
||||
umask 0077
|
|
@ -1,217 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
utilitaire de lancement en fonction
|
||||
|
||||
- cron
|
||||
- bareos
|
||||
|
||||
"""
|
||||
import sys
|
||||
import time
|
||||
from os import readlink, unlink
|
||||
from os.path import join
|
||||
from glob import glob
|
||||
|
||||
from pyeole.process import system_out
|
||||
from pyeole.schedule import SCHEDULE_DIR
|
||||
from pyeole.bareos import is_running_jobs, bareos_get_jobs_list
|
||||
from creole.client import CreoleClient
|
||||
from pyeole.i18n import i18n
|
||||
from pyeole.log import init_logging
|
||||
|
||||
_ = i18n('eole-schedule')
|
||||
|
||||
log = init_logging(name=u'eole-schedule', level='info', syslog=True,
|
||||
console=['stdout', 'stderr'])
|
||||
|
||||
client = CreoleClient()
|
||||
|
||||
NOW = time.strftime('%d/%m/%y %H:%M', time.localtime())
|
||||
# day in month
|
||||
MONTH_DAY = int(time.strftime('%d', time.localtime()))
|
||||
# day in week
|
||||
WEEK_DAY = int(time.strftime('%w', time.localtime()))
|
||||
HOUR = int(time.strftime('%H', time.localtime()))
|
||||
|
||||
# night start at 12
|
||||
if HOUR > 12:
|
||||
WEEK_DAY += 1
|
||||
|
||||
if WEEK_DAY == 0:
|
||||
# sunday is 7
|
||||
WEEK_DAY = 7
|
||||
if WEEK_DAY == 8:
|
||||
WEEK_DAY = 1
|
||||
|
||||
|
||||
def log_parts(func):
|
||||
def split(msg):
|
||||
shreds = msg.split('\n')
|
||||
return [func(shred) for shred in shreds if len(shred) > 0]
|
||||
return split
|
||||
|
||||
log.info = log_parts(log.info)
|
||||
log.error = log_parts(log.error)
|
||||
log.warning = log_parts(log.warning)
|
||||
|
||||
|
||||
def run_runparts(mode, bareos_type):
|
||||
"""
|
||||
run part script
|
||||
test if weekly or monthly script must be launched this day
|
||||
"""
|
||||
if mode == 'weekly' and WEEK_DAY != client.get('/schedule/schedule/weekday'):
|
||||
return
|
||||
if mode == 'monthly' and ( WEEK_DAY != client.get('/schedule/schedule/monthday') or MONTH_DAY > 7 ):
|
||||
return
|
||||
part_str = u"{} schedule {}".format(bareos_type, mode)
|
||||
log.info(_("Starting {}").format(part_str))
|
||||
dirname = join(SCHEDULE_DIR, mode, bareos_type)
|
||||
env = {'PATH': '/sbin:/usr/sbin:/bin:/usr/bin:/usr/share/eole',
|
||||
'LC_ALL': 'fr_FR.UTF-8'}
|
||||
if mode != 'once':
|
||||
runparts_cmd = "/bin/run-parts --exit-on-error --report {} --arg {}"
|
||||
wrapped_runparts_cmd = ['/bin/bash',
|
||||
'-c',
|
||||
runparts_cmd.format(dirname, mode)]
|
||||
ret, out, err = system_out(wrapped_runparts_cmd, env=env)
|
||||
else:
|
||||
# unlink script before launch it
|
||||
# (for example remove 'reboot' link before restart the machine)
|
||||
names = glob(join(dirname, '*'))
|
||||
names.sort()
|
||||
ret = 0
|
||||
out = None
|
||||
err = None
|
||||
for name in names:
|
||||
script = readlink(name)
|
||||
unlink(name)
|
||||
wrapped_runparts_cmd = ['/bin/bash',
|
||||
'-c', script]
|
||||
ret, out, err = system_out(wrapped_runparts_cmd, env=env)
|
||||
if ret != 0:
|
||||
break
|
||||
if out:
|
||||
log.info(out)
|
||||
if err:
|
||||
log.error(err)
|
||||
if ret != 0:
|
||||
# on affiche sur stderr pour que cron le récupère et l'envoie par mail
|
||||
# ce qui est affiché sur stdout est envoyé dans les logs
|
||||
if out:
|
||||
sys.stderr.write(out)
|
||||
if err:
|
||||
sys.stderr.write(err)
|
||||
sys.stderr.write(_("Error detected\n"))
|
||||
log.error(_("{} exited with error return code").format(part_str))
|
||||
sys.exit(ret)
|
||||
else:
|
||||
log.info(_("{} finished").format(part_str))
|
||||
|
||||
|
||||
def schedule_pre():
|
||||
run_runparts('daily', 'pre')
|
||||
run_runparts('weekly', 'pre')
|
||||
run_runparts('monthly', 'pre')
|
||||
run_runparts('once', 'pre')
|
||||
|
||||
|
||||
def schedule_post():
|
||||
i = 0
|
||||
while is_running_jobs():
|
||||
time.sleep(1)
|
||||
i += 1
|
||||
if i == 30:
|
||||
log.info(_("Job already running, cancelling"))
|
||||
sys.exit(1)
|
||||
run_runparts('daily', 'post')
|
||||
run_runparts('weekly', 'post')
|
||||
run_runparts('monthly', 'post')
|
||||
run_runparts('once', 'post')
|
||||
|
||||
|
||||
def schedule_cron():
|
||||
"""
|
||||
If schedule.py is launched by cron, try to run pre and post
|
||||
cron file for daily, weekly and monthly if no backup is set this day
|
||||
"""
|
||||
def exit_not_cron():
|
||||
log.info(_("bareos is set for this day, cancelled"))
|
||||
sys.exit(0)
|
||||
try:
|
||||
bareosjobs = bareos_get_jobs_list()
|
||||
for job in bareosjobs:
|
||||
day = int(job['day'])
|
||||
if job['job'] == 'daily':
|
||||
if day <= WEEK_DAY <= job['end_day']:
|
||||
exit_not_cron()
|
||||
elif job['job'] == 'weekly':
|
||||
if WEEK_DAY == day:
|
||||
exit_not_cron()
|
||||
elif job['job'] == 'monthly':
|
||||
if WEEK_DAY == day and MONTH_DAY < 8:
|
||||
exit_not_cron()
|
||||
else:
|
||||
raise Exception(_('Unknown job: {0}').format(job['job']))
|
||||
except SystemExit:
|
||||
raise
|
||||
except:
|
||||
pass
|
||||
run_runparts('daily', 'pre')
|
||||
run_runparts('daily', 'post')
|
||||
|
||||
run_runparts('weekly', 'pre')
|
||||
run_runparts('weekly', 'post')
|
||||
|
||||
run_runparts('monthly', 'pre')
|
||||
run_runparts('monthly', 'post')
|
||||
|
||||
run_runparts('once', 'pre')
|
||||
run_runparts('once', 'post')
|
||||
# __________________________________________________
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
usage = """usage:
|
||||
{0} bareos [pre|post]
|
||||
{0} cron""".format(sys.argv[0])
|
||||
|
||||
if len(sys.argv) == 1:
|
||||
print usage
|
||||
sys.exit(1)
|
||||
|
||||
if len(sys.argv) > 3:
|
||||
log.error(_("Too many arguments: {0}").format(sys.argv))
|
||||
print usage
|
||||
sys.exit(1)
|
||||
|
||||
mode = sys.argv[1]
|
||||
|
||||
if mode == 'bareos':
|
||||
# pre|post
|
||||
if len(sys.argv) == 2:
|
||||
log.error(_("Not enough arguments: {0}").format(sys.argv))
|
||||
print usage
|
||||
sys.exit(1)
|
||||
if sys.argv[2] not in ['pre', 'post']:
|
||||
log.error(_("Second argument must be pre or post: {0}").format(sys.argv))
|
||||
print usage
|
||||
sys.exit(1)
|
||||
bareos_type = sys.argv[2]
|
||||
if bareos_type == 'pre':
|
||||
schedule_pre()
|
||||
elif bareos_type == 'post':
|
||||
schedule_post()
|
||||
|
||||
elif mode == 'cron':
|
||||
if len(sys.argv) != 2:
|
||||
log.error(_("Too many arguments for cron: {0}").format(sys.argv))
|
||||
print usage
|
||||
sys.exit(1)
|
||||
schedule_cron()
|
||||
else:
|
||||
log.error(_("Unknown schedule type : {0}").format(mode))
|
||||
print usage
|
||||
sys.exit(1)
|
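The entry points accepted by the usage string above, as invoked by the wrapper script that follows and, presumably, by cron:

    /usr/share/eole/schedule/schedule bareos pre
    /usr/share/eole/schedule/schedule bareos post
    /usr/share/eole/schedule/schedule cron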
|
@ -1,15 +0,0 @@
|
|||
#!/bin/bash
|
||||
# script pour rediriger la sortie standard/erreur dans les logs
|
||||
|
||||
OPT=$1
|
||||
|
||||
if [ "$OPT" = "pre" ]; then
|
||||
/usr/share/eole/schedule/schedule bareos $OPT # faire apparaitre les run-parts dans le rapport de sauvegarde, ne pas rediriger >> /var/log/schedule.log 2>&1
|
||||
exit $?
|
||||
elif [ "$OPT" = "post" ]; then
|
||||
echo "/usr/share/eole/schedule/schedule bareos $OPT >> /var/lib/eole/reports/rapport-bareos.txt 2>&1" | at now
|
||||
exit $?
|
||||
else
|
||||
echo Usage : $0 pre/post
|
||||
exit 1
|
||||
fi
|
Some files were not shown because too many files have changed in this diff