Compare commits
24 Commits
develop ... pkg/dev/ri
SHA1
---
c740ec3fe3
56b1f12a4a
4fc3e74bbd
83d74c2b06
6a27b002ff
e2d73932c0
980a119ef9
b9da2ce686
941261c830
98c77bf719
1b9d87fa53
0e988d7040
be97d757d9
19d90fd9bc
5653de1e99
399bfb9ab6
234b82b459
c9e0bcbbfe
47e4976f54
dd33ea5b8f
689df4ec23
223fb9aaf3
bed27a1e58
40eff91684
debian/changelog (vendored): 5 changes
@@ -1,5 +0,0 @@
risotto (0.1) unstable; urgency=low

  * first version

 -- Cadoles <contact@cadoles.com>  Fri, 20 Mar 2020 15:18:25 +0100

debian/control (vendored): 14 changes
@@ -2,13 +2,23 @@ Source: risotto
Section: admin
Priority: extra
Maintainer: Cadoles <contact@cadoles.com>
Build-depends: debhelper (>=11), python3-all, python3-setuptools
Build-depends: debhelper (>=11), python3-all, python3-setuptools, dh-python
Standards-Version: 3.9.4
Homepage: https://forge.cadoles.com/Infra/risotto

Package: python3-risotto
Architecture: any
Pre-Depends: dpkg, python3, ${misc:Pre-Depends}
Depends: ${python:Depends}, ${misc:Depends},
 python3-asyncpg,
 python3-rougail,
 python3-aiohttp,
 python3-sdnotify
Description: configuration manager libraries

Package: risotto
Architecture: any
Pre-Depends: dpkg, python3, ${misc:Pre-Depends}
Depends: ${python:Depends}, ${misc:Depends}
Depends: ${python:Depends}, ${misc:Depends}, python3-risotto
Description: configuration manager

debian/risotto.install (vendored, new file): 2 changes
@@ -0,0 +1,2 @@
script/risotto-server usr/bin/
sql/risotto.sql usr/share/eole/db/eole-risotto/gen/

script/risotto-server: 0 changes (Normal file → Executable file)

sql/risotto.sql
@@ -1,12 +1,12 @@
CREATE TABLE RisottoLog(
    LogId SERIAL PRIMARY KEY,
    ContextId INTEGER,
    Msg VARCHAR(255) NOT NULL,
    URI VARCHAR(255),
    URIS VARCHAR(255),
    UserLogin VARCHAR(100) NOT NULL,
    Status INTEGER NOT NULL,
    Kwargs JSON,
    Level VARCHAR(10) NOT NULL,
    ContextId INTEGER,
    Data JSON,
    Returns JSON,
    StartDate timestamp DEFAULT current_timestamp,
    StopDate timestamp

@@ -65,18 +65,6 @@ if 'CELERYRISOTTO_DB_USER' in environ:
    CELERYRISOTTO_DB_USER = environ['CELERYRISOTTO_DB_USER']
else:
    CELERYRISOTTO_DB_USER = config.get('CELERYRISOTTO_DB_USER', None)
if 'LEMUR_DB_NAME' in environ:
    LEMUR_DB_NAME = environ['LEMUR_DB_NAME']
else:
    LEMUR_DB_NAME = config.get('LEMUR_DB_NAME', None)
if 'LEMUR_DB_PASSWORD' in environ:
    LEMUR_DB_PASSWORD = environ['LEMUR_DB_PASSWORD']
else:
    LEMUR_DB_PASSWORD = config.get('LEMUR_DB_PASSWORD', None)
if 'LEMUR_DB_USER' in environ:
    LEMUR_DB_USER = environ['LEMUR_DB_USER']
else:
    LEMUR_DB_USER = config.get('LEMUR_DB_USER', None)
if 'DB_ADDRESS' in environ:
    DB_ADDRESS = environ['DB_ADDRESS']
else:
@@ -113,12 +101,12 @@ if 'PASSWORD_ADMIN_EMAIL' in environ:
    PASSWORD_ADMIN_EMAIL = environ['PASSWORD_ADMIN_EMAIL']
else:
    # this parameter is mandatory
    PASSWORD_ADMIN_EMAIL = config.get('PASSWORD_ADMIN_EMAIL', 'XXX')
    PASSWORD_ADMIN_EMAIL = config['PASSWORD_ADMIN_EMAIL']
if 'PASSWORD_ADMIN_PASSWORD' in environ:
    PASSWORD_ADMIN_PASSWORD = environ['PASSWORD_ADMIN_PASSWORD']
else:
    # this parameter is mandatory
    PASSWORD_ADMIN_PASSWORD = config.get('PASSWORD_ADMIN_PASSWORD', 'XXX')
    PASSWORD_ADMIN_PASSWORD = config['PASSWORD_ADMIN_PASSWORD']
if 'PASSWORD_DEVICE_IDENTIFIER' in environ:
    PASSWORD_DEVICE_IDENTIFIER = environ['PASSWORD_DEVICE_IDENTIFIER']
else:
@@ -127,19 +115,14 @@ if 'PASSWORD_URL' in environ:
    PASSWORD_URL = environ['PASSWORD_URL']
else:
    PASSWORD_URL = config.get('PASSWORD_URL', 'https://localhost:8001/')

if 'PASSWORD_LENGTH' in environ:
    PASSWORD_LENGTH = int(environ['PASSWORD_LENGTH'])
else:
    PASSWORD_LENGTH = int(config.get('PASSWORD_LENGTH', 20))
if 'PKI_ADMIN_PASSWORD' in environ:
    PKI_ADMIN_PASSWORD = environ['PKI_ADMIN_PASSWORD']
else:
    PKI_ADMIN_PASSWORD = config.get('PKI_ADMIN_PASSWORD', 'XXX')
    PKI_ADMIN_PASSWORD = config['PKI_ADMIN_PASSWORD']
if 'PKI_ADMIN_EMAIL' in environ:
    PKI_ADMIN_EMAIL = environ['PKI_ADMIN_EMAIL']
else:
    PKI_ADMIN_EMAIL = config.get('PKI_ADMIN_EMAIL', 'XXX')
    PKI_ADMIN_EMAIL = config['PKI_ADMIN_EMAIL']
if 'PKI_URL' in environ:
    PKI_URL = environ['PKI_URL']
else:
@@ -153,8 +136,7 @@ def dsn_factory(database, user, password, address=DB_ADDRESS):

_config = {'database': {'dsn': dsn_factory(RISOTTO_DB_NAME, RISOTTO_DB_USER, RISOTTO_DB_PASSWORD),
                        'tiramisu_dsn': dsn_factory(TIRAMISU_DB_NAME, TIRAMISU_DB_USER, TIRAMISU_DB_PASSWORD),
                        'celery_dsn': dsn_factory(CELERYRISOTTO_DB_NAME, CELERYRISOTTO_DB_USER, CELERYRISOTTO_DB_PASSWORD),
                        'lemur_dns': dsn_factory(LEMUR_DB_NAME, LEMUR_DB_USER, LEMUR_DB_PASSWORD),
                        'celery_dsn': dsn_factory(CELERYRISOTTO_DB_NAME, CELERYRISOTTO_DB_USER, CELERYRISOTTO_DB_PASSWORD)
                        },
           'http_server': {'port': RISOTTO_PORT,
                           'default_user': DEFAULT_USER,
@@ -173,7 +155,6 @@ _config = {'database': {'dsn': dsn_factory(RISOTTO_DB_NAME, RISOTTO_DB_USER, RIS
                        'admin_password': PASSWORD_ADMIN_PASSWORD,
                        'device_identifier': PASSWORD_DEVICE_IDENTIFIER,
                        'service_url': PASSWORD_URL,
                        'length': PASSWORD_LENGTH,
                        },
           'pki': {'admin_password': PKI_ADMIN_PASSWORD,
                   'owner': PKI_ADMIN_EMAIL,

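The config hunks above appear to replace the placeholder fallbacks such as `config.get('PKI_ADMIN_PASSWORD', 'XXX')` with direct `config['PKI_ADMIN_PASSWORD']` lookups, so missing mandatory settings fail immediately instead of running with a dummy value. A minimal sketch of that pattern; `mandatory_setting` is a hypothetical helper, not part of the diff:

```python
from os import environ

def mandatory_setting(name: str, config: dict) -> str:
    """Environment variable first, otherwise a mandatory key in the config mapping."""
    if name in environ:
        return environ[name]
    # config[name] raises KeyError instead of silently returning a placeholder
    return config[name]

# usage sketch:
# PKI_ADMIN_PASSWORD = mandatory_setting('PKI_ADMIN_PASSWORD', config)
```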
@@ -3,11 +3,3 @@ class Context:
        self.paths = []
        self.context_id = None
        self.start_id = None

    def copy(self):
        context = Context()
        for key, value in self.__dict__.items():
            if key.startswith('__'):
                continue
            setattr(context, key, value)
        return context

@@ -12,7 +12,7 @@ except:
from .config import get_config
from .utils import _, tiramisu_display_name
from .logger import log
from .dispatcher import get_dispatcher
from .dispatcher import dispatcher
from .context import Context


@@ -25,7 +25,7 @@ class Controller:
    def __init__(self,
                 test: bool,
                 ) -> None:
        self.dispatcher = get_dispatcher()
        pass

    async def call(self,
                   uri: str,
@@ -42,11 +42,11 @@ class Controller:
        module = message.split('.', 1)[0]
        if current_module != module:
            raise ValueError(_(f'cannot call to external module ("{module}") to the URI "{uri}" from "{current_module}"'))
        return await self.dispatcher.call(version,
                                          message,
                                          risotto_context,
                                          **kwargs,
                                          )
        return await dispatcher.call(version,
                                     message,
                                     risotto_context,
                                     **kwargs,
                                     )

    async def publish(self,
                      uri: str,
@@ -58,19 +58,20 @@ class Controller:
        if args:
            raise ValueError(_(f'the URI "{uri}" can only be published with keyword arguments'))
        version, message = uri.split('.', 1)
        await self.dispatcher.publish(version,
                                      message,
                                      risotto_context,
                                      **kwargs,
                                      )
        await dispatcher.publish(version,
                                 message,
                                 risotto_context,
                                 **kwargs,
                                 )

    @staticmethod
    async def check_role(self,
                         uri: str,
                         username: str,
                         **kwargs: dict,
                         ) -> None:
        # create a new config
        async with await Config(self.dispatcher.option) as config:
        async with await Config(dispatcher.option) as config:
            await config.property.read_write()
            await config.option('message').value.set(uri)
            subconfig = config.option(uri)
@@ -83,10 +84,10 @@ class Controller:
                raise ValueError(_(f'unknown parameter in "{uri}": "{key}"'))
            except ValueOptionError as err:
                raise ValueError(_(f'invalid parameter in "{uri}": {err}'))
        await self.dispatcher.check_role(subconfig,
                                         username,
                                         uri,
                                         )
        await dispatcher.check_role(subconfig,
                                    username,
                                    uri,
                                    )

    async def on_join(self,
                      risotto_context,
@@ -98,23 +99,20 @@ class TiramisuController(Controller):
    def __init__(self,
                 test: bool,
                 ) -> None:
        self.source_imported = None
        if not 'dataset_name' in vars(self):
            raise Exception(f'please specify "dataset_name" to "{self.__class__.__name__}"')
        self.tiramisu_cache_root_path = join(get_config()['cache']['root_path'], self.dataset_name)
        super().__init__(test)
        self.internal_source_name = get_config()['servermodel']['internal_source']
        if not test:
            db_conf = get_config()['database']['tiramisu_dsn']
            self.save_storage = Storage(engine='postgres')
            self.save_storage.setting(dsn=db_conf)
        if self.dataset_name != 'servermodel':
            self.optiondescription = None
            self.dispatcher.set_function('v1.setting.dataset.updated',
                                         None,
                                         TiramisuController.dataset_updated,
                                         self.__class__.__module__,
                                         )
            dispatcher.set_function('v1.setting.dataset.updated',
                                    None,
                                    TiramisuController.dataset_updated,
                                    self.__class__.__module__,
                                    )

    async def on_join(self,
                      risotto_context: Context,
@@ -132,12 +130,6 @@ class TiramisuController(Controller):
                          risotto_context: Context,
                          ) -> None:
        sources = await self.get_sources(risotto_context)
        source_imported = sources != [self.internal_source_name]
        if source_imported and self.source_imported is False:
            await self.load_datas(risotto_context)
        self.source_imported = source_imported
        if not self.source_imported:
            return
        self._aggregate_tiramisu_funcs(sources)
        self._convert_dictionaries_to_tiramisu(sources)

@@ -183,9 +175,7 @@ except:
            funcs.write(fh.read())
            funcs.write(b'\n')

    def _convert_dictionaries_to_tiramisu(self,
                                          sources: list,
                                          ) -> None:
    def _convert_dictionaries_to_tiramisu(self, sources: list) -> None:
        funcs_file = join(self.tiramisu_cache_root_path, 'funcs.py')
        tiramisu_file = join(self.tiramisu_cache_root_path, 'tiramisu.py')
        dictionaries_dir = join(self.tiramisu_cache_root_path, 'dictionaries')
@@ -319,7 +309,6 @@ except:
            await config_std.property.read_only()

            # copy informations from 'to deploy' configuration to configuration
            await config.information.importation(await config_std.information.exportation())
            await config.value.importation(await config_std.value.exportation())
            await config.permissive.importation(await config_std.permissive.exportation())
            await config.property.importation(await config_std.property.exportation())

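The controller hunks route `call`, `publish` and `check_role` through the module-level `dispatcher` instead of `self.dispatcher`, while keeping the guard that a controller may only call URIs of its own module. A hedged sketch of that guard with hypothetical parameter names; `dispatcher` stands for the instance imported from `.dispatcher`:

```python
# Sketch only: same-module guard in the spirit of Controller.call.
async def call(uri: str, current_module: str, dispatcher, context, **kwargs):
    version, message = uri.split('.', 1)
    module = message.split('.', 1)[0]
    if current_module != module:
        # calling another module's URI directly is refused
        raise ValueError(f'cannot call to external module ("{module}") from "{current_module}"')
    return await dispatcher.call(version, message, context, **kwargs)
```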
@@ -18,9 +18,6 @@ from .context import Context
from . import register


DISPATCHER = None


class CallDispatcher:
    async def valid_call_returns(self,
                                 risotto_context: Context,
@@ -45,21 +42,14 @@ class CallDispatcher:
        for ret in returns:
            async with await Config(response, display_name=lambda self, dyn_name, suffix: self.impl_getname()) as config:
                await config.property.read_write()
                key = None
                try:
                    for key, value in ret.items():
                        await config.option(key).value.set(value)
                except AttributeError as err:
                    if key is not None:
                        err = _(f'function {module_name}.{function_name} return the unknown parameter "{key}" for the uri "{risotto_context.version}.{risotto_context.message}"')
                    else:
                        err = _(f'function {module_name}.{function_name} return unconsistency data "{err}" for the uri "{risotto_context.version}.{risotto_context.message}"')
                except AttributeError:
                    err = _(f'function {module_name}.{function_name} return the unknown parameter "{key}" for the uri "{risotto_context.version}.{risotto_context.message}"')
                    raise CallError(err)
                except ValueError as err:
                    if key is not None:
                        err = _(f'function {module_name}.{function_name} return the invalid parameter "{key}" for the uri "{risotto_context.version}.{risotto_context.message}": {err}')
                    else:
                        err = _(f'function {module_name}.{function_name} return unconsistency error for the uri "{risotto_context.version}.{risotto_context.message}": {err}')
                except ValueError:
                    err = _(f'function {module_name}.{function_name} return the parameter "{key}" with an unvalid value "{value}" for the uri "{risotto_context.version}.{risotto_context.message}"')
                    raise CallError(err)
                await config.property.read_only()
                mandatories = await config.value.mandatory()
@@ -118,16 +108,12 @@ class CallDispatcher:
                                  config_arguments,
                                  function_obj,
                                  )
                await log.success(risotto_context,
                                  ret,
                                  )
            except Exception as err:
            except CallError as err:
                await log.failed(risotto_context,
                                 str(err),
                                 )
                raise CallError(err) from err
                raise err from err
        else:
            error = None
            try:
                async with self.pool.acquire() as connection:
                    await connection.set_type_codec(
@@ -171,22 +157,24 @@ class CallDispatcher:
                                         )
                        raise err from err
                    except CallError as err:
                        error = err
                        raise err from err
                    except Exception as err:
                        # if there is a problem with arguments, just send an error and do nothing
                        if get_config()['global']['debug']:
                            print_exc()
                        await log.failed(risotto_context,
                                         str(err),
                                         )
                        error = err
            if error:
                if not internal:
                    err = CallError(str(error))
                    err.context_id = risotto_context.context_id
                else:
                    err = error
                raise err from error
            async with self.pool.acquire() as connection:
                await connection.set_type_codec(
                    'json',
                    encoder=dumps,
                    decoder=loads,
                    schema='pg_catalog'
                )
                risotto_context.connection = connection
                async with connection.transaction():
                    await log.failed(risotto_context,
                                     str(err),
                                     )
                    raise err from err
        return ret


@@ -199,6 +187,8 @@ class PublishDispatcher:
        for message, message_infos in messages.items():
            # event not emit locally
            if message_infos['pattern'] == 'event' and 'functions' in message_infos and message_infos['functions']:
                # module, submodule, submessage = message.split('.', 2)
                # if f'{module}.{submodule}' not in self.injected_self:
                uri = f'{version}.{message}'
                print(f' - {uri}')
                await self.listened_connection.add_listener(uri,
@@ -234,34 +224,21 @@ class PublishDispatcher:
        version, message = uri.split('.', 1)
        loop = get_event_loop()
        remote_kw = loads(payload)
        for function_obj in self.messages[version][message]['functions']:
            risotto_context = self.build_new_context(remote_kw['context'],
                                                     version,
                                                     message,
                                                     'event',
                                                     )
            callback = self.get_callback(version, message, function_obj, risotto_context, remote_kw['kwargs'],)
            loop.call_soon(callback)

    def get_callback(self,
                     version,
                     message,
                     function_obj,
                     risotto_context,
                     kwargs,
                     ):
        return lambda: ensure_future(self._publish(version,
                                                   message,
                                                   function_obj,
                                                   risotto_context,
                                                   **kwargs,
                                                   ))

        risotto_context = self.build_new_context(remote_kw['context'],
                                                 version,
                                                 message,
                                                 'event',
                                                 )
        callback = lambda: ensure_future(self._publish(version,
                                                       message,
                                                       risotto_context,
                                                       **remote_kw['kwargs'],
                                                       ))
        loop.call_soon(callback)

    async def _publish(self,
                       version: str,
                       message: str,
                       function_obj,
                       risotto_context: Context,
                       **kwargs,
                       ) -> None:
@@ -279,40 +256,50 @@ class PublishDispatcher:
                schema='pg_catalog'
            )
            risotto_context.connection = connection
            function_name = function_obj['function'].__name__
            info_msg = _(f"call function {function_obj['full_module_name']}.{function_name}")
            try:
                async with connection.transaction():
                    try:
                        await log.start(risotto_context,
                                        kwargs,
                                        info_msg,
                                        )
                        await self.check_message_type(risotto_context,
                                                      kwargs,
                                                      )
                        await self.launch(risotto_context,
                                          kwargs,
                                          config_arguments,
                                          function_obj,
                                          )
                        # log the success
                        await log.success(risotto_context)
                    except CallError as err:
                        if get_config()['global']['debug']:
                            print_exc()
                        await log.failed(risotto_context,
                                         str(err),
                                         )
            except CallError:
                pass
            except Exception as err:
                # if there is a problem with arguments, log and do nothing
                if get_config()['global']['debug']:
                    print_exc()
                await log.failed(risotto_context,
                                 str(err),
                                 )
            for function_obj in self.messages[version][message]['functions']:
                function_name = function_obj['function'].__name__
                info_msg = _(f"call function {function_obj['full_module_name']}.{function_name}")
                try:
                    async with connection.transaction():
                        try:
                            await log.start(risotto_context,
                                            kwargs,
                                            info_msg,
                                            )
                            await self.check_message_type(risotto_context,
                                                          kwargs,
                                                          )
                            await self.launch(risotto_context,
                                              kwargs,
                                              config_arguments,
                                              function_obj,
                                              )
                            # log the success
                            await log.success(risotto_context)
                        except CallError as err:
                            if get_config()['global']['debug']:
                                print_exc()
                            await log.failed(risotto_context,
                                             str(err),
                                             )
                except CallError:
                    pass
                except Exception as err:
                    # if there is a problem with arguments, log and do nothing
                    if get_config()['global']['debug']:
                        print_exc()
                    async with self.pool.acquire() as connection:
                        await connection.set_type_codec(
                            'json',
                            encoder=dumps,
                            decoder=loads,
                            schema='pg_catalog'
                        )
                        risotto_context.connection = connection
                        async with connection.transaction():
                            await log.failed(risotto_context,
                                             str(err),
                                             )


class Dispatcher(register.RegisterDispatcher,
@@ -340,7 +327,6 @@ class Dispatcher(register.RegisterDispatcher,
        risotto_context.type = type
        risotto_context.message = message
        risotto_context.version = version
        risotto_context.pool = self.pool
        return risotto_context

    async def check_message_type(self,
@@ -478,8 +464,6 @@ class Dispatcher(register.RegisterDispatcher,
                              )
        # notification
        if function_obj.get('notification'):
            if returns is None:
                raise Exception(_(f'function "{function_obj["full_module_name"]}.{function_obj["function"].__name__}" must returns something for {function_obj["notification"]}!'))
            notif_version, notif_message = function_obj['notification'].split('.', 1)
            if not isinstance(returns, list):
                send_returns = [returns]
@@ -495,9 +479,5 @@ class Dispatcher(register.RegisterDispatcher,
        return returns


def get_dispatcher():
    global DISPATCHER
    if DISPATCHER is None:
        DISPATCHER = Dispatcher()
        register.dispatcher = DISPATCHER
    return DISPATCHER
dispatcher = Dispatcher()
register.dispatcher = dispatcher

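The last hunk removes the `DISPATCHER` global and the lazy `get_dispatcher()` factory in favour of a module-level `dispatcher = Dispatcher()` created at import time. A minimal sketch of the before/after shape, assuming nothing beyond what the hunk shows:

```python
class Dispatcher:
    """Stand-in for risotto's Dispatcher class."""

# before: lazy factory guarded by a module-level global
DISPATCHER = None

def get_dispatcher():
    global DISPATCHER
    if DISPATCHER is None:
        DISPATCHER = Dispatcher()
    return DISPATCHER

# after: a single instance created once, when the module is imported
dispatcher = Dispatcher()
```

Callers then switch from `get_dispatcher()` to importing `dispatcher` directly, which is exactly what the controller, http and register hunks do.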
@@ -7,7 +7,7 @@ except:
from tiramisu import Config, default_storage


from .dispatcher import get_dispatcher
from .dispatcher import dispatcher
from .utils import _
from .context import Context
from .error import CallError, NotAllowedError, RegistrationError
@@ -23,13 +23,9 @@ extra_statics = {}

def create_context(request):
    risotto_context = Context()
    if 'username' in dict(request.match_info):
        username = request.match_info['username']
    elif 'username' in request.headers:
        username = request.headers['username']
    else:
        username = get_config()['http_server']['default_user']
    risotto_context.username = username
    risotto_context.username = request.match_info.get('username',
                                                      get_config()['http_server']['default_user'],
                                                      )
    return risotto_context


@@ -70,7 +66,6 @@ class extra_route_handler:
        if function_name != 'risotto.http':
            risotto_module_name, submodule_name = function_name.split('.', 2)[:-1]
            module_name = risotto_module_name.split('_')[-1]
            dispatcher = get_dispatcher()
            kwargs['self'] = dispatcher.injected_self[module_name + '.' + submodule_name]
        try:
            returns = await cls.function(**kwargs)
@@ -94,7 +89,6 @@ async def handle(request):
    risotto_context = create_context(request)
    kwargs = await request.json()
    try:
        dispatcher = get_dispatcher()
        pattern = dispatcher.messages[version][message]['pattern']
        if pattern == 'rpc':
            method = dispatcher.call
@@ -107,31 +101,15 @@ async def handle(request):
                             internal=False,
                             **kwargs,
                             )
    except NotAllowedError as err:
        raise HTTPNotFound(reason=str(err))
    except CallError as err:
        raise HTTPBadRequest(reason=str(err).replace('\n', ' '))
    except Exception as err:
        context_id = None
        if isinstance(err, NotAllowedError):
            error_type = HTTPNotFound
        elif isinstance(err, CallError):
            error_type = HTTPBadRequest
            context_id = err.context_id
        else:
            if get_config()['global']['debug']:
                print_exc()
            error_type = HTTPInternalServerError
        response = {'type': 'error',
                    'reason': str(err).replace('\n', ' '),
                    }
        if context_id is not None:
            response['context_id'] = context_id
        err = dumps({'response': response,
                     'type': 'error',
                     })
        raise error_type(text=err,
                         content_type='application/json',
                         )
    return Response(text=dumps({'response': text,
                                'type': 'success',
                                }),
        if get_config()['global']['debug']:
            print_exc()
        raise HTTPInternalServerError(reason=str(err))
    return Response(text=dumps({'response': text}),
                    content_type='application/json',
                    )

@@ -144,7 +122,6 @@ async def api(request,
    # check all URI that have an associated role
    # all URI without role is concidered has a private URI
    uris = []
    dispatcher = get_dispatcher()
    async with dispatcher.pool.acquire() as connection:
        async with connection.transaction():
            # Check role with ACL
@@ -170,7 +147,6 @@ async def get_app(loop):
    """ build all routes
    """
    global extra_routes, extra_statics
    dispatcher = get_dispatcher()
    services.link_to_dispatcher(dispatcher)
    app = Application(loop=loop)
    routes = []

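One side of the `handle()` hunk folds the separate `NotAllowedError`/`CallError` handlers into a single `except Exception` block that selects the HTTP error class and returns a JSON error body, optionally carrying a `context_id`. A hedged sketch of that mapping; `to_http_error` and its parameters are illustrative only, and the error classes are taken as aiohttp's standard web exceptions:

```python
from json import dumps
from aiohttp.web import HTTPBadRequest, HTTPInternalServerError, HTTPNotFound

def to_http_error(err: Exception, not_allowed_cls, call_error_cls) -> Exception:
    """Build an aiohttp error whose body is a JSON error payload (sketch only)."""
    context_id = None
    if isinstance(err, not_allowed_cls):
        error_type = HTTPNotFound
    elif isinstance(err, call_error_cls):
        error_type = HTTPBadRequest
        context_id = getattr(err, 'context_id', None)
    else:
        error_type = HTTPInternalServerError
    response = {'type': 'error', 'reason': str(err).replace('\n', ' ')}
    if context_id is not None:
        response['context_id'] = context_id
    return error_type(text=dumps({'response': response, 'type': 'error'}),
                      content_type='application/json')
```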
@@ -1,378 +0,0 @@
from os import listdir, walk, makedirs
from os.path import isfile, isdir, join, dirname
from yaml import load, SafeLoader
from json import load as jload, dump as jdump
from time import time
from shutil import copy2, rmtree, move
from hashlib import sha512
from subprocess import Popen

from rougail import RougailConvert, RougailConfig, RougailUpgrade
try:
    from tiramisu3 import Config
except:
    from tiramisu import Config

from .utils import _


DATASET_PATH = '/usr/share/risotto/'
TMP_DIRECTORY = '/tmp'
PACKER_TMP_DIRECTORY = join(TMP_DIRECTORY, 'packer')
PACKER_FILE_NAME = 'recipe.json'
IMAGES_DIRECTORY = join(TMP_DIRECTORY, 'images')


FUNCTIONS = b"""try:
    from tiramisu3 import valid_network_netmask, valid_ip_netmask, valid_broadcast, valid_in_network, valid_not_equal as valid_differ, valid_not_equal, calc_value
except:
    from tiramisu import valid_network_netmask, valid_ip_netmask, valid_broadcast, valid_in_network, valid_not_equal as valid_differ, valid_not_equal, calc_value
# =============================================================
# fork of risotto-setting/src/risotto_setting/config/config.py
def get_password(**kwargs):
    return 'password'


def get_ip(**kwargs):
    return '1.1.1.1'


def get_chain(**kwargs):
    return 'chain'


def get_certificates(**kwargs):
    return []


def get_certificate(**kwargs):
    return 'certificate'


def get_private_key(**kwargs):
    return 'private_key'


def get_linked_configuration(**kwargs):
    if 'test' in kwargs and kwargs['test']:
        return kwargs['test'][0]
    return 'configuration'


def zone_information(**kwargs):
    return 'zone'
# =============================================================

"""


class Images:
    def __init__(self,
                 image_dir: str=None,
                 tmp_dir: str=None,
                 ):
        if image_dir is None:
            image_dir = IMAGES_DIRECTORY
        self.image_dir = image_dir
        if isdir(self.image_dir):
            rmtree(self.image_dir)
        if tmp_dir is None:
            tmp_dir = PACKER_TMP_DIRECTORY
        self.tmp_dir = tmp_dir
        self.load_applications()

    def load_applications(self) -> None:
        self.build_images = []
        self.applications = {}
        for distrib in listdir(join(DATASET_PATH, 'seed')):
            distrib_dir = join(DATASET_PATH, 'seed', distrib, 'applicationservice')
            if not isdir(distrib_dir):
                continue
            for release in listdir(distrib_dir):
                release_dir = join(distrib_dir, release)
                if not isdir(release_dir):
                    continue
                for applicationservice in listdir(release_dir):
                    applicationservice_dir = join(release_dir, applicationservice)
                    if not isdir(applicationservice_dir):
                        continue
                    if applicationservice in self.applications:
                        raise Exception('multi applicationservice')
                    with open(join(applicationservice_dir, 'applicationservice.yml')) as yaml:
                        app = load(yaml, Loader=SafeLoader)
                    self.applications[applicationservice] = {'path': applicationservice_dir,
                                                             'yml': app,
                                                             }
                    if 'service' in app and app['service']:
                        self.build_images.append(applicationservice)

    def calc_depends(self,
                     dependencies: list,
                     appname,
                     key_is_name=False,
                     ):
        app = self.applications[appname]['yml']
        if not 'depends' in app or not app['depends']:
            return
        for dependency in app['depends']:
            if key_is_name:
                key = appname
            else:
                key = self.applications[dependency]['path']
            if key not in dependencies:
                dependencies.insert(0, key)
            self.calc_depends(dependencies, dependency, key_is_name)

    def list_oses(self):
        oses = set()
        for build in self.build_images:
            dependencies = [build]
            self.calc_depends(dependencies, build, True)
            for dependency in dependencies:
                if isdir(join(self.applications[dependency]['path'], 'packer', 'os')):
                    oses.add(dependency)
                    break
        for os in oses:
            dependencies = [self.applications[os]['path']]
            self.calc_depends(dependencies, os)
            yield os, dependencies

    def list_images(self):
        for build in self.build_images:
            dependencies = [self.applications[build]['path']]
            self.calc_depends(dependencies, build)
            yield build, dependencies

    async def build(self) -> None:
        if isdir(self.tmp_dir):
            rmtree(self.tmp_dir)
        image = Image(self.image_dir,
                      self.tmp_dir,
                      )
        print(_('Build OSes'))
        if not isdir(join(self.image_dir, 'os')):
            makedirs(join(self.image_dir, 'os'))
        for application, dependencies_path in self.list_oses():
            print(_(f'Build OS {application}'))
            await image.build_os(application,
                                 dependencies_path,
                                 )
        print(_('Build images'))
        for application, dependencies_path in self.list_images():
            print(_(f'Build image {application}'))
            await image.build_image(application,
                                    dependencies_path,
                                    )


class Image:
    def __init__(self,
                 image_dir: str,
                 tmp_dir: str,
                 ):
        self.image_dir = image_dir
        self.tmp_dir = tmp_dir

    @staticmethod
    def copy_files(dependencies_path: list,
                   dst_path: str,
                   element: str,
                   ) -> None:
        for dependency_path in dependencies_path:
            src_path = join(dependency_path,
                            'packer',
                            element,
                            )
            root_len = len(src_path) + 1
            for dir_name, subdir_names, filenames in walk(src_path):
                subdir = join(dst_path, dir_name[root_len:])
                if not isdir(subdir):
                    makedirs(subdir)
                for filename in filenames:
                    path = join(dir_name, filename)
                    sub_dst_path = join(subdir, filename)
                    if isfile(sub_dst_path):
                        raise Exception(_(f'Try to copy {sub_dst_path} which is already exists'))
                    copy2(path, sub_dst_path)

    async def load_configuration(self,
                                 dependencies_path: list,
                                 packer_tmp_directory: str,
                                 ) -> dict:
        config = RougailConfig.copy()
        dictionaries = [join(dependency_path, 'dictionaries') for dependency_path in dependencies_path if isdir(join(dependency_path, 'dictionaries'))]
        upgrade = RougailUpgrade()
        dest_dictionaries = join(packer_tmp_directory, 'dictionaries')
        makedirs(dest_dictionaries)
        dest_dictionaries_extras = join(packer_tmp_directory, 'dictionaries_extras')
        makedirs(dest_dictionaries_extras)
        for dependency_path in dependencies_path:
            dictionaries_dir = join(dependency_path, 'dictionaries')
            if isdir(dictionaries_dir):
                upgrade.load_xml_from_folders(dictionaries_dir,
                                              dest_dictionaries,
                                              RougailConfig['variable_namespace'],
                                              )
            extra_dir = join(dependency_path, 'extras', 'packer')
            if isdir(extra_dir):
                upgrade.load_xml_from_folders(extra_dir,
                                              dest_dictionaries_extras,
                                              'packer',
                                              )
        config['dictionaries_dir'] = [dest_dictionaries]
        config['extra_dictionaries'] = {'packer': [dest_dictionaries_extras]}
        self.merge_funcs(config, dependencies_path, packer_tmp_directory)
        packer_configuration = await self.get_packer_information(config, packer_tmp_directory)
        return packer_configuration

    @staticmethod
    def merge_funcs(config: RougailConfig,
                    dependencies_path: list,
                    packer_tmp_directory: str,
                    ):
        functions = FUNCTIONS
        for dependency_path in dependencies_path:
            funcs_dir = join(dependency_path, 'funcs')
            if not isdir(funcs_dir):
                continue
            for func in listdir(funcs_dir):
                with open(join(funcs_dir, func), 'rb') as fh:
                    functions += fh.read()
        func_name = join(packer_tmp_directory, 'func.py')
        with open(func_name, 'wb') as fh:
            fh.write(functions)
        config['functions_file'] = func_name

    @staticmethod
    async def get_packer_information(config: RougailConfig,
                                     packer_tmp_directory: str,
                                     ) -> dict:
        eolobj = RougailConvert(config)
        xml = eolobj.save(join(packer_tmp_directory, 'tiramisu.py'))
        optiondescription = {}
        exec(xml, None, optiondescription)
        config = await Config(optiondescription['option_0'])
        return await config.option('packer').value.dict(leader_to_list=True, flatten=True)

    @staticmethod
    def do_recipe_checksum(path: str,
                           ) -> str:
        files = []
        root_len = len(path) + 1
        for dir_name, subdir_names, filenames in walk(path):
            subpath = dir_name[root_len:]
            for filename in filenames:
                with open(join(dir_name, filename), 'rb') as fh:
                    ctl_sum = sha512(fh.read()).hexdigest()
                abs_path = join(subpath, filename)
                files.append(f'{abs_path}/{ctl_sum}')
        files.sort()
        print(files, sha512('\n'.join(files).encode()).hexdigest())
        return sha512('\n'.join(files).encode()).hexdigest()

    def get_tmp_directory(self,
                          application: str,
                          ) -> str:
        return join(self.tmp_dir,
                    application + '_' + str(time()),
                    )

    def get_os_filename(self,
                        packer_configuration: dict,
                        ) -> str:
        return join(self.image_dir,
                    'os',
                    packer_configuration['os_name'] + '_' + packer_configuration['os_version'] + '.img',
                    )

    def get_image_filename(self,
                           recipe_checksum: str,
                           ) -> str:
        return join(self.image_dir,
                    f'{recipe_checksum}.img',
                    )

    async def build_os(self,
                       application: str,
                       dependencies_path: list,
                       ) -> None:
        packer_tmp_directory = self.get_tmp_directory(application)
        packer_configuration = await self.load_configuration(dependencies_path, packer_tmp_directory)
        packer_dst_os_filename = self.get_os_filename(packer_configuration)
        self.copy_files(dependencies_path,
                        packer_tmp_directory,
                        'os',
                        )
        packer_configuration['tmp_directory'] = packer_tmp_directory
        recipe = {'variables': packer_configuration}
        self.build(packer_dst_os_filename,
                   packer_tmp_directory,
                   recipe,
                   )

    async def build_image(self,
                          application: str,
                          dependencies_path: list,
                          ) -> None:
        packer_tmp_directory = self.get_tmp_directory(application)
        makedirs(packer_tmp_directory)
        self.copy_files(dependencies_path,
                        packer_tmp_directory,
                        'image',
                        )
        recipe_checksum = self.do_recipe_checksum(packer_tmp_directory)
        packer_dst_filename = self.get_image_filename(recipe_checksum)
        packer_configuration = await self.load_configuration(dependencies_path, packer_tmp_directory)
        packer_dst_os_filename = join(self.image_dir,
                                      'os',
                                      packer_configuration['os_name'] + '_' + packer_configuration['os_version'] + '.img',
                                      )
        packer_configuration['tmp_directory'] = packer_tmp_directory
        recipe = {'variables': packer_configuration}
        recipe['variables']['iso_url'] = packer_dst_os_filename
        self.build(packer_dst_filename,
                   packer_tmp_directory,
                   recipe,
                   f'{packer_dst_os_filename}.sha256',
                   )

    @staticmethod
    def build(packer_dst_filename: str,
              tmp_directory: str,
              recipe: dict,
              sha_file: str=None,
              ) -> None:
        packer_filename = join(tmp_directory, PACKER_FILE_NAME)
        if sha_file is not None:
            with open(sha_file, 'r') as fh:
                sha256 = fh.read().split(' ', 1)[0]
            recipe['variables']['iso_checksum'] = sha256
        with open(packer_filename, 'r') as recipe_fd:
            for key, value in jload(recipe_fd).items():
                recipe[key] = value
        with open(packer_filename, 'w') as recipe_fd:
            jdump(recipe, recipe_fd, indent=2)
        preprocessors = join(tmp_directory, 'preprocessors')
        if isfile(preprocessors):
            proc = Popen([preprocessors],
                         #stdout=PIPE,
                         #stderr=PIPE,
                         cwd=tmp_directory,
                         )
            proc.wait()
            if proc.returncode:
                raise Exception(_(f'error when executing {preprocessors}'))
        proc = Popen(['packer', 'build', packer_filename],
                     #stdout=PIPE,
                     #stderr=PIPE,
                     cwd=tmp_directory,
                     )
        proc.wait()
        if proc.returncode:
            raise Exception(_(f'cannot build {packer_dst_filename} with {packer_filename}'))
        if not isdir(dirname(packer_dst_filename)):
            makedirs(dirname(packer_dst_filename))
        move(join(tmp_directory, 'image.img'), packer_dst_filename)
        move(join(tmp_directory, 'image.sha256'), f'{packer_dst_filename}.sha256')
        print(_(f'Image {packer_dst_filename} created'))
        rmtree(tmp_directory)

@@ -2,57 +2,35 @@ from typing import Dict, Any, Optional
from json import dumps, loads
from asyncpg.exceptions import UndefinedTableError
from datetime import datetime
from asyncio import Lock

from .context import Context
from .utils import _
from .config import get_config

database_lock = Lock()


LEVELS = ['Error', 'Info', 'Success', 'Started', 'Failure']


class Logger:
    """ An object to manager log
    """
    def __init__(self) -> None:
        self.log_connection = None

    async def get_connection(self,
                             risotto_context: Context,
                             ):
        if not self.log_connection:
            self.log_connection = await risotto_context.pool.acquire()
            await self.log_connection.set_type_codec(
                'json',
                encoder=dumps,
                decoder=loads,
                schema='pg_catalog'
            )
        return self.log_connection

    async def insert(self,
                     msg: str,
                     uri: str,
                     uris: str,
                     risotto_context: Context,
                     level: str,
                     kwargs: Any=None,
                     data: Any=None,
                     start: bool=False,
                     ) -> None:
        uri = self._get_last_uri(risotto_context)
        uris = " ".join(risotto_context.paths)
        insert = 'INSERT INTO RisottoLog(Msg, URI, URIS, UserLogin, Status'
        insert = 'INSERT INTO RisottoLog(Msg, URI, URIS, UserLogin, Level'
        values = 'VALUES($1,$2,$3,$4,$5'
        args = [msg, uri, uris, risotto_context.username, LEVELS.index(level)]
        if kwargs:
            insert += ', Kwargs'
        args = [msg, uri, uris, risotto_context.username, level]
        if data:
            insert += ', Data'
            values += ',$6'
            args.append(dumps(kwargs))
            args.append(dumps(data))
        context_id = risotto_context.context_id
        if context_id is not None:
            insert += ', ContextId'
            if kwargs:
            if data:
                values += ',$7'
            else:
                values += ',$6'
@@ -60,9 +38,7 @@ class Logger:

        sql = insert + ') ' + values + ') RETURNING LogId'
        try:
            async with database_lock:
                connection = await self.get_connection(risotto_context)
                log_id = await connection.fetchval(sql, *args)
            log_id = await risotto_context.connection.fetchval(sql, *args)
            if context_id is None and start:
                risotto_context.context_id = log_id
            if start:
@@ -70,6 +46,34 @@ class Logger:
        except UndefinedTableError as err:
            raise Exception(_(f'cannot access to database ({err}), was the database really created?'))

    async def query(self,
                    risotto_context: Context,
                    context_id: int,
                    uri: Optional[str],
                    ) -> list:
        sql = '''SELECT Msg as msg, URI as uri_name, URIS as uris, UserLogin as user_login, Level as level, Data as data, StartDate as start_date, StopDate as stop_date
                 FROM RisottoLog
                 WHERE UserLogin = $1 AND (LogId = $2 OR ContextId = $2)
              '''
        args = [sql, risotto_context.username, context_id]
        if uri is not None:
            sql += ' AND URI = $3'
            args.append(uri)
        ret = []
        for row in await risotto_context.connection.fetch(*args):
            d = {}
            for key, value in row.items():
                if key == 'data':
                    if not value:
                        value = {}
                    else:
                        value = loads(value)
                elif key in ['start_date', 'stop_date']:
                    value = str(value)
                d[key] = value
            ret.append(d)
        return ret

    def _get_last_uri(self,
                      risotto_context: Context,
                      ) -> str:
@@ -105,6 +109,8 @@ class Logger:
        paths_msg = self._get_message_paths(risotto_context)
        print(_(f'{risotto_context.username}: ERROR: {error} ({paths_msg} with arguments "{arguments}": {msg})'))
        await self.insert(msg,
                          self._get_last_uri(risotto_context),
                          paths_msg,
                          risotto_context,
                          'Error',
                          arguments,
@@ -121,6 +127,8 @@ class Logger:
        if get_config()['global']['debug']:
            print(_(f'{risotto_context.username}: INFO:{paths_msg}: {msg}'))
        await self.insert(msg,
                          self._get_last_uri(risotto_context),
                          paths_msg,
                          risotto_context,
                          'Info',
                          arguments,
@@ -133,14 +141,12 @@ class Logger:
                    ) -> None:
        paths_msg = self._get_message_paths(risotto_context)
        if get_config()['global']['debug']:
            if risotto_context.context_id != None:
                context = f'({risotto_context.context_id})'
            else:
                context = ''
            print(_(f'{risotto_context.username}: START{context}:{paths_msg}: {msg}'))
            print(_(f'{risotto_context.username}: START:{paths_msg}: {msg}'))
        await self.insert(msg,
                          self._get_last_uri(risotto_context),
                          paths_msg,
                          risotto_context,
                          'Started',
                          'Start',
                          arguments,
                          start=True,
                          )
@@ -151,24 +157,22 @@ class Logger:
                      ) -> None:
        if get_config()['global']['debug']:
            paths_msg = self._get_message_paths(risotto_context)
            print(_(f'{risotto_context.username}: SUCCESS({risotto_context.context_id}):{paths_msg}'))
            print(_(f'{risotto_context.username}: SUCCESS:{paths_msg}({risotto_context.context_id})'))
        sql = """UPDATE RisottoLog
                 SET StopDate = $2,
                     Status = $3
                     Level = 'SUCCESS'
              """
        args = [datetime.now(), LEVELS.index('Success')]
        args = [datetime.now()]
        if returns:
            sql += """, Returns = $4
            sql += """, Returns = $3
                   """
            args.append(dumps(returns))
        sql += """WHERE LogId = $1
               """
        async with database_lock:
            connection = await self.get_connection(risotto_context)
            await connection.execute(sql,
                                     risotto_context.start_id,
                                     *args,
                                     )
        await risotto_context.connection.execute(sql,
                                                 risotto_context.start_id,
                                                 *args,
                                                 )

    async def failed(self,
                     risotto_context: Context,
@@ -176,25 +180,18 @@ class Logger:
                     ) -> None:
        if get_config()['global']['debug']:
            paths_msg = self._get_message_paths(risotto_context)
            if risotto_context.context_id != None:
                context = f'({risotto_context.context_id})'
            else:
                context = ''
            print(_(f'{risotto_context.username}: FAILED({risotto_context.context_id}):{paths_msg}: {err}'))
            print(_(f'{risotto_context.username}: FAILED:{paths_msg}({risotto_context.context_id}): err'))
        sql = """UPDATE RisottoLog
                 SET StopDate = $2,
                     Status = $4,
                     Level = 'FAILED',
                     Msg = $3
                 WHERE LogId = $1
              """
        async with database_lock:
            connection = await self.get_connection(risotto_context)
            await connection.execute(sql,
                                     risotto_context.start_id,
                                     datetime.now(),
                                     err[:254],
                                     LEVELS.index('Failure'),
                                     )
        await risotto_context.connection.execute(sql,
                                                 risotto_context.start_id,
                                                 datetime.now(),
                                                 err,
                                                 )

    async def info(self,
                   risotto_context,
@@ -203,6 +200,8 @@ class Logger:
        if get_config()['global']['debug']:
            print(msg)
        await self.insert(msg,
                          '',
                          None,
                          risotto_context,
                          'Info',
                          )

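One side of these logger hunks stores the log level as an integer `Status` column indexed into the module-level `LEVELS` list, while the other keeps a textual `Level` column. A minimal illustration of the index mapping used by `LEVELS.index(level)`, independent of which side of the diff ends up applied:

```python
LEVELS = ['Error', 'Info', 'Success', 'Started', 'Failure']

def level_to_int(level: str) -> int:
    # raises ValueError if the level is not declared in LEVELS
    return LEVELS.index(level)

def int_to_level(status: int) -> str:
    return LEVELS[status]

assert level_to_int('Success') == 2
assert int_to_level(2) == 'Success'
```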
@@ -19,8 +19,8 @@ from .utils import _

MESSAGE_ROOT_PATH = get_config()['global']['message_root_path']
groups.addgroup('message')
CUSTOMTYPES = None
MESSAGE_TRANSLATION = None
MESSAGE_TRANSLATION = translation('risotto-message', join(MESSAGE_ROOT_PATH, '..', 'locale')).gettext



class DictOption(Option):
@@ -313,7 +313,6 @@ class CustomParam:
                'string': 'String',
                'number': 'Number',
                'object': 'Dict',
                'any': 'Any',
                'array': 'Array',
                'file': 'File',
                'float': 'Float'}
@@ -449,7 +448,6 @@ def _get_option(name,
                                             'reverse_condition': ParamValue(True)}),
                                  calc_value_property_help))

        props.append('notunique')
    description = arg.description.strip().rstrip()
    kwargs = {'name': name,
              'doc': _get_description(description, name),
@@ -525,7 +523,6 @@ def _parse_responses(message_def,
                  'Number': IntOption,
                  'Boolean': BoolOption,
                  'Dict': DictOption,
                  'Any': AnyOption,
                  'Float': FloatOption,
                  # FIXME
                  'File': StrOption}.get(type_)
@@ -533,9 +530,8 @@ def _parse_responses(message_def,
            raise Exception(f'unknown param type {obj.type} in responses of message {message_def.message}')
        if hasattr(obj, 'default'):
            kwargs['default'] = obj.default
            kwargs['properties'] = ('notunique',)
        else:
            kwargs['properties'] = ('mandatory', 'notunique')
            kwargs['properties'] = ('mandatory',)
        options.append(option(**kwargs))
    od = OptionDescription(uri,
                           message_def.response.description,
@@ -593,11 +589,6 @@ def get_messages(current_module_names,
                 ):
    """generate description from yml files
    """
    global MESSAGE_TRANSLATION, CUSTOMTYPES
    if MESSAGE_TRANSLATION is None:
        MESSAGE_TRANSLATION = translation('risotto-message', join(MESSAGE_ROOT_PATH, '..', 'locale')).gettext
    if CUSTOMTYPES is None:
        CUSTOMTYPES = load_customtypes()
    optiondescriptions = {}
    optiondescriptions_info = {}
    messages = list(list_messages(uris,
@@ -609,7 +600,7 @@ def get_messages(current_module_names,
    select_option = ChoiceOption('message',
                                 'Nom du message.',
                                 tuple(messages),
                                 properties=frozenset(['mandatory', 'positional', 'notunique']))
                                 properties=frozenset(['mandatory', 'positional']))
    for uri in messages:
        message_def = get_message(uri,
                                  current_module_names,
@@ -637,3 +628,6 @@ def get_messages(current_module_names,
                             optiondescriptions,
                             )
    return optiondescriptions_info, root


CUSTOMTYPES = load_customtypes()

@@ -83,16 +83,12 @@ def register(uris: str,
        uris = [uris]

    def decorator(function):
        try:
            for uri in uris:
                dispatcher.set_function(uri,
                                        notification,
                                        function,
                                        function.__module__
                                        )
        except NameError:
            # if you when register uri, please use get_dispatcher before registered uri
            pass
        for uri in uris:
            dispatcher.set_function(uri,
                                    notification,
                                    function,
                                    function.__module__
                                    )
    return decorator


@@ -111,7 +107,6 @@ class RegisterDispatcher:
        version = obj['version']
        if version not in self.messages:
            self.messages[version] = {}
        obj['message'] = tiramisu_message
        self.messages[version][tiramisu_message] = obj

    def get_function_args(self,
@@ -318,7 +313,6 @@ class RegisterDispatcher:
            risotto_context.username = internal_user
            risotto_context.paths.append(f'internal.{submodule_name}.on_join')
            risotto_context.type = None
            risotto_context.pool = self.pool
            risotto_context.connection = connection
            risotto_context.module = submodule_name.split('.', 1)[0]
            info_msg = _(f'in function risotto_{submodule_name}.on_join')

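The first register hunk appears to drop the `try/except NameError` guard around `dispatcher.set_function(...)`, so registration now assumes the module-level dispatcher already exists when the decorator runs. A small, purely illustrative sketch of a URI-registration decorator in that spirit; `registry` and `register_uri` are hypothetical names, not risotto's API:

```python
# Hypothetical sketch: the decorator writes straight into the registry and
# fails loudly if it is missing, instead of swallowing a NameError.
registry = {}

def register_uri(uri: str):
    def decorator(function):
        registry[uri] = function
        return function
    return decorator

@register_uri('v1.session.start')
def start_session(**kwargs):
    return kwargs
```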
@@ -23,5 +23,5 @@ def tiramisu_display_name(kls,
    if suffix:
        doc += suffix
    if name != doc:
        name += f'" "{doc}'
        name += f' ({doc})'
    return name