Compare commits: 299a9f52a0 ... pkg/dev/ri (116 commits)
SHA1 | Author | Date | |
---|---|---|---|
939ecdbe85 | |||
4853bb47f0 | |||
8566751809 | |||
9d4644bedf | |||
4d2439c250 | |||
c0244eac8c | |||
fcd137ae6f | |||
b6c5dccf17 | |||
9423cdef8d | |||
ef43b197a1 | |||
fc5a13152b | |||
94b6563d8f | |||
617ef55dd3 | |||
88c2c168ac | |||
8c5a2bb702 | |||
09cd0a4e4c | |||
92846052b1 | |||
3085bf67d6 | |||
c63170be1d | |||
1063d2e735 | |||
2a98575790 | |||
ed51bc483d | |||
52209a5ebd | |||
0442e772c2 | |||
b0edfb7b01 | |||
27031dbf0e | |||
cb4dde1dc4 | |||
9ebe79d533 | |||
c740ec3fe3 | |||
4c83e6d89d | |||
19240489db | |||
30a267bf4a | |||
56b1f12a4a | |||
f88bcef5c0 | |||
4fc3e74bbd | |||
5663b2768b | |||
83d74c2b06 | |||
01834c6ba7 | |||
6a27b002ff | |||
8fdc34c4d3 | |||
e2d73932c0 | |||
980a119ef9 | |||
f623feb8a8 | |||
b9da2ce686 | |||
46f8a4323b | |||
941261c830 | |||
6c4bbb3dca | |||
98c77bf719 | |||
279e3a7c4c | |||
1b9d87fa53 | |||
13c7d5816c | |||
0e988d7040 | |||
a89e512266 | |||
be97d757d9 | |||
7afccab9b1 | |||
19d90fd9bc | |||
5653de1e99 | |||
c84b9435b0 | |||
e664dd6174 | |||
399bfb9ab6 | |||
3823eedd02 | |||
a12e679b3c | |||
32b83f496b | |||
dc7d081608 | |||
ca101cf094 | |||
c309ebbd56 | |||
a64131fb03 | |||
e787eb2ef5 | |||
10637656fa | |||
234b82b459 | |||
d3a5c99e51 | |||
c9e0bcbbfe | |||
47e4976f54 | |||
dd33ea5b8f | |||
689df4ec23 | |||
223fb9aaf3 | |||
c3d25b4aff | |||
bed27a1e58 | |||
40eff91684 | |||
46ea792c5e | |||
5708cb1ea9 | |||
664a2404fa | |||
9741669487 | |||
2b12c9bd0d | |||
4ad337b965 | |||
b182010bad | |||
191797d239 | |||
99168f5715 | |||
1e88a5eab0 | |||
8eadc93c95 | |||
57e3b955e5 | |||
75f1a0a230 | |||
e472282a33 | |||
93b61176d8 | |||
e475dffc95 | |||
b2151c41bc | |||
197a9e82a2 | |||
e58c61b76a | |||
867de5f56e | |||
d424c9cd0b | |||
c442abe674 | |||
f9a344e7a6 | |||
9d1e72cdd8 | |||
eef7f02512 | |||
963fe2a9b6 | |||
efb8f22872 | |||
db8bdf4a6d | |||
d6bbcaa65c | |||
8bec9c7ce1 | |||
cc6dd3efe3 | |||
892e052969 | |||
5103b7bd28 | |||
d1cf1df4b1 | |||
5fdca08c4d | |||
27f145cd6d | |||
b39aac8fd2 |
25 README.md
@@ -29,23 +29,26 @@ psql -U postgres -h localhost -c "GRANT ALL ON DATABASE risotto TO risotto;"
 #psql -U postgres -h localhost -c "CREATE EXTENSION hstore;" risotto
 ```
 
-Gestion de la base de données avec Sqitch
+Gestion de la base de données
 
-```
-cpanm --quiet --notest App::Sqitch
-sqitch init risotto --uri https://forge.cadoles.com/Infra/risotto --engine pg
-```
+La plupart des services se greffant à risotto nécessite une table dans la base de données risotto.
+Chacun de ces services documente la structure de la table mais ne se charge pas de sa création.
+La création de la table, selon le schéma fournit dans la documentation, est à la charge de l'administrateur du système.
 
-Commande :
 
 # Empty database:
-su - postgres
-psql -U postgres risotto
-drop table log; drop table userrole; drop table release; drop table source; drop table server; drop table servermodel; drop table applicationservice; drop table roleuri; drop table risottouser; drop table uri;
 
+````
+psql -U postgres
+drop database risotto;
+drop user risotto;
+\q
+reconfigure
+```
 
+```
 psql -U postgres tiramisu
 drop table value; drop table property; drop table permissive; drop table information; drop table session;
+```
 
 # Import EOLE
 ./script/cucchiaiata source.create -n eole -u http://localhost
@@ -1,6 +1,10 @@
 Message
 =======
 
-message: config.session.server.start
-version: v1
 uri: v1.config.session.server.start
+version: v1
+module: config
+submodule: session
+message: config.session.server.start
+submessage: session.server.start
+subsubmessage: server.start
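The hunk above replaces the single `uri` key with explicit `version`, `module`, `submodule`, `message`, `submessage` and `subsubmessage` keys, all of which are fragments of the dotted URI. A minimal sketch of that decomposition, as a hypothetical helper that is not part of the repository:

```python
def split_message_uri(uri: str) -> dict:
    """Decompose 'v1.config.session.server.start' into the fields
    introduced in the hunk above."""
    version, message = uri.split('.', 1)           # 'v1', 'config.session.server.start'
    parts = message.split('.')                     # ['config', 'session', 'server', 'start']
    return {
        'uri': uri,
        'version': version,                        # v1
        'module': parts[0],                        # config
        'submodule': parts[1],                     # session
        'message': message,                        # config.session.server.start
        'submessage': '.'.join(parts[1:]),         # session.server.start
        'subsubmessage': '.'.join(parts[2:]),      # server.start
    }
```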
662 debian/agpl-3 (vendored, new file)
@@ -0,0 +1,662 @@
[Adds the full text of the GNU Affero General Public License, version 3, 19 November 2007.]
1 debian/compat (vendored, new file)
@@ -0,0 +1 @@
+11
24 debian/control (vendored, new file)
@@ -0,0 +1,24 @@
+Source: risotto
+Section: admin
+Priority: extra
+Maintainer: Cadoles <contact@cadoles.com>
+Build-depends: debhelper (>=11), python3-all, python3-setuptools, dh-python
+Standards-Version: 3.9.4
+Homepage: https://forge.cadoles.com/Infra/risotto
+
+Package: python3-risotto
+Architecture: any
+Pre-Depends: dpkg, python3, ${misc:Pre-Depends}
+Depends: ${python:Depends}, ${misc:Depends},
+ python3-asyncpg,
+ python3-rougail,
+ python3-aiohttp,
+ python3-sdnotify
+Description: configuration manager libraries
+
+Package: risotto
+Architecture: any
+Pre-Depends: dpkg, python3, ${misc:Pre-Depends}
+Depends: ${python:Depends}, ${misc:Depends}, python3-risotto
+Description: configuration manager
+
22 debian/copyright (vendored, new file)
@@ -0,0 +1,22 @@
+Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
+Upstream-Name: risotto
+Upstream-Contact: Cadoles <contact@cadoles.com>
+Source: https://forge.cadoles.com/Infra/risotto
+
+Files: *
+Copyright: 2019-2020 Cadoles <contact@cadoles.com>
+License: AGPL-3+
+
+License: AGPL-3+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+ .
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+ .
+ You should have received a copy of the GNU Affero General Public License
+ along with this program. If not, see <https://www.gnu.org/licenses/>.
2 debian/risotto.install (vendored, new file)
@@ -0,0 +1,2 @@
+script/risotto-server usr/bin/
+sql/risotto.sql usr/share/eole/db/eole-risotto/gen/
10 debian/rules (vendored, new file)
@@ -0,0 +1,10 @@
+#!/usr/bin/make -f
+# See debhelper(7) (uncomment to enable)
+# output every command that modifies files on the build system.
+#DH_VERBOSE = 1
+
+export PYBUILD_NAME = risotto
+export PYBUILD_DISABLE_python3 = test
+
+%:
+	dh $@ --with python3 --buildsystem=pybuild
1 debian/source/format (vendored, new file)
@@ -0,0 +1 @@
+3.0 (quilt)
@@ -1,110 +1,34 @@
 import asyncpg
 import asyncio
+from os import listdir
+from os.path import isdir, join
+from sys import exit
 
 
+from risotto.utils import _
 from risotto.config import get_config
 
-VERSION_INIT = """
--- Source
-CREATE TABLE Source (
-    SourceId SERIAL PRIMARY KEY,
-    SourceName VARCHAR(255) NOT NULL UNIQUE,
-    SourceURL TEXT
-);
-
--- Release
-CREATE TABLE Release (
-    ReleaseId SERIAL PRIMARY KEY,
-    ReleaseName VARCHAR(255) NOT NULL,
-    ReleaseSourceId INTEGER NOT NULL,
-    ReleaseDistribution VARCHAR(20) CONSTRAINT releasedistribution_choice CHECK (ReleaseDistribution IN ('last', 'n-1', 'n-2')),
-    UNIQUE (ReleaseName, ReleaseSourceId),
-    UNIQUE (ReleaseDistribution, ReleaseSourceId),
-    FOREIGN KEY (ReleaseSourceId) REFERENCES Source(SourceId)
-);
-
--- Servermodel
-CREATE TABLE Servermodel (
-    ServermodelId SERIAL PRIMARY KEY,
-    ServermodelName VARCHAR(255) NOT NULL,
-    ServermodelDescription VARCHAR(255) NOT NULL,
-    ServermodelParentsId INTEGER [] DEFAULT '{}',
-    ServermodelReleaseId INTEGER NOT NULL,
-    ServermodelApplicationserviceId INTEGER NOT NULL,
-    UNIQUE (ServermodelName, ServermodelReleaseId)
-);
-CREATE INDEX ServermodelApplicationserviceId_index ON Servermodel (ServermodelApplicationserviceId);
-
--- Applicationservice
-CREATE TABLE Applicationservice (
-    ApplicationserviceId SERIAL PRIMARY KEY,
-    ApplicationserviceName VARCHAR(255) NOT NULL,
-    ApplicationserviceDescription VARCHAR(255) NOT NULL,
-    ApplicationserviceReleaseId INTEGER NOT NULL,
-    UNIQUE (ApplicationserviceName, ApplicationserviceReleaseId)
-);
-CREATE TABLE ApplicationserviceDependency (
-    ApplicationserviceId INTEGER NOT NULL,
-    ApplicationserviceDependencyId INTEGER NOT NULL,
-    UNIQUE(ApplicationserviceId, ApplicationserviceDependencyId),
-    FOREIGN KEY (ApplicationserviceId) REFERENCES Applicationservice(ApplicationserviceId),
-    FOREIGN KEY (ApplicationserviceDependencyId) REFERENCES Applicationservice(ApplicationserviceId)
-);
-
--- Server
-CREATE TABLE Server (
-    ServerId SERIAL PRIMARY KEY,
-    ServerName VARCHAR(255) NOT NULL UNIQUE,
-    ServerDescription VARCHAR(255) NOT NULL,
-    ServerServermodelId INTEGER NOT NULL
-);
-
--- User, Role and ACL
-CREATE TABLE RisottoUser (
-    UserId SERIAL PRIMARY KEY,
-    UserLogin VARCHAR(100) NOT NULL UNIQUE,
-    UserName VARCHAR(100) NOT NULL,
-    UserSurname VARCHAR(100) NOT NULL
-);
-
-CREATE TABLE UserRole (
-    RoleId SERIAL PRIMARY KEY,
-    RoleUserId INTEGER NOT NULL,
-    RoleName VARCHAR(255) NOT NULL,
-    RoleAttribute VARCHAR(255),
-    RoleAttributeValue VARCHAR(255),
-    FOREIGN KEY (RoleUserId) REFERENCES RisottoUser(UserId)
-);
-
-CREATE TABLE URI (
-    URIId SERIAL PRIMARY KEY,
-    URIName VARCHAR(255) NOT NULL UNIQUE
-);
-
-CREATE TABLE RoleURI (
-    RoleName VARCHAR(255) NOT NULL,
-    URIId INTEGER NOT NULL,
-    FOREIGN KEY (URIId) REFERENCES URI(URIId),
-    PRIMARY KEY (RoleName, URIId)
-);
-
--- Log
-CREATE TABLE log(
-    Msg VARCHAR(255) NOT NULL,
-    Level VARCHAR(10) NOT NULL,
-    Path VARCHAR(255),
-    Username VARCHAR(100) NOT NULL,
-    Data JSON,
-    Date timestamp DEFAULT current_timestamp
-);
-"""
 
 
 async def main():
+    sql_dir = get_config()['global']['sql_dir']
+    if not isdir(sql_dir):
+        print('no sql file to import')
+        exit()
     db_conf = get_config()['database']['dsn']
     pool = await asyncpg.create_pool(db_conf)
     async with pool.acquire() as connection:
         async with connection.transaction():
-            returns = await connection.execute(VERSION_INIT)
+            for filename in listdir(sql_dir):
+                if filename.endswith('.sql'):
+                    sql_filename = join(sql_dir, filename)
+                    with open(sql_filename, 'r') as sql:
+                        try:
+                            await connection.execute(sql.read())
+                        except Exception as err:
+                            print(_(f'unable to import {filename}: {err}'))
+                            exit(1)
 
 
 if __name__ == '__main__':
     loop = asyncio.get_event_loop()
     loop.run_until_complete(main())
-    # asyncio.run(main())
5 script/server.py → script/risotto-server (normal file → executable file)
@@ -1,10 +1,15 @@
+#!/usr/bin/env python3
+from sdnotify import SystemdNotifier
 from asyncio import get_event_loop
 from risotto import get_app
 
 
 if __name__ == '__main__':
+    notifier = SystemdNotifier()
     loop = get_event_loop()
     loop.run_until_complete(get_app(loop))
+    print('HTTP server ready')
+    notifier.notify("READY=1")
     try:
         loop.run_forever()
     except KeyboardInterrupt:
8 setup.py (new file)
@@ -0,0 +1,8 @@
+from setuptools import setup, find_packages
+
+setup(
+    name='risotto',
+    version='0.1',
+    packages=['risotto' ],
+    package_dir={"": "src"},
+)
16 sql/risotto.sql (new file)
@@ -0,0 +1,16 @@
+CREATE TABLE RisottoLog(
+    LogId SERIAL PRIMARY KEY,
+    ContextId INTEGER,
+    Msg VARCHAR(255) NOT NULL,
+    URI VARCHAR(255),
+    URIS VARCHAR(255),
+    UserLogin VARCHAR(100) NOT NULL,
+    Status INTEGER NOT NULL,
+    Kwargs JSON,
+    Returns JSON,
+    StartDate timestamp DEFAULT current_timestamp,
+    StopDate timestamp
+);
+CREATE INDEX RisottoLog_ContextId_index ON RisottoLog(ContextId);
+CREATE INDEX RisottoLog_Login_index ON RisottoLog(UserLogin);
+CREATE INDEX RisottoLog_URI_index ON RisottoLog(URI);
@ -1,4 +1,3 @@
|
|||||||
from .http import get_app
|
from .http import get_app, services
|
||||||
|
|
||||||
__ALL__ = ('get_app',)
|
|
||||||
|
|
||||||
|
__ALL__ = ('get_app', 'services')
|
||||||
|
@ -1,33 +1,194 @@
|
|||||||
from os import environ
|
from os import environ
|
||||||
|
from os.path import isfile
|
||||||
|
from configobj import ConfigObj
|
||||||
|
from uuid import uuid4
|
||||||
|
|
||||||
|
|
||||||
CONFIGURATION_DIR = 'configurations'
|
CONFIG_FILE = environ.get('CONFIG_FILE', '/etc/risotto/risotto.conf')
|
||||||
TMP_DIR = 'tmp'
|
|
||||||
DEFAULT_USER = environ.get('DEFAULT_USER', 'Anonymous')
|
|
||||||
DEFAULT_DSN = environ.get('RISOTTO_DSN', 'postgres:///risotto?host=/var/run/postgresql/&user=risotto')
|
if isfile(CONFIG_FILE):
|
||||||
DEFAULT_TIRAMISU_DSN = environ.get('DEFAULT_TIRAMISU_DSN', 'postgres:///tiramisu?host=/var/run/postgresql/&user=tiramisu')
|
config = ConfigObj(CONFIG_FILE)
|
||||||
MESSAGE_PATH = environ.get('MESSAGE_PATH', '/root/risotto-message/messages')
|
else:
|
||||||
MODULE_NAME = environ.get('MODULE_NAME', 'test')
|
config = {}
|
||||||
|
|
||||||
|
|
||||||
|
if 'RISOTTO_PORT' in environ:
|
||||||
|
RISOTTO_PORT = environ['RISOTTO_PORT']
|
||||||
|
else:
|
||||||
|
RISOTTO_PORT = config.get('RISOTTO_PORT', 8080)
|
||||||
|
if 'RISOTTO_URL' in environ:
|
||||||
|
RISOTTO_URL = environ['RISOTTO_URL']
|
||||||
|
else:
|
||||||
|
RISOTTO_URL = config.get('RISOTTO_URL', 'http://localhost:8080/')
|
||||||
|
if 'CONFIGURATION_DIR' in environ:
|
||||||
|
CONFIGURATION_DIR = environ['CONFIGURATION_DIR']
|
||||||
|
else:
|
||||||
|
CONFIGURATION_DIR = config.get('CONFIGURATION_DIR', '/srv/risotto/configurations')
|
||||||
|
if 'DEFAULT_USER' in environ:
|
||||||
|
DEFAULT_USER = environ['DEFAULT_USER']
|
||||||
|
else:
|
||||||
|
DEFAULT_USER = config.get('DEFAULT_USER', 'Anonymous')
|
||||||
|
if 'RISOTTO_DB_NAME' in environ:
|
||||||
|
RISOTTO_DB_NAME = environ['RISOTTO_DB_NAME']
|
||||||
|
else:
|
||||||
|
RISOTTO_DB_NAME = config.get('RISOTTO_DB_NAME', 'risotto')
|
||||||
|
if 'RISOTTO_DB_PASSWORD' in environ:
|
||||||
|
RISOTTO_DB_PASSWORD = environ['RISOTTO_DB_PASSWORD']
|
||||||
|
else:
|
||||||
|
RISOTTO_DB_PASSWORD = config.get('RISOTTO_DB_PASSWORD', 'risotto')
|
||||||
|
if 'RISOTTO_DB_USER' in environ:
|
||||||
|
RISOTTO_DB_USER = environ['RISOTTO_DB_USER']
|
||||||
|
else:
|
||||||
|
RISOTTO_DB_USER = config.get('RISOTTO_DB_USER', 'risotto')
|
||||||
|
if 'TIRAMISU_DB_NAME' in environ:
|
||||||
|
TIRAMISU_DB_NAME = environ['TIRAMISU_DB_NAME']
|
||||||
|
else:
|
||||||
|
TIRAMISU_DB_NAME = config.get('TIRAMISU_DB_NAME', 'tiramisu')
|
||||||
|
if 'TIRAMISU_DB_PASSWORD' in environ:
|
||||||
|
TIRAMISU_DB_PASSWORD = environ['TIRAMISU_DB_PASSWORD']
|
||||||
|
else:
|
||||||
|
TIRAMISU_DB_PASSWORD = config.get('TIRAMISU_DB_PASSWORD', 'tiramisu')
|
||||||
|
if 'TIRAMISU_DB_USER' in environ:
|
||||||
|
TIRAMISU_DB_USER = environ['TIRAMISU_DB_USER']
|
||||||
|
else:
|
||||||
|
TIRAMISU_DB_USER = config.get('TIRAMISU_DB_USER', 'tiramisu')
|
||||||
|
if 'CELERYRISOTTO_DB_NAME' in environ:
|
||||||
|
CELERYRISOTTO_DB_NAME = environ['CELERYRISOTTO_DB_NAME']
|
||||||
|
else:
|
||||||
|
CELERYRISOTTO_DB_NAME = config.get('CELERYRISOTTO_DB_NAME', None)
|
||||||
|
if 'CELERYRISOTTO_DB_PASSWORD' in environ:
|
||||||
|
CELERYRISOTTO_DB_PASSWORD = environ['CELERYRISOTTO_DB_PASSWORD']
|
||||||
|
else:
|
||||||
|
CELERYRISOTTO_DB_PASSWORD = config.get('CELERYRISOTTO_DB_PASSWORD', None)
|
||||||
|
if 'CELERYRISOTTO_DB_USER' in environ:
|
||||||
|
CELERYRISOTTO_DB_USER = environ['CELERYRISOTTO_DB_USER']
|
||||||
|
else:
|
||||||
|
CELERYRISOTTO_DB_USER = config.get('CELERYRISOTTO_DB_USER', None)
|
||||||
|
if 'LEMUR_DB_NAME' in environ:
|
||||||
|
LEMUR_DB_NAME = environ['LEMUR_DB_NAME']
|
||||||
|
else:
|
||||||
|
LEMUR_DB_NAME = config.get('LEMUR_DB_NAME', None)
|
||||||
|
if 'LEMUR_DB_PASSWORD' in environ:
|
||||||
|
LEMUR_DB_PASSWORD = environ['LEMUR_DB_PASSWORD']
|
||||||
|
else:
|
||||||
|
LEMUR_DB_PASSWORD = config.get('LEMUR_DB_PASSWORD', None)
|
||||||
|
if 'LEMUR_DB_USER' in environ:
|
||||||
|
LEMUR_DB_USER = environ['LEMUR_DB_USER']
|
||||||
|
else:
|
||||||
|
LEMUR_DB_USER = config.get('LEMUR_DB_USER', None)
|
||||||
|
if 'DB_ADDRESS' in environ:
|
||||||
|
DB_ADDRESS = environ['DB_ADDRESS']
|
||||||
|
else:
|
||||||
|
DB_ADDRESS = config.get('DB_ADDRESS', 'localhost')
|
||||||
|
if 'MESSAGE_PATH' in environ:
|
||||||
|
MESSAGE_PATH = environ['MESSAGE_PATH']
|
||||||
|
else:
|
||||||
|
MESSAGE_PATH = config.get('MESSAGE_PATH', '/root/risotto-message/messages')
|
||||||
|
if 'SQL_DIR' in environ:
|
||||||
|
SQL_DIR = environ['SQL_DIR']
|
||||||
|
else:
|
||||||
|
SQL_DIR = config.get('SQL_DIR', './sql')
|
||||||
|
if 'CACHE_ROOT_PATH' in environ:
|
||||||
|
CACHE_ROOT_PATH = environ['CACHE_ROOT_PATH']
|
||||||
|
else:
|
||||||
|
CACHE_ROOT_PATH = config.get('CACHE_ROOT_PATH', '/var/cache/risotto')
|
||||||
|
if 'SRV_SEED_PATH' in environ:
|
||||||
|
SRV_SEED_PATH = environ['SRV_SEED_PATH']
|
||||||
|
else:
|
||||||
|
SRV_SEED_PATH = config.get('SRV_SEED_PATH', '/srv/seed')
|
||||||
|
if 'TMP_DIR' in environ:
|
||||||
|
TMP_DIR = environ['TMP_DIR']
|
||||||
|
else:
|
||||||
|
TMP_DIR = config.get('TMP_DIR', '/tmp')
|
||||||
|
if 'IMAGE_PATH' in environ:
|
||||||
|
IMAGE_PATH = environ['IMAGE_PATH']
|
||||||
|
else:
|
||||||
|
IMAGE_PATH = config.get('IMAGE_PATH', '/tmp')
|
||||||
|
if 'PASSWORD_ADMIN_USERNAME' in environ:
|
||||||
|
PASSWORD_ADMIN_USERNAME = environ['PASSWORD_ADMIN_USERNAME']
|
||||||
|
else:
|
||||||
|
PASSWORD_ADMIN_USERNAME = config.get('PASSWORD_ADMIN_USERNAME', 'risotto')
|
||||||
|
if 'PASSWORD_ADMIN_EMAIL' in environ:
|
||||||
|
PASSWORD_ADMIN_EMAIL = environ['PASSWORD_ADMIN_EMAIL']
|
||||||
|
else:
|
||||||
|
# this parameter is mandatory
|
||||||
|
PASSWORD_ADMIN_EMAIL = config.get('PASSWORD_ADMIN_EMAIL', 'XXX')
|
||||||
|
if 'PASSWORD_ADMIN_PASSWORD' in environ:
|
||||||
|
PASSWORD_ADMIN_PASSWORD = environ['PASSWORD_ADMIN_PASSWORD']
|
||||||
|
else:
|
||||||
|
# this parameter is mandatory
|
||||||
|
PASSWORD_ADMIN_PASSWORD = config.get('PASSWORD_ADMIN_PASSWORD', 'XXX')
|
||||||
|
if 'PASSWORD_DEVICE_IDENTIFIER' in environ:
|
||||||
|
PASSWORD_DEVICE_IDENTIFIER = environ['PASSWORD_DEVICE_IDENTIFIER']
|
||||||
|
else:
|
||||||
|
PASSWORD_DEVICE_IDENTIFIER = config.get('PASSWORD_DEVICE_IDENTIFIER', uuid4())
|
||||||
|
if 'PASSWORD_URL' in environ:
|
||||||
|
PASSWORD_URL = environ['PASSWORD_URL']
|
||||||
|
else:
|
||||||
|
PASSWORD_URL = config.get('PASSWORD_URL', 'https://localhost:8001/')
|
||||||
|
|
||||||
|
if 'PASSWORD_LENGTH' in environ:
|
||||||
|
PASSWORD_LENGTH = int(environ['PASSWORD_LENGTH'])
|
||||||
|
else:
|
||||||
|
PASSWORD_LENGTH = int(config.get('PASSWORD_LENGTH', 20))
|
||||||
|
if 'PKI_ADMIN_PASSWORD' in environ:
|
||||||
|
PKI_ADMIN_PASSWORD = environ['PKI_ADMIN_PASSWORD']
|
||||||
|
else:
|
||||||
|
PKI_ADMIN_PASSWORD = config.get('PKI_ADMIN_PASSWORD', 'XXX')
|
||||||
|
if 'PKI_ADMIN_EMAIL' in environ:
|
||||||
|
PKI_ADMIN_EMAIL = environ['PKI_ADMIN_EMAIL']
|
||||||
|
else:
|
||||||
|
PKI_ADMIN_EMAIL = config.get('PKI_ADMIN_EMAIL', 'XXX')
|
||||||
|
if 'PKI_URL' in environ:
|
||||||
|
PKI_URL = environ['PKI_URL']
|
||||||
|
else:
|
||||||
|
PKI_URL = config.get('PKI_URL', 'http://localhost:8002')
|
||||||
|
|
||||||
|
|
||||||
|
def dsn_factory(database, user, password, address=DB_ADDRESS):
|
||||||
|
mangled_address = '/var/run/postgresql' if address == 'localhost' else address
|
||||||
|
return f'postgres:///{database}?host={mangled_address}/&user={user}&password={password}'
|
||||||
|
|
||||||
|
|
||||||
|
_config = {'database': {'dsn': dsn_factory(RISOTTO_DB_NAME, RISOTTO_DB_USER, RISOTTO_DB_PASSWORD),
|
||||||
|
'tiramisu_dsn': dsn_factory(TIRAMISU_DB_NAME, TIRAMISU_DB_USER, TIRAMISU_DB_PASSWORD),
|
||||||
|
'celery_dsn': dsn_factory(CELERYRISOTTO_DB_NAME, CELERYRISOTTO_DB_USER, CELERYRISOTTO_DB_PASSWORD),
|
||||||
|
'lemur_dns': dsn_factory(LEMUR_DB_NAME, LEMUR_DB_USER, LEMUR_DB_PASSWORD),
|
||||||
|
},
|
||||||
|
'http_server': {'port': RISOTTO_PORT,
|
||||||
|
'default_user': DEFAULT_USER,
|
||||||
|
'url': RISOTTO_URL},
|
||||||
|
'global': {'message_root_path': MESSAGE_PATH,
|
||||||
|
'configurations_dir': CONFIGURATION_DIR,
|
||||||
|
'debug': True,
|
||||||
|
'internal_user': '_internal',
|
||||||
|
'check_role': True,
|
||||||
|
'admin_user': DEFAULT_USER,
|
||||||
|
'sql_dir': SQL_DIR,
|
||||||
|
'tmp_dir': TMP_DIR,
|
||||||
|
},
|
||||||
|
'password': {'admin_username': PASSWORD_ADMIN_USERNAME,
|
||||||
|
'admin_email': PASSWORD_ADMIN_EMAIL,
|
||||||
|
'admin_password': PASSWORD_ADMIN_PASSWORD,
|
||||||
|
'device_identifier': PASSWORD_DEVICE_IDENTIFIER,
|
||||||
|
'service_url': PASSWORD_URL,
|
||||||
|
'length': PASSWORD_LENGTH,
|
||||||
|
},
|
||||||
|
'pki': {'admin_password': PKI_ADMIN_PASSWORD,
|
||||||
|
'owner': PKI_ADMIN_EMAIL,
|
||||||
|
'url': PKI_URL,
|
||||||
|
},
|
||||||
|
'cache': {'root_path': CACHE_ROOT_PATH},
|
||||||
|
'servermodel': {'internal_source_path': SRV_SEED_PATH,
|
||||||
|
'internal_source': 'internal'},
|
||||||
|
'submodule': {'allow_insecure_https': False,
|
||||||
|
'pki': '192.168.56.112'},
|
||||||
|
'provider': {'factory_configuration_filename': 'infra.json',
|
||||||
|
'packer_filename': 'recipe.json',
|
||||||
|
'risotto_images_dir': IMAGE_PATH},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
def get_config():
|
def get_config():
|
||||||
return {'database': {'dsn': DEFAULT_DSN,
|
return _config
|
||||||
'tiramisu_dsn': DEFAULT_TIRAMISU_DSN,
|
|
||||||
},
|
|
||||||
'http_server': {'port': 8080,
|
|
||||||
'default_user': DEFAULT_USER},
|
|
||||||
'global': {'message_root_path': MESSAGE_PATH,
|
|
||||||
'debug': True,
|
|
||||||
'internal_user': 'internal',
|
|
||||||
'check_role': True,
|
|
||||||
'admin_user': DEFAULT_USER,
|
|
||||||
'module_name': MODULE_NAME,
|
|
||||||
'version': 'v1'},
|
|
||||||
'source': {'root_path': '/srv/seed'},
|
|
||||||
'cache': {'root_path': '/var/cache/risotto'},
|
|
||||||
'servermodel': {'internal_source': 'internal',
|
|
||||||
'internal_distribution': 'last',
|
|
||||||
'internal_release_name': 'none'},
|
|
||||||
'submodule': {'allow_insecure_https': False,
|
|
||||||
'pki': '192.168.56.112'},
|
|
||||||
}
|
|
||||||
|
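Every setting is now resolved in the same order: environment variable first, then the optional `/etc/risotto/risotto.conf` file, then a built-in default, and `get_config()` hands back the resulting dictionary. A short sketch of reading it; the import path assumes the module lives at `risotto.config`:

```python
from risotto.config import get_config, dsn_factory

config = get_config()
print(config['http_server']['port'])   # RISOTTO_PORT, or 8080 by default
print(config['database']['dsn'])       # DSN built by dsn_factory()

# dsn_factory() swaps 'localhost' for the PostgreSQL unix-socket directory.
print(dsn_factory('risotto', 'risotto', 'secret'))
print(dsn_factory('risotto', 'risotto', 'secret', address='10.0.0.5'))
```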
@@ -1,3 +1,13 @@

```python
class Context:
    def __init__(self):
        self.paths = []
        self.context_id = None
        self.start_id = None

    def copy(self):
        context = Context()
        for key, value in self.__dict__.items():
            if key.startswith('__'):
                continue
            setattr(context, key, value)
        return context
```
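`Context.copy()` carries every public attribute over to a fresh `Context`, which lets the dispatcher derive a child context for each message. Note that the copy is shallow; a small illustration (the `username` attribute is an example normally set elsewhere in the stack):

```python
from risotto.context import Context

ctx = Context()
ctx.username = 'Anonymous'                   # example attribute set by the HTTP layer
ctx.paths.append('v1.setting.source.list')

child = ctx.copy()
print(child.username)                        # 'Anonymous'
print(child.paths is ctx.paths)              # True: attributes are copied by reference
```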
@@ -1,49 +1,332 @@

```python
from os import listdir, makedirs
from os.path import join, isdir, isfile
from shutil import rmtree
from traceback import print_exc
from typing import Dict
from rougail import RougailConvert, RougailConfig, RougailUpgrade
try:
    from tiramisu3 import Storage, Config
except:
    from tiramisu import Storage, Config

from .config import get_config
from .utils import _, tiramisu_display_name
from .logger import log
from .dispatcher import get_dispatcher
from .context import Context


RougailConfig['variable_namespace'] = 'configuration'


class Controller:
    """Common controller used to add a service in Risotto
    """
    def __init__(self,
                 test: bool,
                 ) -> None:
        self.dispatcher = get_dispatcher()

    async def call(self,
                   uri: str,
                   risotto_context: Context,
                   *args,
                   **kwargs,
                   ):
        """ a wrapper to dispatcher's call"""
        if args:
            raise ValueError(_(f'the URI "{uri}" can only be called with keyword arguments'))
        current_uri = risotto_context.paths[-1]
        current_module = risotto_context.module
        version, message = uri.split('.', 1)
        module = message.split('.', 1)[0]
        if current_module != module:
            raise ValueError(_(f'cannot call to external module ("{module}") to the URI "{uri}" from "{current_module}"'))
        return await self.dispatcher.call(version,
                                          message,
                                          risotto_context,
                                          **kwargs,
                                          )

    async def publish(self,
                      uri: str,
                      risotto_context: Context,
                      *args,
                      **kwargs,
                      ):
        """ a wrapper to dispatcher's publish"""
        if args:
            raise ValueError(_(f'the URI "{uri}" can only be published with keyword arguments'))
        version, message = uri.split('.', 1)
        await self.dispatcher.publish(version,
                                      message,
                                      risotto_context,
                                      **kwargs,
                                      )

    @staticmethod
    async def check_role(uri: str,
                         username: str,
                         **kwargs: dict,
                         ) -> None:
        # create a new config
        async with await Config(self.dispatcher.option) as config:
            await config.property.read_write()
            await config.option('message').value.set(uri)
            subconfig = config.option(uri)
            for key, value in kwargs.items():
                try:
                    await subconfig.option(key).value.set(value)
                except AttributeError:
                    if get_config()['global']['debug']:
                        print_exc()
                    raise ValueError(_(f'unknown parameter in "{uri}": "{key}"'))
                except ValueOptionError as err:
                    raise ValueError(_(f'invalid parameter in "{uri}": {err}'))
            await self.dispatcher.check_role(subconfig,
                                             username,
                                             uri,
                                             )

    async def on_join(self,
                      risotto_context,
                      ):
        pass


class TiramisuController(Controller):
    def __init__(self,
                 test: bool,
                 ) -> None:
        if not 'dataset_name' in vars(self):
            raise Exception(f'please specify "dataset_name" to "{self.__class__.__name__}"')
        self.tiramisu_cache_root_path = join(get_config()['cache']['root_path'], self.dataset_name)
        super().__init__(test)
        if not test:
            db_conf = get_config()['database']['tiramisu_dsn']
            self.save_storage = Storage(engine='postgres')
            self.save_storage.setting(dsn=db_conf)
        if self.dataset_name != 'servermodel':
            self.optiondescription = None
        self.dispatcher.set_function('v1.setting.dataset.updated',
                                     None,
                                     TiramisuController.dataset_updated,
                                     self.__class__.__module__,
                                     )

    async def on_join(self,
                      risotto_context: Context,
                      ) -> None:
        if isdir(self.tiramisu_cache_root_path):
            await self.load_datas(risotto_context)

    async def dataset_updated(self,
                              risotto_context: Context,
                              ) -> Dict:
        await self.gen_dictionaries(risotto_context)
        await self.load_datas(risotto_context)

    async def gen_dictionaries(self,
                               risotto_context: Context,
                               ) -> None:
        sources = await self.get_sources(risotto_context)
        self._aggregate_tiramisu_funcs(sources)
        self._convert_dictionaries_to_tiramisu(sources)

    async def get_sources(self,
                          risotto_context: Context,
                          ) -> None:
        return await self.call('v1.setting.source.list',
                               risotto_context,
                               )

    def _aggregate_tiramisu_funcs(self,
                                  sources: list,
                                  ) -> None:
        dest_file = join(self.tiramisu_cache_root_path, 'funcs.py')
        if not isdir(self.tiramisu_cache_root_path):
            makedirs(self.tiramisu_cache_root_path)
        with open(dest_file, 'wb') as funcs:
            funcs.write(b"""try:
    from tiramisu3 import valid_network_netmask, valid_ip_netmask, valid_broadcast, valid_in_network, valid_not_equal as valid_differ, valid_not_equal, calc_value
except:
    from tiramisu import valid_network_netmask, valid_ip_netmask, valid_broadcast, valid_in_network, valid_not_equal as valid_differ, valid_not_equal, calc_value

""")
            for source in sources:
                root_path = join(source['source_directory'], self.dataset_name)
                if not isdir(root_path):
                    continue
                for service in listdir(root_path):
                    path = join(root_path, service, 'funcs')
                    if not isdir(path):
                        continue
                    for filename in listdir(path):
                        if not filename.endswith('.py'):
                            continue
                        filename_path = join(path, filename)
                        with open(filename_path, 'rb') as fh:
                            funcs.write(f'# {filename_path}\n'.encode())
                            funcs.write(fh.read())
                            funcs.write(b'\n')

    def _convert_dictionaries_to_tiramisu(self, sources: list) -> None:
        funcs_file = join(self.tiramisu_cache_root_path, 'funcs.py')
        tiramisu_file = join(self.tiramisu_cache_root_path, 'tiramisu.py')
        dictionaries_dir = join(self.tiramisu_cache_root_path, 'dictionaries')
        extras_dictionaries_dir = join(self.tiramisu_cache_root_path, 'extra_dictionaries')
        if isdir(dictionaries_dir):
            rmtree(dictionaries_dir)
        makedirs(dictionaries_dir)
        if isdir(extras_dictionaries_dir):
            rmtree(extras_dictionaries_dir)
        makedirs(extras_dictionaries_dir)
        extras = []
        upgrade = RougailUpgrade()
        for source in sources:
            root_path = join(source['source_directory'], self.dataset_name)
            if not isdir(root_path):
                continue
            for service in listdir(root_path):
                # upgrade dictionaries
                path = join(root_path, service, 'dictionaries')
                if not isdir(path):
                    continue
                upgrade.load_xml_from_folders(path,
                                              dictionaries_dir,
                                              RougailConfig['variable_namespace'],
                                              )
            for service in listdir(root_path):
                # upgrade extra dictionaries
                path = join(root_path, service, 'extras')
                if not isdir(path):
                    continue
                for namespace in listdir(path):
                    extra_dir = join(path, namespace)
                    if not isdir(extra_dir):
                        continue
                    extra_dictionaries_dir = join(extras_dictionaries_dir, namespace)
                    if not isdir(extra_dictionaries_dir):
                        makedirs(extra_dictionaries_dir)
                    extras.append((namespace, [extra_dictionaries_dir]))
                    upgrade.load_xml_from_folders(extra_dir,
                                                  extra_dictionaries_dir,
                                                  namespace,
                                                  )
        del upgrade
        config = RougailConfig.copy()
        config['functions_file'] = funcs_file
        config['dictionaries_dir'] = [dictionaries_dir]
        config['extra_dictionaries'] = {}
        for extra in extras:
            config['extra_dictionaries'][extra[0]] = extra[1]
        eolobj = RougailConvert(rougailconfig=config)
        eolobj.save(tiramisu_file)

    async def load(self,
                   risotto_context: Context,
                   name: str,
                   to_deploy: bool=False,
                   ) -> Config:
        if self.optiondescription is None:
            # use file in cache
            tiramisu_file = join(self.tiramisu_cache_root_path, 'tiramisu.py')
            if not isfile(tiramisu_file):
                raise Exception(_(f'unable to load the "{self.dataset_name}" configuration, is dataset loaded?'))
            with open(tiramisu_file) as fileio:
                tiramisu_locals = {}
                try:
                    exec(fileio.read(), None, tiramisu_locals)
                except Exception as err:
                    raise Exception(_(f'unable to load tiramisu file {tiramisu_file}: {err}'))

                self.optiondescription = tiramisu_locals['option_0']
                del tiramisu_locals
        try:
            letter = self.dataset_name[0]
            if not to_deploy:
                session_id = f'{letter}_{name}'
            else:
                session_id = f'{letter}td_{name}'
            config = await Config(self.optiondescription,
                                  session_id=session_id,
                                  storage=self.save_storage,
                                  display_name=tiramisu_display_name,
                                  )
            # change default rights
            await config.property.read_only()
            await config.permissive.add('basic')
            await config.permissive.add('normal')
            await config.permissive.add('expert')

            # set information and owner
            await config.owner.set(session_id)
            await config.information.set(f'{self.dataset_name}_name', name)
        except Exception as err:
            if get_config()['global']['debug']:
                print_exc()
            msg = _(f'unable to load config for {self.dataset_name} "{name}": {err}')
            await log.error_msg(risotto_context,
                                None,
                                msg,
                                )
        return config

    async def _deploy_configuration(self,
                                    dico: dict,
                                    ) -> None:
        config_std = dico['config_to_deploy']
        config = dico['config']
        # when deploy, calculate force_store_value
        ro = await config_std.property.getdefault('read_only', 'append')
        if 'force_store_value' not in ro:
            await config_std.property.read_write()
            if self.dataset_name == 'servermodel':
                # server_deployed should be hidden
                await config_std.forcepermissive.option('configuration.general.server_deployed').value.set(True)
            ro = frozenset(list(ro) + ['force_store_value'])
            rw = await config_std.property.getdefault('read_write', 'append')
            rw = frozenset(list(rw) + ['force_store_value'])
            await config_std.property.setdefault(ro, 'read_only', 'append')
            await config_std.property.setdefault(rw, 'read_write', 'append')
            await config_std.property.read_only()

        # copy informations from 'to deploy' configuration to configuration
        await config.information.importation(await config_std.information.exportation())
        await config.value.importation(await config_std.value.exportation())
        await config.permissive.importation(await config_std.permissive.exportation())
        await config.property.importation(await config_std.property.exportation())

    async def build_configuration(self,
                                  config: Config,
                                  ) -> dict:
        configuration = {}
        for option in await config.option.list('optiondescription'):
            name = await option.option.name()
            if name == 'services':
                continue
            if name == RougailConfig['variable_namespace']:
                fullpath = False
                flatten = True
            else:
                fullpath = True
                flatten = False
            configuration.update(await option.value.dict(leader_to_list=True, fullpath=fullpath, flatten=flatten))
        return configuration
```
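A service now plugs into Risotto by subclassing `Controller` (or `TiramisuController` when it owns a Rougail/Tiramisu dataset) and going through `self.call()` for every message. A hypothetical minimal controller, only to show the shape of the wrapper; the class name, the import path and the `source_name` key are assumptions:

```python
from risotto.controller import Controller
from risotto.context import Context


class Risotto(Controller):
    """Hypothetical service: looks a source up through another message of the same module."""

    async def source_describe(self, risotto_context: Context, source_name: str) -> dict:
        # call() accepts keyword arguments only and refuses URIs of another module
        sources = await self.call('v1.setting.source.list', risotto_context)
        for source in sources:
            if source.get('source_name') == source_name:
                return source
        return {}
```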
@@ -1,4 +1,10 @@

```python
try:
    from tiramisu3 import Config
    from tiramisu3.error import ValueOptionError
except:
    from tiramisu import Config
    from tiramisu.error import ValueOptionError
from asyncio import get_event_loop, ensure_future
from traceback import print_exc
from copy import copy
from typing import Dict, Callable, List, Optional
```

@@ -10,8 +16,9 @@ from .logger import log

```python
from .config import get_config
from .context import Context
from . import register

DISPATCHER = None


class CallDispatcher:
```

@@ -26,68 +33,101 @@

```python
        if response.impl_get_information('multi'):
            if not isinstance(returns, list):
                err = _(f'function {module_name}.{function_name} has to return a list')
                raise CallError(err)
        else:
            if not isinstance(returns, dict):
                err = _(f'function {module_name}.{function_name} has to return a dict')
                raise CallError(err)
            returns = [returns]
        if response is None:
            raise Exception('hu?')
        else:
            for ret in returns:
                async with await Config(response, display_name=lambda self, dyn_name, suffix: self.impl_getname()) as config:
                    await config.property.read_write()
                    key = None
                    try:
                        for key, value in ret.items():
                            await config.option(key).value.set(value)
                    except AttributeError as err:
                        if key is not None:
                            err = _(f'function {module_name}.{function_name} return the unknown parameter "{key}" for the uri "{risotto_context.version}.{risotto_context.message}"')
                        else:
                            err = _(f'function {module_name}.{function_name} return unconsistency data "{err}" for the uri "{risotto_context.version}.{risotto_context.message}"')
                        raise CallError(err)
                    except ValueError as err:
                        if key is not None:
                            err = _(f'function {module_name}.{function_name} return the invalid parameter "{key}" for the uri "{risotto_context.version}.{risotto_context.message}": {err}')
                        else:
                            err = _(f'function {module_name}.{function_name} return unconsistency error for the uri "{risotto_context.version}.{risotto_context.message}": {err}')
                        raise CallError(err)
                    await config.property.read_only()
                    mandatories = await config.value.mandatory()
                    if mandatories:
                        mand = [mand.split('.')[-1] for mand in mandatories]
                        raise ValueError(_(f'missing parameters in response of the uri "{risotto_context.version}.{risotto_context.message}": {mand} in message'))
                    try:
                        await config.value.dict()
                    except Exception as err:
                        err = _(f'function {module_name}.{function_name} return an invalid response {err} for the uri "{risotto_context.version}.{risotto_context.message}"')
                        raise CallError(err)

    async def call(self,
                   version: str,
                   message: str,
                   old_risotto_context: Context,
                   check_role: bool=False,
                   internal: bool=True,
                   **kwargs,
                   ):
        """ execute the function associate with specified uri
        arguments are validate before
        """
        risotto_context = self.build_new_context(old_risotto_context.__dict__,
                                                 version,
                                                 message,
                                                 'rpc',
                                                 )
        if version not in self.messages:
            raise CallError(_(f'cannot find version of message "{version}"'))
        if message not in self.messages[version]:
            raise CallError(_(f'cannot find message "{version}.{message}"'))
        function_obj = self.messages[version][message]
        # log
        function_name = function_obj['function'].__name__
        info_msg = _(f"call function {function_obj['full_module_name']}.{function_name}")
        if hasattr(old_risotto_context, 'connection'):
            # do not start a new database connection
            risotto_context.connection = old_risotto_context.connection
            await log.start(risotto_context, kwargs, info_msg)
            await self.check_message_type(risotto_context, kwargs)
            config_arguments = await self.load_kwargs_to_config(risotto_context, f'{version}.{message}', kwargs, check_role, internal)
            try:
                ret = await self.launch(risotto_context, kwargs, config_arguments, function_obj)
                await log.success(risotto_context, ret)
            except Exception as err:
                await log.failed(risotto_context, str(err))
                raise CallError(err) from err
        else:
            error = None
            try:
                async with self.pool.acquire() as connection:
                    await connection.set_type_codec(
```

@@ -98,142 +138,248 @@

```python
                    )
                    risotto_context.connection = connection
                    async with connection.transaction():
                        try:
                            await log.start(risotto_context, kwargs, info_msg)
                            await self.check_message_type(risotto_context, kwargs)
                            config_arguments = await self.load_kwargs_to_config(risotto_context, f'{version}.{message}', kwargs, check_role, internal)
                            ret = await self.launch(risotto_context, kwargs, config_arguments, function_obj)
                            # log the success
                            await log.success(risotto_context, ret)
                            if not internal and isinstance(ret, dict):
                                ret['context_id'] = risotto_context.context_id
                        except CallError as err:
                            if get_config()['global']['debug']:
                                print_exc()
                            await log.failed(risotto_context, str(err))
                            raise err from err
            except CallError as err:
                error = err
            except Exception as err:
                # if there is a problem with arguments, just send an error and do nothing
                if get_config()['global']['debug']:
                    print_exc()
                await log.failed(risotto_context, str(err))
                error = err
            if error:
                if not internal:
                    err = CallError(str(error))
                    err.context_id = risotto_context.context_id
                else:
                    err = error
                raise err from error
        return ret


class PublishDispatcher:
    async def register_remote(self) -> None:
        print()
        print(_('======== Registered remote event ========'))
        self.listened_connection = await self.pool.acquire()
        for version, messages in self.messages.items():
            for message, message_infos in messages.items():
                # event not emit locally
                if message_infos['pattern'] == 'event' and 'functions' in message_infos and message_infos['functions']:
                    uri = f'{version}.{message}'
                    print(f' - {uri}')
                    await self.listened_connection.add_listener(uri,
                                                                self.to_async_publish,
                                                                )

    async def publish(self,
                      version: str,
                      message: str,
                      risotto_context: Context,
                      **kwargs,
                      ) -> None:
        if version not in self.messages or message not in self.messages[version]:
            raise ValueError(_(f'cannot find URI "{version}.{message}"'))

        # publish to remote
        remote_kw = dumps({'kwargs': kwargs,
                           'context': {'username': risotto_context.username,
                                       'paths': risotto_context.paths,
                                       'context_id': risotto_context.context_id,
                                       }
                           })
        # FIXME should be better :/
        remote_kw = remote_kw.replace("'", "''")
        await risotto_context.connection.execute(f'NOTIFY "{version}.{message}", \'{remote_kw}\'')

    def to_async_publish(self,
                         con: 'asyncpg.connection.Connection',
                         pid: int,
                         uri: str,
                         payload: str,
                         ) -> None:
        version, message = uri.split('.', 1)
        loop = get_event_loop()
        remote_kw = loads(payload)
        for function_obj in self.messages[version][message]['functions']:
            risotto_context = self.build_new_context(remote_kw['context'],
                                                     version,
                                                     message,
                                                     'event',
                                                     )
            callback = self.get_callback(version, message, function_obj, risotto_context, remote_kw['kwargs'])
            loop.call_soon(callback)

    def get_callback(self,
                     version,
                     message,
                     function_obj,
                     risotto_context,
                     kwargs,
                     ):
        return lambda: ensure_future(self._publish(version,
                                                   message,
                                                   function_obj,
                                                   risotto_context,
                                                   **kwargs,
                                                   ))

    async def _publish(self,
                       version: str,
                       message: str,
                       function_obj,
                       risotto_context: Context,
                       **kwargs,
                       ) -> None:
        config_arguments = await self.load_kwargs_to_config(risotto_context, f'{version}.{message}', kwargs, False, False)
        async with self.pool.acquire() as connection:
            await connection.set_type_codec(
                'json',
                encoder=dumps,
                decoder=loads,
                schema='pg_catalog'
            )
            risotto_context.connection = connection
            function_name = function_obj['function'].__name__
            info_msg = _(f"call function {function_obj['full_module_name']}.{function_name}")
            try:
                async with connection.transaction():
                    try:
                        await log.start(risotto_context, kwargs, info_msg)
                        await self.check_message_type(risotto_context, kwargs)
                        await self.launch(risotto_context, kwargs, config_arguments, function_obj)
                        # log the success
                        await log.success(risotto_context)
                    except CallError as err:
                        if get_config()['global']['debug']:
                            print_exc()
                        await log.failed(risotto_context, str(err))
            except CallError:
                pass
            except Exception as err:
                # if there is a problem with arguments, log and do nothing
                if get_config()['global']['debug']:
                    print_exc()
                await log.failed(risotto_context, str(err))


class Dispatcher(register.RegisterDispatcher,
                 CallDispatcher,
                 PublishDispatcher,
                 ):
    """ Manage message (call or publish)
    so launch a function when a message is called
    """
    def build_new_context(self,
                          context: dict,
                          version: str,
                          message: str,
                          type: str,
                          ) -> Context:
        """ This is a new call or a new publish, so create a new context
        """
        uri = version + '.' + message
        risotto_context = Context()
        risotto_context.username = context['username']
        risotto_context.paths = copy(context['paths'])
        risotto_context.context_id = context['context_id']
        risotto_context.paths.append(uri)
        risotto_context.uri = uri
        risotto_context.type = type
        risotto_context.message = message
        risotto_context.version = version
        risotto_context.pool = self.pool
        return risotto_context

    async def check_message_type(self,
                                 risotto_context: Context,
                                 kwargs: Dict,
                                 ) -> None:
        if self.messages[risotto_context.version][risotto_context.message]['pattern'] != risotto_context.type:
            msg = _(f'{risotto_context.uri} is not a {risotto_context.type} message')
            raise CallError(msg)

    async def load_kwargs_to_config(self,
                                    risotto_context: Context,
                                    uri: str,
                                    kwargs: Dict,
                                    check_role: bool,
                                    internal: bool,
                                    ):
        """ create a new Config et set values to it
        """
        # create a new config
        async with await Config(self.option) as config:
            await config.property.read_write()
            # set message's option
            await config.option('message').value.set(uri)
            # store values
            subconfig = config.option(uri)
            extra_parameters = {}
            for key, value in kwargs.items():
                if not internal or not key.startswith('_'):
                    try:
                        await subconfig.option(key).value.set(value)
                    except AttributeError:
                        if get_config()['global']['debug']:
                            print_exc()
                        raise ValueError(_(f'unknown parameter in "{uri}": "{key}"'))
                    except ValueOptionError as err:
                        raise ValueError(_(f'invalid parameter in "{uri}": {err}'))
                else:
                    extra_parameters[key] = value
            # check mandatories options
            if check_role and get_config().get('global').get('check_role'):
                await self.check_role(subconfig,
```

@@ -245,7 +391,10 @@

```python
                mand = [mand.split('.')[-1] for mand in mandatories]
                raise ValueError(_(f'missing parameters in "{uri}": {mand}'))
            # return complete an validated kwargs
            parameters = await subconfig.value.dict()
            if extra_parameters:
                parameters.update(extra_parameters)
            return parameters

    def get_service(self,
                    name: str):
```

@@ -254,14 +403,15 @@

```python
    async def check_role(self,
                         config: Config,
                         user_login: str,
                         uri: str,
                         ) -> None:
        async with self.pool.acquire() as connection:
            async with connection.transaction():
                # Verify if user exists and get ID
                sql = '''
                    SELECT UserId
                    FROM UserUser
                    WHERE Login = $1
                '''
                user_id = await connection.fetchval(sql,
                                                    user_login)
```

@@ -278,8 +428,8 @@

```python
                # Check role
                select_role_uri = '''
                    SELECT RoleName
                    FROM UserURI, UserRoleURI
                    WHERE UserURI.URIName = $1 AND UserRoleURI.URIId = UserURI.URIId
                '''
                select_role_user = '''
                    SELECT RoleAttribute, RoleAttributeValue
```

@@ -299,78 +449,55 @@

```python
                    raise NotAllowedError(_(f'You ({user_login}) don\'t have any authorisation to access to "{uri}"'))

    async def launch(self,
                     risotto_context: Context,
                     kwargs: Dict,
                     config_arguments: dict,
                     function_obj: Callable,
                     ) -> Optional[Dict]:
        # so send the message
        function = function_obj['function']
        risotto_context.module = function_obj['module'].split('.', 1)[0]
        # build argument for this function
        if risotto_context.type == 'rpc':
            kw = config_arguments
        else:
            kw = {}
            for key, value in config_arguments.items():
                if key in function_obj['arguments']:
                    kw[key] = value

        kw['risotto_context'] = risotto_context
        # launch
        returns = await function(self.get_service(function_obj['module']), **kw)
        if risotto_context.type == 'rpc':
            # valid returns
            await self.valid_call_returns(risotto_context,
                                          function,
                                          returns,
                                          kwargs,
                                          )
        # notification
        if function_obj.get('notification'):
            if returns is None:
                raise Exception(_(f'function "{function_obj["full_module_name"]}.{function_obj["function"].__name__}" must returns something for {function_obj["notification"]}!'))
            notif_version, notif_message = function_obj['notification'].split('.', 1)
            if not isinstance(returns, list):
                send_returns = [returns]
            else:
                send_returns = returns
            for ret in send_returns:
                await self.publish(notif_version,
                                   notif_message,
                                   risotto_context,
                                   **ret,
                                   )
        if risotto_context.type == 'rpc':
            return returns


def get_dispatcher():
    global DISPATCHER
    if DISPATCHER is None:
        DISPATCHER = Dispatcher()
        register.dispatcher = DISPATCHER
    return DISPATCHER
```
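The module no longer builds a `Dispatcher` at import time; callers fetch the lazily created singleton through `get_dispatcher()`. A sketch of the calling convention; the URI is an example, and the commented call assumes the connection pool and the messages were already loaded by `get_app()`:

```python
from risotto.dispatcher import get_dispatcher

dispatcher = get_dispatcher()
assert dispatcher is get_dispatcher()   # the same instance is returned on every call

# Once dispatcher.load() has run, an RPC message is dispatched roughly like this:
#
#     ret = await dispatcher.call('v1',
#                                 'setting.source.list',
#                                 risotto_context,
#                                 check_role=True,
#                                 internal=False)
```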
@ -1,40 +1,64 @@
|
|||||||
from aiohttp.web import Application, Response, get, post, HTTPBadRequest, HTTPInternalServerError, HTTPNotFound
|
from aiohttp.web import Application, Response, get, post, HTTPBadRequest, HTTPInternalServerError, HTTPNotFound, static
|
||||||
from json import dumps
|
from json import dumps
|
||||||
from traceback import print_exc
|
from traceback import print_exc
|
||||||
from tiramisu import Config, default_storage
|
try:
|
||||||
|
from tiramisu3 import Config, default_storage
|
||||||
|
except:
|
||||||
|
from tiramisu import Config, default_storage
|
||||||
|
|
||||||
|
|
||||||
from .dispatcher import dispatcher
|
from .dispatcher import get_dispatcher
|
||||||
from .utils import _
|
from .utils import _
|
||||||
from .context import Context
|
from .context import Context
|
||||||
from .error import CallError, NotAllowedError, RegistrationError
|
from .error import CallError, NotAllowedError, RegistrationError
|
||||||
from .message import get_messages
|
from .message import get_messages
|
||||||
from .logger import log
|
#from .logger import log
|
||||||
from .config import get_config
|
from .config import get_config
|
||||||
from .services import load_services
|
from . import services
|
||||||
|
|
||||||
|
|
||||||
|
extra_routes = {}
|
||||||
|
extra_statics = {}
|
||||||
|
|
||||||
|
|
||||||
def create_context(request):
|
def create_context(request):
|
||||||
risotto_context = Context()
|
risotto_context = Context()
|
||||||
risotto_context.username = request.match_info.get('username',
|
if 'username' in dict(request.match_info):
|
||||||
get_config()['http_server']['default_user'])
|
username = request.match_info['username']
|
||||||
|
elif 'username' in request.headers:
|
||||||
|
username = request.headers['username']
|
||||||
|
else:
|
||||||
|
username = get_config()['http_server']['default_user']
|
||||||
|
risotto_context.username = username
|
||||||
return risotto_context
|
return risotto_context
|
||||||
|
|
||||||
|
|
||||||
def register(version: str,
|
def register(version: str,
|
||||||
path: str):
|
path: str,
|
||||||
|
):
|
||||||
""" Decorator to register function to the http route
|
""" Decorator to register function to the http route
|
||||||
"""
|
"""
|
||||||
def decorator(function):
|
def decorator(function):
|
||||||
if path in extra_routes:
|
if path in extra_routes:
|
||||||
raise RegistrationError(f'the route {path} is already registered')
|
raise RegistrationError(f'the route "{path}" is already registered')
|
||||||
extra_routes[path] = {'function': function,
|
extra_routes[path] = {'function': function,
|
||||||
'version': version}
|
'version': version,
|
||||||
|
}
|
||||||
return decorator
|
return decorator
|
||||||
|
|
||||||
|
|
||||||
|
def register_static(path: str,
|
||||||
|
directory: str,
|
||||||
|
) -> None:
|
||||||
|
if path in extra_statics:
|
||||||
|
raise RegistrationError(f'the static path "{path}" is already registered')
|
||||||
|
extra_statics[path] = directory
|
||||||
|
|
||||||
|
|
||||||
```diff
 class extra_route_handler:
-    async def __new__(cls, request):
+    async def __new__(cls,
+                      request,
+                      ):
         kwargs = dict(request.match_info)
         kwargs['request'] = request
         kwargs['risotto_context'] = create_context(request)
@@ -44,8 +68,10 @@ class extra_route_handler:
         function_name = cls.function.__module__
         # if not 'api' function
         if function_name != 'risotto.http':
-            module_name = function_name.split('.')[-2]
-            kwargs['self'] = dispatcher.injected_self[module_name]
+            risotto_module_name, submodule_name = function_name.split('.', 2)[:-1]
+            module_name = risotto_module_name.split('_')[-1]
+            dispatcher = get_dispatcher()
+            kwargs['self'] = dispatcher.injected_self[module_name + '.' + submodule_name]
         try:
             returns = await cls.function(**kwargs)
         except NotAllowedError as err:
@@ -59,7 +85,8 @@ class extra_route_handler:
         # await log.info_msg(kwargs['risotto_context'],
         #                    dict(request.match_info))
         return Response(text=dumps(returns),
-                        content_type='application/json')
+                        content_type='application/json',
+                        )


 async def handle(request):
@@ -67,6 +94,7 @@ async def handle(request):
     risotto_context = create_context(request)
     kwargs = await request.json()
     try:
+        dispatcher = get_dispatcher()
         pattern = dispatcher.messages[version][message]['pattern']
         if pattern == 'rpc':
             method = dispatcher.call
@@ -76,63 +104,104 @@ async def handle(request):
                                   message,
                                   risotto_context,
                                   check_role=True,
-                                  **kwargs)
-    except NotAllowedError as err:
-        raise HTTPNotFound(reason=str(err))
-    except CallError as err:
-        raise HTTPBadRequest(reason=str(err).replace('\n', ' '))
+                                  internal=False,
+                                  **kwargs,
+                                  )
     except Exception as err:
-        if get_config()['global']['debug']:
-            print_exc()
-        raise HTTPInternalServerError(reason=str(err))
-    return Response(text=dumps({'response': text},
-                    content_type='application/json'))
+        context_id = None
+        if isinstance(err, NotAllowedError):
+            error_type = HTTPNotFound
+        elif isinstance(err, CallError):
+            error_type = HTTPBadRequest
+            context_id = err.context_id
+        else:
+            if get_config()['global']['debug']:
+                print_exc()
+            error_type = HTTPInternalServerError
+        response = {'type': 'error',
+                    'reason': str(err).replace('\n', ' '),
+                    }
+        if context_id is not None:
+            response['context_id'] = context_id
+        err = dumps({'response': response,
+                     'type': 'error',
+                     })
+        raise error_type(text=err,
+                         content_type='application/json',
+                         )
+    return Response(text=dumps({'response': text,
+                                'type': 'success',
+                                }),
+                    content_type='application/json',
+                    )


```
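With the reworked error handling above, every `/api/...` POST now answers with a JSON envelope: `{'response': ..., 'type': 'success'}` on success, or an error document carrying a `reason` (and, for call errors, a `context_id`). A small client sketch; the URL, port, message name and payload are invented:

```python
# Assumes a running risotto instance; everything below the import is an example.
from aiohttp import ClientSession

async def call_message():
    async with ClientSession() as session:
        url = 'http://localhost:8080/api/v1/example.module.describe'
        async with session.post(url, json={'name': 'some-server'}) as resp:
            data = await resp.json()
    if data['type'] == 'error':
        # mirrors the envelope built in the except branch above
        print('failed:', data['response']['reason'], data['response'].get('context_id'))
    else:
        print('ok:', data['response'])
```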
```diff
-async def api(request, risotto_context):
-    global tiramisu
-    if not tiramisu:
+async def api(request,
+              risotto_context,
+              ):
+    global TIRAMISU
+    if not TIRAMISU:
         # check all URI that have an associated role
         # all URI without role is concidered has a private URI
         uris = []
+        dispatcher = get_dispatcher()
         async with dispatcher.pool.acquire() as connection:
             async with connection.transaction():
                 # Check role with ACL
                 sql = '''
-                    SELECT URI.URIName
-                    FROM URI, RoleURI
-                    WHERE RoleURI.URIId = URI.URIId
+                    SELECT UserURI.URIName
+                    FROM UserURI, UserRoleURI
+                    WHERE UserRoleURI.URIId = UserURI.URIId
                 '''
                 uris = [uri['uriname'] for uri in await connection.fetch(sql)]
-        async with await Config(get_messages(load_shortarg=True,
-                                             uris=uris)[1]) as config:
+        risotto_modules = services.get_services_list()
+        async with await Config(get_messages(current_module_names=risotto_modules,
+                                             load_shortarg=True,
+                                             current_version=risotto_context.version,
+                                             uris=uris,
+                                             )[1],
+                                display_name=lambda self, dyn_name, suffix: self.impl_getname()) as config:
             await config.property.read_write()
-            tiramisu = await config.option.dict(remotable='none')
-    return tiramisu
+            TIRAMISU = await config.option.dict(remotable='none')
+    return TIRAMISU


-extra_routes = {'': {'function': api,
-                     'version': 'v1'}}


```
```diff
 async def get_app(loop):
     """ build all routes
     """
-    global extra_routes
-    load_services()
+    global extra_routes, extra_statics
+    dispatcher = get_dispatcher()
+    services.link_to_dispatcher(dispatcher)
     app = Application(loop=loop)
     routes = []
     default_storage.engine('dictionary')
     await dispatcher.load()
+    versions = []
     for version, messages in dispatcher.messages.items():
+        if version not in versions:
+            versions.append(version)
         print()
         print(_('======== Registered messages ========'))
-        for message in messages:
+        for message, message_infos in messages.items():
             web_message = f'/api/{version}/{message}'
-            pattern = dispatcher.messages[version][message]['pattern']
+            pattern = message_infos['pattern']
             print(f' - {web_message} ({pattern})')
             routes.append(post(web_message, handle))
     print()
+    print(_('======== Registered api routes ========'))
+    for version in versions:
+        api_route = {'function': api,
+                     'version': version,
+                     'path': f'/api/{version}',
+                     }
+        extra_handler = type(api_route['path'], (extra_route_handler,), api_route)
+        routes.append(get(api_route['path'], extra_handler))
+        print(f' - {api_route["path"]} (http_get)')
+    # last version is default version
+    routes.append(get('/api', extra_handler))
+    print(f' - /api (http_get)')
+    print()
+    if extra_routes:
         print(_('======== Registered extra routes ========'))
         for path, extra in extra_routes.items():
             version = extra['version']
@@ -141,12 +210,22 @@ async def get_app(loop):
             extra_handler = type(path, (extra_route_handler,), extra)
             routes.append(get(path, extra_handler))
             print(f' - {path} (http_get)')
-            # routes.append(get(f'/api/{version}', api))
-    print()
+    if extra_statics:
+        if not extra_routes:
+            print(_('======== Registered static routes ========'))
+        for path, directory in extra_statics.items():
+            routes.append(static(path, directory))
+            print(f' - {path} (static)')
     del extra_routes
-    app.add_routes(routes)
+    del extra_statics
+    app.router.add_routes(routes)
+    await dispatcher.register_remote()
+    print()
     await dispatcher.on_join()
-    return await loop.create_server(app.make_handler(), '*', get_config()['http_server']['port'])
+    return await loop.create_server(app.make_handler(),
+                                    '*',
+                                    get_config()['http_server']['port'],
+                                    )


-tiramisu = None
+TIRAMISU = None
```
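`get_app()` returns the result of `loop.create_server(...)`, so a launcher only has to drive the event loop. A minimal sketch; the module path `risotto.http` is assumed, not shown in this hunk:

```python
# Minimal launcher sketch; signal handling and graceful shutdown are omitted.
import asyncio
from risotto.http import get_app

loop = asyncio.get_event_loop()
loop.run_until_complete(get_app(loop))   # builds routes and binds the port from the config
loop.run_forever()
```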
src/risotto/image.py (new file, 288 lines), part 1:

```python
from os import listdir, walk, makedirs
from os.path import isfile, isdir, join, dirname
from yaml import load, SafeLoader
from json import load as jload, dump as jdump
from time import time
from shutil import copy2, rmtree, move
from hashlib import sha512
from subprocess import Popen

from rougail import RougailConvert, RougailConfig, RougailUpgrade
try:
    from tiramisu3 import Config
except:
    from tiramisu import Config

from .utils import _


DATASET_PATH = '/usr/share/risotto/'
TMP_DIRECTORY = '/tmp'
PACKER_TMP_DIRECTORY = join(TMP_DIRECTORY, 'packer')
PACKER_FILE_NAME = 'recipe.json'
IMAGES_DIRECTORY = join(TMP_DIRECTORY, 'images')


FUNCTIONS = b"""try:
    from tiramisu3 import valid_network_netmask, valid_ip_netmask, valid_broadcast, valid_in_network, valid_not_equal as valid_differ, valid_not_equal, calc_value
except:
    from tiramisu import valid_network_netmask, valid_ip_netmask, valid_broadcast, valid_in_network, valid_not_equal as valid_differ, valid_not_equal, calc_value
# =============================================================
# fork of risotto-setting/src/risotto_setting/config/config.py
def get_password(**kwargs):
    return 'password'


def get_ip(**kwargs):
    return '1.1.1.1'


def get_chain(**kwargs):
    return 'chain'


def get_certificates(**kwargs):
    return []


def get_certificate(**kwargs):
    return 'certificate'


def get_private_key(**kwargs):
    return 'private_key'


def get_linked_configuration(**kwargs):
    if 'test' in kwargs and kwargs['test']:
        return kwargs['test'][0]
    return 'configuration'


def zone_information(**kwargs):
    return 'zone'
# =============================================================

"""


class Image:
    def __init__(self,
                 image_dir: str=None,
                 tmp_dir: str=None,
                 ):
        if image_dir is None:
            image_dir = IMAGES_DIRECTORY
        self.image_dir = image_dir
        if tmp_dir is None:
            tmp_dir = PACKER_TMP_DIRECTORY
        self.tmp_dir = tmp_dir
        self.parse_applications()

    def parse_applications(self) -> None:
        self.builds = []
        self.applications = {}
        for distrib in listdir(join(DATASET_PATH, 'seed')):
            distrib_dir = join(DATASET_PATH, 'seed', distrib, 'applicationservice')
            if not isdir(distrib_dir):
                continue
            for release in listdir(distrib_dir):
                release_dir = join(distrib_dir, release)
                if not isdir(release_dir):
                    continue
                for applicationservice in listdir(release_dir):
                    applicationservice_dir = join(release_dir, applicationservice)
                    if not isdir(applicationservice_dir):
                        continue
                    if applicationservice in self.applications:
                        raise Exception('multi applicationservice')
                    with open(join(applicationservice_dir, 'applicationservice.yml')) as yaml:
                        app = load(yaml, Loader=SafeLoader)
                    self.applications[applicationservice] = {'path': applicationservice_dir,
                                                             'yml': app,
                                                             }
                    if 'service' in app and app['service']:
                        self.builds.append(applicationservice)

    def calc_depends(self,
                     dependencies: list,
                     appname,
                     ):
        app = self.applications[appname]['yml']
        if not 'depends' in app or not app['depends']:
            return
        for dependency in app['depends']:
            dependency_path = self.applications[dependency]['path']
            if dependency_path not in dependencies:
                dependencies.insert(0, dependency_path)
                self.calc_depends(dependencies, dependency)


    def list_images(self):
        print(self.builds)
        for build in self.builds:
            dependencies = [self.applications[build]['path']]
            self.calc_depends(dependencies, build)
            yield build, dependencies
```
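`parse_applications()` only looks at two keys of each `applicationservice.yml`: `service` (build an image for it) and `depends` (other application services to merge in). A hypothetical file, parsed the same way the code above does; the service names are invented:

```python
from yaml import load, SafeLoader

# Hypothetical applicationservice.yml content.
app = load("""
service: true
depends:
  - base-os
  - postgresql
""", Loader=SafeLoader)

assert app['service'] is True
assert app['depends'] == ['base-os', 'postgresql']
```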
src/risotto/image.py, part 2 (continued):

```python
    def copy_files(self,
                   src_path: str,
                   dst_path: str,
                   ) -> None:
        root_len = len(src_path) + 1
        for dir_name, subdir_names, filenames in walk(src_path):
            subdir = join(dst_path, dir_name[root_len:])
            if not isdir(subdir):
                makedirs(subdir)
            for filename in filenames:
                path = join(dir_name, filename)
                sub_dst_path = join(subdir, filename)
                if isfile(sub_dst_path):
                    raise Exception(_(f'Try to copy {sub_dst_path} which is already exists'))
                copy2(path, sub_dst_path)

    def load_configuration(self,
                           dependencies_path: list,
                           packer_tmp_directory: str,
                           ) -> dict:
        config = RougailConfig.copy()
        dictionaries = [join(dependency_path, 'dictionaries') for dependency_path in dependencies_path if isdir(join(dependency_path, 'dictionaries'))]
        upgrade = RougailUpgrade()
        dest_dictionaries = join(packer_tmp_directory, 'dictionaries')
        makedirs(dest_dictionaries)
        dest_dictionaries_extras = join(packer_tmp_directory, 'dictionaries_extras')
        makedirs(dest_dictionaries_extras)
        for dependency_path in dependencies_path:
            dictionaries_dir = join(dependency_path, 'dictionaries')
            if isdir(dictionaries_dir):
                upgrade.load_xml_from_folders(dictionaries_dir,
                                              dest_dictionaries,
                                              RougailConfig['variable_namespace'],
                                              )
            extra_dir = join(dependency_path, 'extras', 'packer')
            if isdir(extra_dir):
                upgrade.load_xml_from_folders(extra_dir,
                                              dest_dictionaries_extras,
                                              'packer',
                                              )
        config['dictionaries_dir'] = [dest_dictionaries]
        config['extra_dictionaries'] = {'packer': [dest_dictionaries_extras]}
        return config


    def merge_funcs(self,
                    config: RougailConfig,
                    dependencies_path: list,
                    packer_tmp_directory: str,
                    ):
        functions = FUNCTIONS
        for dependency_path in dependencies_path:
            funcs_dir = join(dependency_path, 'funcs')
            if not isdir(funcs_dir):
                continue
            for func in listdir(funcs_dir):
                with open(join(funcs_dir, func), 'rb') as fh:
                    functions += fh.read()
        func_name = join(packer_tmp_directory, 'func.py')
        with open(func_name, 'wb') as fh:
            fh.write(functions)
        config['functions_file'] = func_name

    async def get_packer_information(self,
                                     config: RougailConfig,
                                     packer_tmp_directory: str,
                                     ) -> dict:
        eolobj = RougailConvert(config)
        xml = eolobj.save(join(packer_tmp_directory, 'tiramisu.py'))
        optiondescription = {}
        exec(xml, None, optiondescription)
        config = await Config(optiondescription['option_0'])
        return await config.option('packer').value.dict(flatten=True)

    def do_recipe_checksum(self,
                           path: str,
                           ) -> str:
        files = []
        root_len = len(path) + 1
        for dir_name, subdir_names, filenames in walk(path):
            subpath = dir_name[root_len:]
            for filename in filenames:
                with open(join(dir_name, filename), 'rb') as fh:
                    ctl_sum = sha512(fh.read()).hexdigest()
                files.append(f'{subpath}/{filename}/ctl_sum')
        return sha512('\n'.join(files).encode()).hexdigest()

    async def build(self) -> None:
        if isdir(self.tmp_dir):
            rmtree(self.tmp_dir)
        for application, dependencies_path in self.list_images():
            packer_tmp_directory = join(self.tmp_dir,
                                        application + '_' + str(time()),
                                        )
            makedirs(packer_tmp_directory)
            packer_tmp_os_directory = join(packer_tmp_directory, 'os')
            makedirs(packer_tmp_os_directory)
            packer_tmp_img_directory = join(packer_tmp_directory, 'image')
            makedirs(packer_tmp_img_directory)
            config = self.load_configuration(dependencies_path, packer_tmp_directory)
            self.merge_funcs(config, dependencies_path, packer_tmp_directory)
            packer_configuration = await self.get_packer_information(config, packer_tmp_directory)
            # OS image needed ?
            packer_dst_os_filename = join(self.image_dir,
                                          'os',
                                          packer_configuration['os_name'] + '_' + packer_configuration['os_version'] + '.img',
                                          )
            for dependency_path in dependencies_path:
                packer_directory = join(dependency_path,
                                        'packer',
                                        'os',
                                        )
                self.copy_files(packer_directory,
                                packer_tmp_os_directory,
                                )
                packer_directory = join(dependency_path,
                                        'packer',
                                        'image',
                                        )
                self.copy_files(packer_directory,
                                packer_tmp_img_directory,
                                )
            if not isfile(packer_dst_os_filename):
                self.build_image(packer_dst_os_filename,
                                 packer_tmp_os_directory,
                                 packer_configuration,
                                 )
            recipe_checksum = self.do_recipe_checksum(packer_tmp_img_directory)
            packer_dst_filename = join(self.image_dir,
                                       f'{recipe_checksum}.img',
                                       )
            self.build_image(packer_dst_filename,
                             packer_tmp_img_directory,
                             packer_configuration,
                             )

    def build_image(self,
                    packer_dst_filename: str,
                    tmp_directory: str,
                    packer_configuration: dict,
                    ) -> None:
        packer_configuration['tmp_directory'] = tmp_directory
        recipe = {'variables': packer_configuration}
        packer_filename = join(tmp_directory, PACKER_FILE_NAME)
        with open(packer_filename, 'r') as recipe_fd:
            for key, value in jload(recipe_fd).items():
                recipe[key] = value
        with open(packer_filename, 'w') as recipe_fd:
            jdump(recipe, recipe_fd, indent=2)
        proc = Popen(['packer', 'build', packer_filename],
                     #stdout=PIPE,
                     #stderr=PIPE,
                     cwd=tmp_directory,
                     )
        proc.wait()
        if proc.returncode:
            raise Exception(_(f'cannot build {packer_dst_filename} with {packer_filename}'))
        move(join(tmp_directory, 'image.img'), packer_dst_filename)
        move(join(tmp_directory, 'image.sha256'), f'{packer_dst_filename}.sha256')
        rmtree(tmp_directory)
        print(_(f'Image {packer_dst_filename} created'))
```
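Driving the builder is a matter of instantiating `Image` and awaiting `build()`; a sketch inferred from the class itself rather than from any documented entry point:

```python
import asyncio
from risotto.image import Image   # module path taken from the file header above

async def main():
    image = Image()               # defaults: /tmp/images for results, /tmp/packer as workspace
    await image.build()           # one packer run per application service flagged 'service: true'

asyncio.run(main())
```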
```diff
@@ -1,32 +1,87 @@
-from typing import Dict, Any
-from json import dumps
+from typing import Dict, Any, Optional
+from json import dumps, loads
+from asyncpg.exceptions import UndefinedTableError
+from datetime import datetime
+from asyncio import Lock

 from .context import Context
 from .utils import _
 from .config import get_config


+database_lock = Lock()
+
+
+LEVELS = ['Error', 'Info', 'Success', 'Started', 'Failure']
+
+
 class Logger:
     """ An object to manager log
     """
+    def __init__(self) -> None:
+        self.log_connection = None
+
+    async def get_connection(self,
+                             risotto_context: Context,
+                             ):
+        if not self.log_connection:
+            self.log_connection = await risotto_context.pool.acquire()
+            await self.log_connection.set_type_codec(
+                    'json',
+                    encoder=dumps,
+                    decoder=loads,
+                    schema='pg_catalog'
+            )
+        return self.log_connection
+
     async def insert(self,
                      msg: str,
-                     path: str,
-                     risotto_context: str,
+                     risotto_context: Context,
                      level: str,
-                     data: Any= None) -> None:
-        insert = 'INSERT INTO log(Msg, Path, Username, Level'
-        values = 'VALUES($1,$2,$3,$4'
-        args = [msg, path, risotto_context.username, level]
-        if data:
-            insert += ', Data'
-            values += ',$5'
-            args.append(dumps(data))
+                     kwargs: Any=None,
+                     start: bool=False,
+                     ) -> None:
+        uri = self._get_last_uri(risotto_context)
+        uris = " ".join(risotto_context.paths)
+        insert = 'INSERT INTO RisottoLog(Msg, URI, URIS, UserLogin, Status'
+        values = 'VALUES($1,$2,$3,$4,$5'
+        args = [msg, uri, uris, risotto_context.username, LEVELS.index(level)]
+        if kwargs:
+            insert += ', Kwargs'
+            values += ',$6'
+            args.append(dumps(kwargs))
+        context_id = risotto_context.context_id
+        if context_id is not None:
+            insert += ', ContextId'
+            if kwargs:
+                values += ',$7'
+            else:
+                values += ',$6'
+            args.append(context_id)

-        sql = insert + ') ' + values + ')'
-        await risotto_context.connection.fetch(sql, *args)
+        sql = insert + ') ' + values + ') RETURNING LogId'
+        try:
+            async with database_lock:
+                connection = await self.get_connection(risotto_context)
+                log_id = await connection.fetchval(sql, *args)
+                if context_id is None and start:
+                    risotto_context.context_id = log_id
+                if start:
+                    risotto_context.start_id = log_id
+        except UndefinedTableError as err:
+            raise Exception(_(f'cannot access to database ({err}), was the database really created?'))
+
+    def _get_last_uri(self,
+                      risotto_context: Context,
+                      ) -> str:
+        if risotto_context.paths:
+            return risotto_context.paths[-1]
+        return ''

```
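The queries above imply a `RisottoLog` table with at least the columns `LogId`, `Msg`, `URI`, `URIS`, `UserLogin`, `Status`, `Kwargs`, `ContextId`, `StopDate` and `Returns`. The sketch below creates such a table with asyncpg; only the column names come from this diff, the types and sizes are assumptions:

```python
import asyncio
import asyncpg

# Assumed schema -- adjust the types to the project's real DDL.
CREATE_RISOTTOLOG = """
CREATE TABLE RisottoLog (
    LogId     SERIAL PRIMARY KEY,
    Msg       VARCHAR(255),
    URI       TEXT,
    URIS      TEXT,
    UserLogin TEXT,
    Status    INTEGER,
    Kwargs    JSON,
    ContextId INTEGER,
    StopDate  TIMESTAMP,
    Returns   JSON
);
"""

async def main():
    connection = await asyncpg.connect(user='risotto', database='risotto')
    await connection.execute(CREATE_RISOTTOLOG)
    await connection.close()

asyncio.run(main())
```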
```diff
     def _get_message_paths(self,
-                           risotto_context: Context):
+                           risotto_context: Context,
+                           ) -> str:
+        if not risotto_context.paths:
+            return ''
         paths = risotto_context.paths
         if risotto_context.type:
             paths_msg = f' {risotto_context.type} '
@@ -43,44 +98,114 @@ class Logger:
                         risotto_context: Context,
                         arguments,
                         error: str,
-                        msg: str=''):
+                        msg: str='',
+                        ):
         """ send message when an error append
         """
         paths_msg = self._get_message_paths(risotto_context)
         print(_(f'{risotto_context.username}: ERROR: {error} ({paths_msg} with arguments "{arguments}": {msg})'))
         await self.insert(msg,
-                          paths_msg,
                           risotto_context,
                           'Error',
-                          arguments)
+                          arguments,
+                          )

     async def info_msg(self,
                        risotto_context: Context,
                        arguments: Dict,
-                       msg: str=''):
+                       msg: str='',
+                       ) -> None:
         """ send message with common information
         """
-        if risotto_context.paths:
-            paths_msg = self._get_message_paths(risotto_context)
-        else:
-            paths_msg = ''
+        paths_msg = self._get_message_paths(risotto_context)
         if get_config()['global']['debug']:
-            print(_(f'{risotto_context.username}: INFO:{paths_msg}'))
+            print(_(f'{risotto_context.username}: INFO:{paths_msg}: {msg}'))
         await self.insert(msg,
-                          paths_msg,
                           risotto_context,
                           'Info',
-                          arguments)
+                          arguments,
+                          )
+
+    async def start(self,
+                    risotto_context: Context,
+                    arguments: dict,
+                    msg: str,
+                    ) -> None:
+        paths_msg = self._get_message_paths(risotto_context)
+        if get_config()['global']['debug']:
+            if risotto_context.context_id != None:
+                context = f'({risotto_context.context_id})'
+            else:
+                context = ''
+            print(_(f'{risotto_context.username}: START{context}:{paths_msg}: {msg}'))
+        await self.insert(msg,
+                          risotto_context,
+                          'Started',
+                          arguments,
+                          start=True,
+                          )
+
+    async def success(self,
+                      risotto_context: Context,
+                      returns: Optional[dict]=None,
+                      ) -> None:
+        if get_config()['global']['debug']:
+            paths_msg = self._get_message_paths(risotto_context)
+            print(_(f'{risotto_context.username}: SUCCESS({risotto_context.context_id}):{paths_msg}'))
+        sql = """UPDATE RisottoLog
+                 SET StopDate = $2,
+                     Status = $3
+              """
+        args = [datetime.now(), LEVELS.index('Success')]
+        if returns:
+            sql += """, Returns = $4
+                   """
+            args.append(dumps(returns))
+        sql += """WHERE LogId = $1
+               """
+        async with database_lock:
+            connection = await self.get_connection(risotto_context)
+            await connection.execute(sql,
+                                     risotto_context.start_id,
+                                     *args,
+                                     )
+
+    async def failed(self,
+                     risotto_context: Context,
+                     err: str,
+                     ) -> None:
+        if get_config()['global']['debug']:
+            paths_msg = self._get_message_paths(risotto_context)
+            if risotto_context.context_id != None:
+                context = f'({risotto_context.context_id})'
+            else:
+                context = ''
+            print(_(f'{risotto_context.username}: FAILED({risotto_context.context_id}):{paths_msg}: {err}'))
+        sql = """UPDATE RisottoLog
+                 SET StopDate = $2,
+                     Status = $4,
+                     Msg = $3
+                 WHERE LogId = $1
+              """
+        async with database_lock:
+            connection = await self.get_connection(risotto_context)
+            await connection.execute(sql,
+                                     risotto_context.start_id,
+                                     datetime.now(),
+                                     err[:254],
+                                     LEVELS.index('Failure'),
+                                     )

     async def info(self,
                    risotto_context,
-                   msg):
+                   msg,
+                   ):
         if get_config()['global']['debug']:
             print(msg)
         await self.insert(msg,
-                          None,
                           risotto_context,
-                          'Info')
+                          'Info',
+                          )


 log = Logger()
```
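The new `start()`/`success()`/`failed()` trio turns each call into one `RisottoLog` row that is later updated in place. Roughly how a caller is expected to use it, assuming `risotto_context` is the context object produced by the dispatcher (`create_context()`):

```python
from risotto.logger import log   # module path assumed

async def run_call(risotto_context, arguments):
    await log.start(risotto_context, arguments, 'starting the call')
    try:
        returns = {'answer': 42}                 # placeholder for the real work
    except Exception as err:
        await log.failed(risotto_context, str(err))
        raise
    await log.success(risotto_context, returns)
    return returns
```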
```diff
@@ -1,22 +1,26 @@
-from collections import OrderedDict
-from os.path import join, basename, dirname
-from glob import glob
-
-from tiramisu import StrOption, IntOption, BoolOption, ChoiceOption, OptionDescription, SymLinkOption, \
-                     Calculation, Params, ParamOption, ParamValue, calc_value, calc_value_property_help, \
-                     groups, Option
-
-from yaml import load, SafeLoader
 from os import listdir
-from os.path import isfile
-from ..config import get_config
-from ..utils import _
+from os.path import join, basename, dirname, isfile
+from glob import glob
+from gettext import translation
+try:
+    from tiramisu3 import StrOption, IntOption, BoolOption, ChoiceOption, OptionDescription, \
+                          SymLinkOption, FloatOption, Calculation, Params, ParamOption, \
+                          ParamValue, calc_value, calc_value_property_help, groups, Option
+except:
+    from tiramisu import StrOption, IntOption, BoolOption, ChoiceOption, OptionDescription, \
+                         SymLinkOption, FloatOption, Calculation, Params, ParamOption, \
+                         ParamValue, calc_value, calc_value_property_help, groups, Option
+from yaml import load, SafeLoader
+
+
+from .config import get_config
+from .utils import _


 MESSAGE_ROOT_PATH = get_config()['global']['message_root_path']
-MODULE_NAME = get_config()['global']['module_name']
-CUSTOMTYPES = {}

 groups.addgroup('message')
+CUSTOMTYPES = None
+MESSAGE_TRANSLATION = None


 class DictOption(Option):
@@ -43,27 +47,35 @@ class MessageDefinition:
     A MessageDefinition is a representation of a message in the Zephir application messaging context
     """
     __slots__ = ('version',
-                 'uri',
+                 'message',
                  'description',
                  'parameters',
+                 'default_roles',
                  'errors',
                  'pattern',
                  'related',
-                 'response')
+                 'response',
+                 'options',
+                 )

-    def __init__(self, raw_def, message):
+    def __init__(self,
+                 raw_def,
+                 version,
+                 message):
         # default value for non mandatory key
-        self.version = u''
-        self.parameters = OrderedDict()
+        self.version = version
+        self.parameters = {}
         self.errors = []
         self.related = []
+        self.default_roles = []
         self.response = None
-        self.uri = message
+        self.message = message
+        self.options = None
+
         # loads yaml information into object
         for key, value in raw_def.items():
-            if key == 'uri':
-                raise Exception('uri in not allowed in message')
+            if key == 'message':
+                raise Exception('message in not allowed in message')
             if isinstance(value, str):
                 value = value.strip()
             if key == 'pattern':
@@ -72,13 +84,18 @@ class MessageDefinition:
             elif key == 'parameters':
                 if 'type' in value and isinstance(value['type'], str):
                     # should be a customtype
-                    value = CUSTOMTYPES[value['type']].properties
+                    value = CUSTOMTYPES[self.version][value['type']].properties
                 else:
-                    value = _parse_parameters(value)
+                    value = _parse_parameters(value,
+                                              self.version)
             elif key == 'response':
-                value = ResponseDefinition(value)
-            elif key == 'errors':
-                value = _parse_error_definition(value)
+                value = ResponseDefinition(value,
+                                           self.version)
+            elif key == 'description':
+                value = value.strip().rstrip()
+                if value.endswith('.'):
+                    value = value[:-1]
+                value = MESSAGE_TRANSLATION(value)
             setattr(self, key, value)
         # check mandatory keys
         for key in self.__slots__:
@@ -97,7 +114,10 @@ class ParameterDefinition:
                 'ref',
                 'shortarg')

-    def __init__(self, name, raw_def):
+    def __init__(self,
+                 name,
+                 version,
+                 raw_def):
         self.name = name
         # default value for non mandatory key
         self.help = None
@@ -113,14 +133,18 @@ class ParameterDefinition:
                     tvalue = value[2:]
                 else:
                     tvalue = value
-                if tvalue in CUSTOMTYPES:
+                if tvalue in CUSTOMTYPES[version]:
                     if value.startswith('[]'):
-                        value = '[]{}'.format(CUSTOMTYPES[tvalue].type)
+                        value = '[]{}'.format(CUSTOMTYPES[version][tvalue].type)
                     else:
-                        value = CUSTOMTYPES[value].type
+                        value = CUSTOMTYPES[version][value].type
                 else:
                     self._valid_type(value)
                 #self._valid_type(value)
+            elif key == 'description':
+                if value.endswith('.'):
+                    value = value[:-1]
+                value = MESSAGE_TRANSLATION(value)
             setattr(self, key, value)
         # check mandatory keys
         for key in self.__slots__:
@@ -133,7 +157,7 @@ class ParameterDefinition:
     def _valid_type(self, typ):
         if typ.startswith('[]'):
             self._valid_type(typ[2:])
-        elif typ not in ['Boolean', 'String', 'Number', 'File', 'Dict', 'Any']:
+        elif typ not in ['Boolean', 'String', 'Number', 'File', 'Dict', 'Any', 'Float']:
             raise Exception(_('unknown parameter type: {}').format(typ))


@@ -148,7 +172,9 @@ class ResponseDefinition:
                 'required',
                 'multi')

-    def __init__(self, responses):
+    def __init__(self,
+                 responses,
+                 version):
         self.ref = None
         self.parameters = None
         self.multi = False
@@ -162,15 +188,18 @@ class ResponseDefinition:
                     self.multi = True
                 else:
                     tvalue = value
-                if tvalue in CUSTOMTYPES:
-                    self.parameters = CUSTOMTYPES[tvalue].properties
-                    self.required = CUSTOMTYPES[tvalue].required
+                if tvalue in CUSTOMTYPES[version]:
+                    self.parameters = CUSTOMTYPES[version][tvalue].properties
+                    self.required = CUSTOMTYPES[version][tvalue].required
                     if value.startswith('[]'):
-                        value = '[]{}'.format(CUSTOMTYPES[tvalue].type)
+                        value = '[]{}'.format(CUSTOMTYPES[version][tvalue].type)
                     else:
-                        value = CUSTOMTYPES[value].type
+                        value = CUSTOMTYPES[version][tvalue].type
+                    self.description = CUSTOMTYPES[version][tvalue].description
                 else:
-                    self._valid_type(value)
+                    raise Exception('only customtype is supported in response')
+            elif key == 'description':
+                raise Exception('description is not allowed in response')
             setattr(self, key, value)
         # check mandatory keys
         for key in self.__slots__:
@@ -179,87 +208,68 @@ class ResponseDefinition:
         except AttributeError:
             raise Exception(_('mandatory key not set {}').format(key))

-    def _valid_type(self, typ):
-        if typ.startswith('[]'):
-            self._valid_type(typ[2:])
-        elif typ not in ['Boolean', 'String', 'Number', 'File', 'Dict']:
-            raise Exception(_('unknown parameter type: {}').format(typ))
-
-
-class ErrorDefinition:
-    """
-    An ErrorDefinition is a representation of an error in the Zephir application messaging context
-    """
-    __slots__ = ('uri',)
-
-    def __init__(self, raw_err):
-        extra_keys = set(raw_err) - set(self.__slots__)
-        if extra_keys:
-            raise Exception(_('extra keys for errors: {}').format(extra_keys))
-        self.uri = raw_err['uri']
-
-
-def _parse_error_definition(raw_defs):
-    new_value = []
-    for raw_err in raw_defs:
-        new_value.append(ErrorDefinition(raw_err))
-    return new_value
-
-
-def _parse_parameters(raw_defs):
-    parameters = OrderedDict()
+def _parse_parameters(raw_defs,
+                      version):
+    parameters = {}
     for name, raw_def in raw_defs.items():
-        parameters[name] = ParameterDefinition(name, raw_def)
+        parameters[name] = ParameterDefinition(name,
+                                               version,
+                                               raw_def)
     return parameters


```
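For reference, a hypothetical message definition limited to the keys the parser above actually reads (`pattern`, `description`, `parameters`, `response`, `default_roles`, `related`); the message content and the `Server` custom type are invented:

```python
from yaml import load, SafeLoader

# Hypothetical <module>/messages/<name>.yml content.
raw_def = load("""
description: Describe one server
pattern: rpc
default_roles:
  - administrator
parameters:
  server_name:
    type: String
    description: Name of the server
    shortarg: n
response:
  type: Server
""", Loader=SafeLoader)
# Once get_messages() has initialised the custom types and the translation,
# MessageDefinition(raw_def, 'v1', 'example.server.describe') applies the
# per-key handling shown above.
```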
```diff
-def parse_definition(filecontent: bytes,
-                     message: str):
-    return MessageDefinition(load(filecontent, Loader=SafeLoader), message)
-
-
-def is_message_defined(uri):
-    version, message = split_message_uri(uri)
-    path = get_message_file_path(version, message)
-    return isfile(path)
-
-
-def get_message(uri):
-    load_customtypes()
+def get_message(uri: str,
+                current_module_names: str,
+                ) -> MessageDefinition:
     try:
-        version, message = split_message_uri(uri)
-        path = get_message_file_path(version, message)
+        version, message = uri.split('.', 1)
+        path = get_message_file_path(version,
+                                     message,
+                                     current_module_names)
         with open(path, "r") as message_file:
-            message_content = parse_definition(message_file.read(), message)
-            message_content.version = version
-            return message_content
+            return MessageDefinition(load(message_file.read(), Loader=SafeLoader),
+                                     version,
+                                     message)
     except Exception as err:
         import traceback
         traceback.print_exc()
-        raise Exception(_('cannot parse message {}: {}').format(uri, str(err)))
+        raise Exception(_(f'cannot parse message {uri}: {err}'))


-def split_message_uri(uri):
-    return uri.split('.', 1)
-
-
-def get_message_file_path(version, message):
+def get_message_file_path(version,
+                          message,
+                          current_module_names):
     module_name, filename = message.split('.', 1)
-    if module_name != MODULE_NAME:
-        raise Exception(f'should only load message for {MODULE_NAME}, not {message}')
-    return join(MESSAGE_ROOT_PATH, version, MODULE_NAME, 'messages', filename + '.yml')
+    if current_module_names and module_name not in current_module_names:
+        raise Exception(f'should only load message for {current_module_names}, not {message}')
+    return join(MESSAGE_ROOT_PATH, version, module_name, 'messages', filename + '.yml')


-def list_messages(uris):
-    versions = listdir(join(MESSAGE_ROOT_PATH))
+def list_messages(uris,
+                  current_module_names,
+                  current_version,
+                  ):
+    def get_module_paths(current_module_names):
+        if current_module_names is None:
+            current_module_names = listdir(join(MESSAGE_ROOT_PATH, version))
+        for module_name in current_module_names:
+            yield module_name, join(MESSAGE_ROOT_PATH, version, module_name, 'messages')
+
+    if current_version:
+        versions = [current_version]
+    else:
+        versions = listdir(join(MESSAGE_ROOT_PATH))
     versions.sort()
     for version in versions:
-        for message in listdir(join(MESSAGE_ROOT_PATH, version, MODULE_NAME, 'messages')):
-            if message.endswith('.yml'):
-                uri = version + '.' + MODULE_NAME + '.' + message.rsplit('.', 1)[0]
-                if uris is not None and uri not in uris:
-                    continue
-                yield uri
+        for module_name, message_path in get_module_paths(current_module_names):
+            for message in listdir(message_path):
+                if message.endswith('.yml'):
+                    uri = version + '.' + module_name + '.' + message.rsplit('.', 1)[0]
+                    # if uris is not None, return only is in uris' list
+                    if uris is not None and uri not in uris:
+                        continue
+                    yield uri


 class CustomParam:
@@ -283,6 +293,10 @@ class CustomParam:
                 value = self._convert_type(value, raw_def)
             elif key == 'items':
                 continue
+            elif key == 'description':
+                if value.endswith('.'):
+                    value = value[:-1]
+                value = MESSAGE_TRANSLATION(value)
             setattr(self, key, value)

         # check mandatory keys
@@ -299,8 +313,10 @@ class CustomParam:
                  'string': 'String',
                  'number': 'Number',
                  'object': 'Dict',
+                 'any': 'Any',
                  'array': 'Array',
-                 'file': 'File'}
+                 'file': 'File',
+                 'float': 'Float'}

         if typ not in list(types.keys()):
             # validate after
@@ -315,7 +331,7 @@ class CustomParam:


 def _parse_custom_params(raw_defs, required):
-    parameters = OrderedDict()
+    parameters = {}
     for name, raw_def in raw_defs.items():
         parameters[name] = CustomParam(name, raw_def, required)
     return parameters
@@ -341,6 +357,10 @@ class CustomType:
                 value = self._convert_type(value, raw_def)
             elif key == 'properties':
                 value = _parse_custom_params(value, raw_def.get('required', {}))
+            elif key == 'description':
+                if value.endswith('.'):
+                    value = value[:-1]
+                value = MESSAGE_TRANSLATION(value)
+
             setattr(self, key, value)
         # check mandatory keys
@@ -372,38 +392,30 @@ class CustomType:
         return self.title


-def load_customtypes():
-    if not CUSTOMTYPES:
-        versions = listdir(MESSAGE_ROOT_PATH)
-        versions.sort()
-        for version in versions:
-            for message in listdir(join(MESSAGE_ROOT_PATH, version, MODULE_NAME, 'types')):
+def load_customtypes() -> None:
+    versions = listdir(MESSAGE_ROOT_PATH)
+    versions.sort()
+    ret = {}
+    for version in versions:
+        if version not in ret:
+            ret[version] = {}
+        for current_module_name in listdir(join(MESSAGE_ROOT_PATH, version)):
+            types_path = join(MESSAGE_ROOT_PATH,
+                              version,
+                              current_module_name,
+                              'types')
+            for message in listdir(types_path):
                 if message.endswith('.yml'):
-                    path = join(MESSAGE_ROOT_PATH, version, MODULE_NAME, 'types', message)
+                    path = join(types_path, message)
+                    # remove extension
                     message = message.rsplit('.', 1)[0]
                     with open(path, "r") as message_file:
                         try:
-                            ret = CustomType(load(message_file, Loader=SafeLoader))
-                            CUSTOMTYPES[ret.getname()] = ret
+                            custom_type = CustomType(load(message_file, Loader=SafeLoader))
+                            ret[version][custom_type.getname()] = custom_type
                         except Exception as err:
-                            import traceback
-                            traceback.print_exc()
-                            raise Exception('{} for {}'.format(err, message))
-        for customtype in CUSTOMTYPES.values():
-            properties = {}
-            for key, value in customtype.properties.items():
-                type_ = value.type
-                if type_.startswith('[]'):
-                    ttype_ = type_[2:]
-                else:
-                    ttype_ = type_
-                if ttype_ in CUSTOMTYPES:
-                    if type_.startswith('[]'):
-                        raise Exception(_('cannot have []CustomType'))
-                    properties[key] = CUSTOMTYPES[ttype_]
-                else:
-                    properties[key] = value
-            customtype.properties = properties
+                            raise Exception(_(f'enable to load type "{message}": {err}'))
+    return ret


```
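`load_customtypes()` now returns a nested mapping keyed first by version, then by type name. A quick inspection sketch; the version `'v1'` and the type name `'Server'` are examples, and the module path is assumed:

```python
from risotto.message import load_customtypes   # module path assumed

customtypes = load_customtypes()
for version, types in customtypes.items():
    print(version, sorted(types))

# A single definition can then be fetched per version, e.g.:
# server_type = customtypes['v1']['Server']
# print(server_type.properties)
```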
```diff
 def _get_description(description,
@@ -415,26 +427,29 @@ def _get_description(description,
         doc = name
     if doc.endswith('.'):
         doc= description[:-1]
+    doc = MESSAGE_TRANSLATION(doc)
     return doc


 def _get_option(name,
                 arg,
-                file_path,
+                uri,
                 select_option,
-                optiondescription):
+                ):
     """generate option
     """
     props = []
     if not hasattr(arg, 'default'):
         props.append('mandatory')
-    props.append(Calculation(calc_value,
-                             Params(ParamValue('disabled'),
-                                    kwargs={'condition': ParamOption(select_option, todict=True),
-                                            'expected': ParamValue(optiondescription),
-                                            'reverse_condition': ParamValue(True)}),
-                             calc_value_property_help))
+    if select_option:
+        props.append(Calculation(calc_value,
+                                 Params(ParamValue('disabled'),
+                                        kwargs={'condition': ParamOption(select_option, todict=True),
+                                                'expected': ParamValue(uri),
+                                                'reverse_condition': ParamValue(True)}),
+                                 calc_value_property_help))
+
+    props.append('notunique')
     description = arg.description.strip().rstrip()
     kwargs = {'name': name,
               'doc': _get_description(description, name),
@@ -457,49 +472,47 @@ def _get_option(name,
         obj = IntOption(**kwargs)
     elif type_ == 'Boolean':
         obj = BoolOption(**kwargs)
+    elif type_ == 'Float':
+        obj = FloatOption(**kwargs)
     else:
-        raise Exception('unsupported type {} in {}'.format(type_, file_path))
+        raise Exception('unsupported type {} in {}'.format(type_, uri))
     obj.impl_set_information('ref', arg.ref)
     return obj


-def _parse_args(message_def,
-                options,
-                file_path,
-                needs,
-                select_option,
-                optiondescription,
-                load_shortarg):
+def get_options(message_def,
+                uri,
+                select_option,
+                load_shortarg,
+                ):
     """build option with args/kwargs
     """
-    new_options = OrderedDict()
+    options =[]
     for name, arg in message_def.parameters.items():
-        #new_options[name] = arg
-        # if arg.ref:
-        #     needs.setdefault(message_def.uri, {}).setdefault(arg.ref, []).append(name)
-        #for name, arg in new_options.items():
         current_opt = _get_option(name,
                                   arg,
-                                  file_path,
+                                  uri,
                                   select_option,
-                                  optiondescription)
+                                  )
         options.append(current_opt)
         if hasattr(arg, 'shortarg') and arg.shortarg and load_shortarg:
             options.append(SymLinkOption(arg.shortarg, current_opt))
+    return options


 def _parse_responses(message_def,
-                     file_path):
+                     uri,
+                     ):
     """build option with returns
     """
     if message_def.response.parameters is None:
-        raise Exception('uri "{}" did not returned any valid parameters.'.format(message_def.uri))
+        raise Exception(f'message "{message_def.message}" did not returned any valid parameters')

     options = []
     names = []
     for name, obj in message_def.response.parameters.items():
         if name in names:
-            raise Exception('multi response with name {} in {}'.format(name, file_path))
+            raise Exception(f'multi response with name "{name}" in "{uri}"')
         names.append(name)

         kwargs = {'name': name,
@@ -512,45 +525,29 @@ def _parse_responses(message_def,
                   'Number': IntOption,
                   'Boolean': BoolOption,
                   'Dict': DictOption,
+                  'Any': AnyOption,
+                  'Float': FloatOption,
                   # FIXME
                   'File': StrOption}.get(type_)
         if not option:
-            raise Exception(f'unknown param type {obj.type} in responses of message {message_def.uri}')
+            raise Exception(f'unknown param type {obj.type} in responses of message {message_def.message}')
         if hasattr(obj, 'default'):
             kwargs['default'] = obj.default
+            kwargs['properties'] = ('notunique',)
         else:
-            kwargs['properties'] = ('mandatory',)
+            kwargs['properties'] = ('mandatory', 'notunique')
         options.append(option(**kwargs))
-    od = OptionDescription(message_def.uri,
+    od = OptionDescription(uri,
                            message_def.response.description,
-                           options)
+                           options,
+                           )
     od.impl_set_information('multi', message_def.response.multi)
     return od


-def _getoptions_from_yml(message_def,
-                         version,
-                         optiondescriptions,
-                         file_path,
-                         needs,
-                         select_option,
-                         load_shortarg):
-    if message_def.pattern == 'event' and message_def.response:
-        raise Exception('event with response?: {}'.format(file_path))
-    if message_def.pattern == 'rpc' and not message_def.response:
-        print('rpc without response?: {}'.format(file_path))
-    options = []
-    # options = [StrOption('version',
-    #                      'version',
-    #                      version,
-    #                      properties=frozenset(['hidden']))]
-    _parse_args(message_def, options, file_path, needs, select_option, message_def.uri, load_shortarg)
-    name = message_def.uri
-    description = message_def.description.strip().rstrip()
-    optiondescriptions[name] = (description, options)
-
-
-def _get_root_option(select_option, optiondescriptions):
+def _get_root_option(select_option,
+                     optiondescriptions,
+                     ):
     """get root option
     """
     def _get_od(curr_ods):
@@ -589,36 +586,54 @@ def _get_root_option(select_option, optiondescriptions):
     return OptionDescription('root', 'root', options_obj)


-def get_messages(load_shortarg=False,
-                 uris=None):
+def get_messages(current_module_names,
+                 load_shortarg=False,
+                 current_version=None,
+                 uris=None,
+                 ):
     """generate description from yml files
     """
+    global MESSAGE_TRANSLATION, CUSTOMTYPES
+    if MESSAGE_TRANSLATION is None:
+        MESSAGE_TRANSLATION = translation('risotto-message', join(MESSAGE_ROOT_PATH, '..', 'locale')).gettext
+    if CUSTOMTYPES is None:
+        CUSTOMTYPES = load_customtypes()
     optiondescriptions = {}
     optiondescriptions_info = {}
-    needs = {}
-    messages = list(list_messages(uris))
+    messages = list(list_messages(uris,
+                                  current_module_names,
+                                  current_version,
+                                  ))
     messages.sort()
-    optiondescriptions_name = [message_name.split('.', 1)[1] for message_name in messages]
+    # optiondescriptions_name = [message_name.split('.', 1)[1] for message_name in messages]
     select_option = ChoiceOption('message',
                                  'Nom du message.',
-                                 tuple(optiondescriptions_name),
-                                 properties=frozenset(['mandatory', 'positional']))
-    for message_name in messages:
-        message_def = get_message(message_name)
-        optiondescriptions_info[message_def.uri] = {'pattern': message_def.pattern}
+                                 tuple(messages),
+                                 properties=frozenset(['mandatory', 'positional', 'notunique']))
+    for uri in messages:
+        message_def = get_message(uri,
+                                  current_module_names,
+                                  )
+        optiondescriptions_info[message_def.message] = {'pattern': message_def.pattern,
+                                                        'default_roles': message_def.default_roles,
+                                                        'version': message_def.version,
+                                                        }
         if message_def.pattern == 'rpc':
-            optiondescriptions_info[message_def.uri]['response'] = _parse_responses(message_def,
-                                                                                    message_name)
+            if not message_def.response:
+                raise Exception(f'rpc without response is not allowed {uri}')
+            optiondescriptions_info[message_def.message]['response'] = _parse_responses(message_def,
+                                                                                        uri,
+                                                                                        )
         elif message_def.response:
-            raise Exception(f'response not allowed for {message_def.uri}')
-        version = message_name.split('.')[0]
-        _getoptions_from_yml(message_def,
-                             version,
-                             optiondescriptions,
-                             message_name,
-                             needs,
-                             select_option,
-                             load_shortarg)
+            raise Exception(f'response is not allowed for {uri}')
+        message_def.options = get_options(message_def,
+                                          uri,
+                                          select_option,
+                                          load_shortarg,
+                                          )
+        optiondescriptions[uri] = (message_def.description, message_def.options)

-    root = _get_root_option(select_option, optiondescriptions)
+    root = _get_root_option(select_option,
+                            optiondescriptions,
+                            )
     return optiondescriptions_info, root
```
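`get_messages()` now returns the per-message metadata and the root tiramisu `OptionDescription`; the `api()` handler earlier in this diff consumes it roughly as sketched below (the module list and version are examples, the import path is assumed):

```python
try:
    from tiramisu3 import Config
except ImportError:
    from tiramisu import Config
from risotto.message import get_messages   # module path assumed

async def describe_messages():
    infos, root = get_messages(current_module_names=['provider'],   # example service list
                               load_shortarg=True,
                               current_version='v1',                # example version
                               uris=None)                           # None: no ACL filtering
    async with await Config(root) as config:
        await config.property.read_write()
        return await config.option.dict(remotable='none')
```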
```diff
@@ -1 +0,0 @@
-from .message import get_messages
```
```diff
@@ -1,9 +1,14 @@
-from tiramisu import Config
+try:
+    from tiramisu3 import Config
+except:
+    from tiramisu import Config
 from inspect import signature
-from typing import Callable, Optional
-import asyncpg
+from typing import Callable, Optional, List
+from asyncpg import create_pool
 from json import dumps, loads
+from pkg_resources import iter_entry_points
+from traceback import print_exc
+import risotto
 from .utils import _
 from .error import RegistrationError
 from .message import get_messages
@@ -12,8 +17,66 @@ from .config import get_config
 from .logger import log


+class Services():
+    services = {}
+    modules_loaded = False
+    services_loaded = False
+
+    def load_services(self):
+        for entry_point in iter_entry_points(group='risotto_services'):
+            self.services.setdefault(entry_point.name, {})
+        self.services_loaded = True
+
+    def load_modules(self,
+                     limit_services: Optional[List[str]]=None,
+                     ) -> None:
+        for entry_point in iter_entry_points(group='risotto_modules'):
+            service_name, module_name = entry_point.name.split('.')
+            if limit_services is None or service_name in limit_services:
+                self.services[service_name][module_name] = entry_point.load()
+        self.modules_loaded = True
+#
+#    def get_services(self):
+#        if not self.services_loaded:
+#            self.load_services()
+#        return [(service, getattr(self, service)) for service in self.services]
+
+    def get_modules(self,
+                    limit_services: Optional[List[str]]=None,
+                    ) -> List[str]:
+        if not self.modules_loaded:
+            self.load_modules(limit_services=limit_services)
+        return [(module + '.' + submodule, entry_point) for module, submodules in self.services.items() for submodule, entry_point in submodules.items()]
+
+    def get_services_list(self):
+        return self.services.keys()
+
+    def get_modules_list(self):
+        return [m for s in self.services for m in self.services[s]]
+
+    def link_to_dispatcher(self,
+                           dispatcher,
+                           validate: bool=True,
+                           test: bool=False,
+                           limit_services: Optional[List[str]]=None,
+                           ):
+        for submodule_name, module in self.get_modules(limit_services=limit_services):
+            dispatcher.set_module(submodule_name,
+                                  module,
+                                  test,
+                                  )
+        if validate:
+            dispatcher.validate()
+
+
+services = Services()
+services.load_services()
+setattr(risotto, 'services', services)
+
+
```
def register(uris: str,
|
def register(uris: str,
|
||||||
notification: str=None):
|
notification: str=None,
|
||||||
|
) -> None:
|
||||||
""" Decorator to register function to the dispatcher
|
""" Decorator to register function to the dispatcher
|
||||||
"""
|
"""
|
||||||
if not isinstance(uris, list):
|
if not isinstance(uris, list):
|
||||||
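
The new `Services` loader above discovers services and submodules through setuptools entry points (`risotto_services` and `risotto_modules`). Below is a minimal sketch of how a package could declare itself so that `load_services()` and `load_modules()` pick it up; only the entry-point group names and the `<service>.<submodule>` naming come from the code above, while the package and module names (`risotto-setting`, `risotto_setting.config`) are assumptions.

```python
# hypothetical setup.py for a service package; only the entry-point groups
# 'risotto_services' / 'risotto_modules' and the '<service>.<submodule>' entry
# names are taken from the Services loader above, the rest is illustrative.
from setuptools import setup, find_packages

setup(
    name='risotto-setting',                      # assumed package name
    version='0.1',
    packages=find_packages(),
    entry_points={
        # one entry per service: its name becomes a key of Services.services
        'risotto_services': [
            'setting = risotto_setting',
        ],
        # one entry per submodule, named '<service>.<submodule>':
        # load_modules() splits on '.' and stores entry_point.load()
        'risotto_modules': [
            'setting.config = risotto_setting.config',
        ],
    },
)
```

With such a declaration installed, `services.get_modules()` would return `('setting.config', <module>)` pairs and `link_to_dispatcher()` would hand each of them to `dispatcher.set_module()`.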
@@ -21,11 +84,11 @@ def register(uris: str,
 
     def decorator(function):
         for uri in uris:
-            version, message = uri.split('.', 1)
-            dispatcher.set_function(version,
-                                    message,
+            dispatcher.set_function(uri,
                                     notification,
-                                    function)
+                                    function,
+                                    function.__module__
+                                    )
     return decorator
 
 
@@ -35,51 +98,57 @@ class RegisterDispatcher:
         self.injected_self = {}
         # postgresql pool
         self.pool = None
+        # load tiramisu objects
+        self.risotto_modules = services.get_services_list()
+        messages, self.option = get_messages(self.risotto_modules)
         # list of uris with informations: {"v1": {"module_name.xxxxx": yyyyyy}}
         self.messages = {}
-        # load tiramisu objects
-        messages, self.option = get_messages()
-        version = get_config()['global']['version']
-        self.messages[version] = {}
         for tiramisu_message, obj in messages.items():
+            version = obj['version']
+            if version not in self.messages:
+                self.messages[version] = {}
+            obj['message'] = tiramisu_message
             self.messages[version][tiramisu_message] = obj
-        self.risotto_module = get_config()['global']['module_name']
 
     def get_function_args(self,
                           function: Callable):
-        # remove self
-        first_argument_index = 1
-        return [param.name for param in list(signature(function).parameters.values())[first_argument_index:]]
+        # remove self and risotto_context
+        first_argument_index = 2
+        return {param.name for param in list(signature(function).parameters.values())[first_argument_index:]}
 
+    async def get_message_args(self,
+                               message: str,
+                               version: str,
+                               ):
+        # load config
+        async with await Config(self.option, display_name=lambda self, dyn_name, suffix: self.impl_getname()) as config:
+            uri = f'{version}.{message}'
+            await config.property.read_write()
+            # set message to the message name
+            await config.option('message').value.set(uri)
+            # get message argument
+            dico = await config.option(uri).value.dict()
+            return set(dico.keys())
+
     async def valid_rpc_params(self,
                                version: str,
                                message: str,
                                function: Callable,
-                               module_name: str):
+                               module_name: str,
+                               ):
         """ parameters function must have strictly all arguments with the correct name
         """
-        async def get_message_args():
-            # load config
-            async with await Config(self.option) as config:
-                await config.property.read_write()
-                # set message to the uri name
-                await config.option('message').value.set(message)
-                # get message argument
-                dico = await config.option(message).value.dict()
-                return set(dico.keys())
-
-        def get_function_args():
-            function_args = self.get_function_args(function)
-            # risotto_context is a special argument, remove it
-            function_args = function_args[1:]
-            return set(function_args)
-
         # get message arguments
-        message_args = await get_message_args()
+        message_args = await self.get_message_args(message,
+                                                   version,
+                                                   )
         # get function arguments
-        function_args = get_function_args()
+        function_args = self.get_function_args(function)
         # compare message arguments with function parameter
         # it must not have more or less arguments
+        for arg in function_args - message_args:
+            if arg.startswith('_'):
+                message_args.add(arg)
         if message_args != function_args:
             # raise if arguments are not equal
             msg = []
@@ -97,29 +166,16 @@ class RegisterDispatcher:
                                  version: str,
                                  message: str,
                                  function: Callable,
-                                 module_name: str):
+                                 module_name: str,
+                                 ):
         """ parameters function validation for event messages
         """
-        async def get_message_args():
-            # load config
-            async with await Config(self.option) as config:
-                await config.property.read_write()
-                # set message to the message name
-                await config.option('message').value.set(message)
-                # get message argument
-                dico = await config.option(message).value.dict()
-                return set(dico.keys())
-
-        def get_function_args():
-            function_args = self.get_function_args(function)
-            # risotto_context is a special argument, remove it
-            function_args = function_args[1:]
-            return set(function_args)
-
         # get message arguments
-        message_args = await get_message_args()
+        message_args = await self.get_message_args(message,
+                                                   version,
+                                                   )
         # get function arguments
-        function_args = get_function_args()
+        function_args = self.get_function_args(function)
         # compare message arguments with function parameter
         # it can have less arguments but not more
         extra_function_args = function_args - message_args
@@ -130,34 +186,36 @@ class RegisterDispatcher:
             raise RegistrationError(_(f'error with {module_name}.{function_name} arguments: {msg}'))
 
     def set_function(self,
-                     version: str,
-                     message: str,
+                     uri: str,
                      notification: str,
-                     function: Callable):
+                     function: Callable,
+                     full_module_name: str,
+                     ):
         """ register a function to an URI
         URI is a message
         """
+        version, message = uri.split('.', 1)
         # check if message exists
         if message not in self.messages[version]:
             raise RegistrationError(_(f'the message {message} not exists'))
 
-        # xxx module can only be register with v1.xxxx..... message
-        module_name = function.__module__.split('.')[-2]
-        message_namespace = message.split('.', 1)[0]
-        message_risotto_module, message_namespace, message_name = message.split('.', 2)
-        if message_risotto_module != self.risotto_module:
-            raise RegistrationError(_(f'cannot registered the "{message}" is not "{self.risotto_module}"'))
-        if self.messages[version][message]['pattern'] == 'rpc' and message_namespace != module_name:
-            raise RegistrationError(_(f'cannot registered the "{message}" message in module "{module_name}"'))
+        # xxx submodule can only be register with v1.yyy.xxx..... message
+        risotto_module_name, submodule_name = full_module_name.split('.')[-3:-1]
+        module_name = risotto_module_name.split('_')[-1]
+        message_module, message_submodule, message_name = message.split('.', 2)
+        if message_module not in self.risotto_modules:
+            raise RegistrationError(_(f'cannot registered the "{message}" is not "{self.risotto_modules}"'))
+        if self.messages[version][message]['pattern'] == 'rpc' and \
+                module_name != message_module and \
+                message_submodule != submodule_name:
+            raise RegistrationError(_(f'cannot registered the "{message}" message in submodule "{module_name}.{submodule_name}"'))
 
         # True if first argument is the risotto_context
         function_args = self.get_function_args(function)
-        function_args.pop(0)
 
         # check if already register
         if 'function' in self.messages[version][message]:
-            raise RegistrationError(_(f'uri {version}.{message} already registered'))
+            raise RegistrationError(_(f'uri {uri} already registered'))
 
         # register
         if self.messages[version][message]['pattern'] == 'rpc':
@@ -166,19 +224,24 @@ class RegisterDispatcher:
             register = self.register_event
         register(version,
                  message,
-                 module_name,
+                 f'{module_name}.{submodule_name}',
+                 full_module_name,
                  function,
                  function_args,
-                 notification)
+                 notification,
+                 )
 
     def register_rpc(self,
                      version: str,
                      message: str,
                      module_name: str,
+                     full_module_name: str,
                      function: Callable,
                      function_args: list,
-                     notification: Optional[str]):
+                     notification: Optional[str],
+                     ):
         self.messages[version][message]['module'] = module_name
+        self.messages[version][message]['full_module_name'] = full_module_name
         self.messages[version][message]['function'] = function
         self.messages[version][message]['arguments'] = function_args
         if notification:
@@ -188,26 +251,34 @@ class RegisterDispatcher:
                        version: str,
                        message: str,
                        module_name: str,
+                       full_module_name: str,
                        function: Callable,
                        function_args: list,
-                       notification: Optional[str]):
+                       notification: Optional[str],
+                       ):
         if 'functions' not in self.messages[version][message]:
             self.messages[version][message]['functions'] = []
 
         dico = {'module': module_name,
+                'full_module_name': full_module_name,
                 'function': function,
-                'arguments': function_args}
+                'arguments': function_args,
+                }
         if notification and notification:
             dico['notification'] = notification
         self.messages[version][message]['functions'].append(dico)
 
-    def set_module(self, module_name, module, test):
+    def set_module(self,
+                   submodule_name,
+                   module,
+                   test,
+                   ):
         """ register and instanciate a new module
         """
         try:
-            self.injected_self[module_name] = module.Risotto(test)
+            self.injected_self[submodule_name] = module.Risotto(test)
         except AttributeError as err:
-            raise RegistrationError(_(f'unable to register the module {module_name}, this module must have Risotto class'))
+            print(_(f'unable to register the module {submodule_name}, this module must have Risotto class'))
 
     def validate(self):
         """ check if all messages have a function
@@ -217,13 +288,15 @@ class RegisterDispatcher:
             for message, message_obj in messages.items():
                 if not 'functions' in message_obj and not 'function' in message_obj:
                     if message_obj['pattern'] == 'event':
-                        print(f'{message} prêche dans le désert')
+                        print(f'{version}.{message} prêche dans le désert')
                     else:
-                        missing_messages.append(message)
+                        missing_messages.append(f'{version}.{message}')
         if missing_messages:
-            raise RegistrationError(_(f'missing uri {missing_messages}'))
+            raise RegistrationError(_(f'no matching function for uri {missing_messages}'))
 
-    async def on_join(self):
+    async def on_join(self,
+                      truncate: bool=False,
+                      ) -> None:
         internal_user = get_config()['global']['internal_user']
         async with self.pool.acquire() as connection:
             await connection.set_type_codec(
@@ -232,52 +305,53 @@ class RegisterDispatcher:
                 decoder=loads,
                 schema='pg_catalog'
             )
+            if truncate:
+                async with connection.transaction():
+                    await connection.execute('TRUNCATE InfraServer, InfraSite, InfraZone, Log, ProviderDeployment, ProviderFactoryCluster, ProviderFactoryClusterNode, SettingApplicationservice, SettingApplicationServiceDependency, SettingRelease, SettingServer, SettingServermodel, SettingSource, UserRole, UserRoleURI, UserURI, UserUser, InfraServermodel, ProviderZone, ProviderServer, ProviderSource, ProviderApplicationservice, ProviderServermodel')
             async with connection.transaction():
-                for module_name, module in self.injected_self.items():
+                for submodule_name, module in self.injected_self.items():
                     risotto_context = Context()
                     risotto_context.username = internal_user
-                    risotto_context.paths.append(f'{module_name}.on_join')
+                    risotto_context.paths.append(f'internal.{submodule_name}.on_join')
                     risotto_context.type = None
+                    risotto_context.pool = self.pool
                     risotto_context.connection = connection
-                    info_msg = _(f'in module {module_name}.on_join')
+                    risotto_context.module = submodule_name.split('.', 1)[0]
+                    info_msg = _(f'in function risotto_{submodule_name}.on_join')
                     await log.info_msg(risotto_context,
                                        None,
                                        info_msg)
-                    await module.on_join(risotto_context)
-
-    async def insert_message(self,
-                             connection,
-                             uri):
-        sql = """INSERT INTO URI(URIName) VALUES ($1)
-                 ON CONFLICT (URIName) DO NOTHING
-              """
-        await connection.fetchval(sql,
-                                  uri)
+                    try:
+                        await module.on_join(risotto_context)
+                    except Exception as err:
+                        if get_config()['global']['debug']:
+                            print_exc()
+                        msg = _(f'on_join returns an error in module {submodule_name}: {err}')
+                        await log.error_msg(risotto_context, {}, msg)
 
     async def load(self):
         # valid function's arguments
         db_conf = get_config()['database']['dsn']
-        self.pool = await asyncpg.create_pool(db_conf)
+        self.pool = await create_pool(db_conf)
         async with self.pool.acquire() as connection:
             async with connection.transaction():
                 for version, messages in self.messages.items():
                     for message, message_infos in messages.items():
                         if message_infos['pattern'] == 'rpc':
-                            module_name = message_infos['module']
-                            function = message_infos['function']
-                            await self.valid_rpc_params(version,
-                                                        message,
-                                                        function,
-                                                        module_name)
-                        else:
-                            if 'functions' in message_infos:
-                                for function_infos in message_infos['functions']:
-                                    module_name = function_infos['module']
-                                    function = function_infos['function']
-                                    await self.valid_event_params(version,
-                                                                  message,
-                                                                  function,
-                                                                  module_name)
-                        await self.insert_message(connection,
-                                                  f'{version}.{message}')
+                            # module not available during test
+                            if 'module' in message_infos:
+                                module_name = message_infos['module']
+                                function = message_infos['function']
+                                await self.valid_rpc_params(version,
+                                                            message,
+                                                            function,
+                                                            module_name)
+                        elif 'functions' in message_infos:
+                            # event with functions
+                            for function_infos in message_infos['functions']:
+                                module_name = function_infos['module']
+                                function = function_infos['function']
+                                await self.valid_event_params(version,
+                                                              message,
+                                                              function,
+                                                              module_name)
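
The `on_join()` and `load()` changes above rely on a standard asyncpg pattern: one connection pool, a JSON type codec registered with `dumps`/`loads`, and a transaction wrapping the whole startup sequence. A self-contained sketch of that pattern, with a placeholder DSN, is shown below.

```python
# minimal asyncpg sketch mirroring the pool / set_type_codec / transaction
# usage above; the DSN is a placeholder, not the project's real configuration.
import asyncio
from json import dumps, loads

from asyncpg import create_pool


async def main():
    pool = await create_pool('postgres://risotto:risotto@localhost/risotto')  # assumed DSN
    async with pool.acquire() as connection:
        # let asyncpg encode/decode json parameters and columns transparently
        await connection.set_type_codec('json',
                                        encoder=dumps,
                                        decoder=loads,
                                        schema='pg_catalog')
        async with connection.transaction():
            # everything in this block is committed or rolled back as a whole
            value = await connection.fetchval('SELECT $1::json', {'status': 'ok'})
            print(value)  # -> {'status': 'ok'}
    await pool.close()


asyncio.run(main())
```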
@@ -1,59 +0,0 @@
(deleted file: the Remote helper class, which fetched a remote submodule's tiramisu_api Config over aiohttp and posted call_or_publish() payloads to http://{domain_name}:8080/api/{version}/{message})
@@ -1,20 +1,33 @@
 from os import listdir
 from os.path import isdir, isfile, dirname, abspath, basename, join
 from importlib import import_module
-from ..dispatcher import dispatcher
+# from ..dispatcher import dispatcher
 
 
-def load_services(modules=None,
-                  validate: bool=True,
-                  test: bool=False):
+def list_modules():
+    abs_here = dirname(abspath(__file__))
+    here = basename(abs_here)
+    return [name for name in listdir(abs_here) if not name.startswith('__') and isdir(join(abs_here, name))]
+
+
+def load_submodules(dispatcher,
+                    modules=None,
+                    validate: bool=True,
+                    test: bool=False):
     abs_here = dirname(abspath(__file__))
     here = basename(abs_here)
     module = basename(dirname(abs_here))
     if not modules:
         modules = listdir(abs_here)
-    for filename in modules:
-        absfilename = join(abs_here, filename)
-        if isdir(absfilename) and isfile(join(absfilename, '__init__.py')):
-            dispatcher.set_module(filename, import_module(f'.{here}.{filename}', module), test)
+    for module in modules:
+        absmodule = join(abs_here, module)
+        if isdir(absmodule):
+            for submodule in listdir(absmodule):
+                absfilename = join(absmodule, submodule)
+                if isdir(absfilename) and isfile(join(absfilename, '__init__.py')):
+                    dispatcher.set_module(submodule,
+                                          import_module(f'.{here}.{module}.{submodule}',
+                                                        f'risotto'),
+                                          test)
     if validate:
         dispatcher.validate()
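
For the code path that still walks the `services/` tree, the reworked `load_submodules()` above can be driven directly. A hedged usage sketch follows; the `risotto.dispatcher` import path and the `'setting'` service name are assumptions.

```python
# hypothetical wiring of the filesystem-based loader above; assumes the
# dispatcher singleton lives in risotto.dispatcher and that a 'setting'
# service directory exists under services/.
from risotto.dispatcher import dispatcher
from risotto.services import list_modules, load_submodules

print(list_modules())                  # e.g. ['setting', ...] depending on the tree
load_submodules(dispatcher,
                modules=['setting'],   # restrict loading to one service
                validate=False,        # skip dispatcher.validate() for partial loads
                test=True)
```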
@@ -1 +0,0 @@
-from .applicationservice import Risotto
@@ -1,248 +0,0 @@
(deleted file: the application service controller — a Risotto(Controller) class registering v1.setting.applicationservice.create, .dependency.add, .dataset.updated, .get_by_id and .describe, backed by the Applicationservice and ApplicationserviceDependency tables)
@@ -1 +0,0 @@
-from .config import Risotto
@@ -1,428 +0,0 @@
(deleted file: the configuration controller — built rougail/tiramisu MixConfig objects per servermodel and per-server Config objects, and registered v1.setting.server.created/.deleted, v1.setting.servermodel.created/.updated, v1.setting.config.configuration.server.get and .deploy)
@@ -1 +0,0 @@
-from .server import Risotto
@@ -1,71 +0,0 @@
from typing import Dict
from tiramisu import DomainnameOption

from ...controller import Controller
from ...register import register
from ...context import Context
from ...config import get_config
from ...utils import _


class Risotto(Controller):
    def __init__(self,
                 test: bool) -> None:
        self.internal_source_name = get_config()['servermodel']['internal_source']

    @register('v1.setting.server.list')
    async def server_list(self,
                          risotto_context: Context) -> Dict:
        sql = '''
        SELECT ServerId as server_id, ServerName as server_name, ServerDescription as server_description, ServerServermodelId as server_servermodel_id
        FROM Server
        '''
        servers = await risotto_context.connection.fetch(sql)
        return [dict(r) for r in servers]

    @register('v1.setting.server.create', 'v1.setting.server.created')
    async def server_create(self,
                            risotto_context: Context,
                            server_name: str,
                            server_description: str,
                            servermodel_name: str,
                            release_distribution: str) -> Dict:
        DomainnameOption('server_name', _('Server name'), server_name)
        servermodel = await self.call('v1.setting.servermodel.describe',
                                      risotto_context,
                                      servermodel_name=servermodel_name,
                                      source_name=self.internal_source_name,
                                      release_distribution=release_distribution)
        server_insert = """INSERT INTO Server(ServerName, ServerDescription, ServerServermodelId)
                           VALUES ($1,$2,$3)
                           RETURNING ServerId
                        """
        server_id = await risotto_context.connection.fetchval(server_insert,
                                                              server_name,
                                                              server_description,
                                                              servermodel['servermodel_id'])
        await self.call('v1.setting.user.role.create',
                        risotto_context,
                        user_login=risotto_context.username,
                        role_name='server_rw',
                        role_attribute='Server.ServerName',
                        role_attribute_value=server_name)
        return {'server_id': server_id,
                'server_name': server_name,
                'server_description': server_description,
                'server_servermodel_id': servermodel['servermodel_id']}

    @register('v1.setting.server.describe')
    async def server_describe(self,
                              risotto_context: Context,
                              server_name: str) -> Dict:
        sql = '''
        SELECT ServerId as server_id, ServerName as server_name, ServerDescription as server_description, ServerServermodelId as server_servermodel_id
        FROM Server
        WHERE ServerName = $1
        '''
        server = await risotto_context.connection.fetchrow(sql,
                                                           server_name)
        if not server:
            raise Exception(_(f'unable to find server with name {server_name}'))
        return dict(server)

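The `server_create` handler above validates the requested server name simply by instantiating a tiramisu `DomainnameOption` with it. A minimal standalone sketch of that trick, assuming (not confirmed here) that tiramisu validates option defaults at construction time and raises `ValueError` on an invalid value:

```python
# Sketch only: reuse tiramisu's DomainnameOption as a name validator
# (assumption: an invalid default value raises ValueError at construction).
from tiramisu import DomainnameOption

def is_valid_server_name(server_name: str) -> bool:
    try:
        DomainnameOption('server_name', 'Server name', server_name)
    except ValueError:
        return False
    return True

print(is_valid_server_name('web1.example.net'))   # expected: True
print(is_valid_server_name('not a valid name!'))  # expected: False
```
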
@@ -1 +0,0 @@
from .servermodel import Risotto

@ -1,172 +0,0 @@
|
|||||||
from os.path import join, isdir, isfile
|
|
||||||
from os import listdir, makedirs
|
|
||||||
from shutil import rmtree, copyfile
|
|
||||||
from typing import Dict, List, Optional
|
|
||||||
from rougail import CreoleObjSpace
|
|
||||||
from rougail.config import dtdfilename
|
|
||||||
from ...controller import Controller
|
|
||||||
from ...context import Context
|
|
||||||
from ...logger import log
|
|
||||||
from ...utils import _
|
|
||||||
|
|
||||||
|
|
||||||
class Generator(Controller):
|
|
||||||
async def generate(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
servermodel_name: str,
|
|
||||||
servermodel_id: int,
|
|
||||||
dependencies: List[int],
|
|
||||||
generate_cache: Optional[Dict]=None) -> None:
|
|
||||||
if generate_cache is None:
|
|
||||||
generate_cache = {'applicationservice': {},
|
|
||||||
'release_id': {}}
|
|
||||||
await self.servermodel_gen_funcs(servermodel_name,
|
|
||||||
servermodel_id,
|
|
||||||
dependencies,
|
|
||||||
generate_cache,
|
|
||||||
risotto_context)
|
|
||||||
await self.servermodel_gen_schema(servermodel_name,
|
|
||||||
servermodel_id,
|
|
||||||
dependencies,
|
|
||||||
generate_cache,
|
|
||||||
risotto_context)
|
|
||||||
await self.servermodel_copy_templates(servermodel_name,
|
|
||||||
servermodel_id,
|
|
||||||
dependencies,
|
|
||||||
generate_cache,
|
|
||||||
risotto_context)
|
|
||||||
|
|
||||||
async def servermodel_gen_funcs(self,
|
|
||||||
servermodel_name: str,
|
|
||||||
servermodel_id: int,
|
|
||||||
dependencies: Dict,
|
|
||||||
generate_cache: Dict,
|
|
||||||
risotto_context: Context) -> None:
|
|
||||||
as_names = []
|
|
||||||
dest_file = self.get_servermodel_cache(servermodel_id, 'funcs.py')
|
|
||||||
with open(dest_file, 'wb') as funcs:
|
|
||||||
funcs.write(b'from tiramisu import valid_network_netmask, valid_ip_netmask, valid_broadcast, valid_in_network, valid_not_equal as valid_differ, valid_not_equal, calc_value\n\n')
|
|
||||||
for dependency in dependencies:
|
|
||||||
if dependency not in generate_cache['applicationservice']:
|
|
||||||
applicationservice = await self.call('v1.setting.applicationservice.get_by_id',
|
|
||||||
risotto_context,
|
|
||||||
applicationservice_id=dependency)
|
|
||||||
generate_cache['applicationservice'][dependency] = (applicationservice['applicationservice_name'],
|
|
||||||
applicationservice['applicationservice_release_id'])
|
|
||||||
applicationservice_name, release_id = generate_cache['applicationservice'][dependency]
|
|
||||||
if release_id not in generate_cache['release_id']:
|
|
||||||
release = await self.call('v1.setting.source.release.get_by_id',
|
|
||||||
risotto_context,
|
|
||||||
release_id=release_id)
|
|
||||||
generate_cache['release_id'][release_id] = (release['source_name'],
|
|
||||||
release['release_name'])
|
|
||||||
source_name, release_name = generate_cache['release_id'][release_id]
|
|
||||||
path = join(self.source_root_path,
|
|
||||||
source_name,
|
|
||||||
release_name,
|
|
||||||
'applicationservice',
|
|
||||||
applicationservice_name,
|
|
||||||
'funcs')
|
|
||||||
if isdir(path):
|
|
||||||
as_names.append(applicationservice_name)
|
|
||||||
for fil in listdir(path):
|
|
||||||
if not fil.endswith('.py'):
|
|
||||||
continue
|
|
||||||
fil_path = join(path, fil)
|
|
||||||
with open(fil_path, 'rb') as fh:
|
|
||||||
funcs.write(f'# {fil_path}\n'.encode())
|
|
||||||
funcs.write(fh.read())
|
|
||||||
funcs.write(b'\n')
|
|
||||||
|
|
||||||
as_names_str = '", "'.join(as_names)
|
|
||||||
await log.info(risotto_context,
|
|
||||||
_(f'gen funcs for "{servermodel_name}" with application services "{as_names_str}"'))
|
|
||||||
|
|
||||||
async def servermodel_gen_schema(self,
|
|
||||||
servermodel_name: str,
|
|
||||||
servermodel_id: int,
|
|
||||||
dependencies: Dict,
|
|
||||||
generate_cache: Dict,
|
|
||||||
risotto_context: Context) -> None:
|
|
||||||
paths = []
|
|
||||||
extras = []
|
|
||||||
as_names = set()
|
|
||||||
for dependency in dependencies:
|
|
||||||
applicationservice_name, release_id = generate_cache['applicationservice'][dependency]
|
|
||||||
source_name, release_name = generate_cache['release_id'][release_id]
|
|
||||||
# load creole dictionaries
|
|
||||||
path = join(self.source_root_path,
|
|
||||||
source_name,
|
|
||||||
release_name,
|
|
||||||
'applicationservice',
|
|
||||||
applicationservice_name,
|
|
||||||
'dictionaries')
|
|
||||||
if isdir(path):
|
|
||||||
as_names.add(applicationservice_name)
|
|
||||||
paths.append(path)
|
|
||||||
|
|
||||||
# load extra dictionaries
|
|
||||||
path = join(self.source_root_path,
|
|
||||||
source_name,
|
|
||||||
release_name,
|
|
||||||
'applicationservice',
|
|
||||||
applicationservice_name,
|
|
||||||
'extras')
|
|
||||||
if isdir(path):
|
|
||||||
for namespace in listdir(path):
|
|
||||||
extra_dir = join(path, namespace)
|
|
||||||
if not isdir(extra_dir):
|
|
||||||
continue
|
|
||||||
as_names.add(applicationservice_name)
|
|
||||||
extras.append((namespace, [extra_dir]))
|
|
||||||
eolobj = CreoleObjSpace(dtdfilename)
|
|
||||||
as_names_str = '", "'.join(as_names)
|
|
||||||
await log.info(risotto_context,
|
|
||||||
_(f'gen schema for "{servermodel_name}" with application services "{as_names_str}"'))
|
|
||||||
eolobj.create_or_populate_from_xml('creole', paths)
|
|
||||||
for extra in extras:
|
|
||||||
eolobj.create_or_populate_from_xml(extra[0], extra[1])
|
|
||||||
# FIXME extra
|
|
||||||
funcs_file = self.get_servermodel_cache(servermodel_id, 'funcs.py')
|
|
||||||
eolobj.space_visitor(funcs_file)
|
|
||||||
dest_dir = self.get_servermodel_cache(servermodel_id, 'dictionaries.xml')
|
|
||||||
eolobj.save(dest_dir)
|
|
||||||
|
|
||||||
def get_servermodel_cache(self,
|
|
||||||
servermodel_id: int,
|
|
||||||
subdir: Optional[str]=None) -> str:
|
|
||||||
if subdir:
|
|
||||||
return join(self.cache_root_path, str(servermodel_id), subdir)
|
|
||||||
return join(self.cache_root_path, str(servermodel_id))
|
|
||||||
|
|
||||||
async def servermodel_copy_templates(self,
|
|
||||||
servermodel_name: str,
|
|
||||||
servermodel_id: int,
|
|
||||||
dependencies: Dict,
|
|
||||||
generate_cache: Dict,
|
|
||||||
risotto_context: Context) -> None:
|
|
||||||
as_names = []
|
|
||||||
dest_dir = self.get_servermodel_cache(servermodel_id, 'templates')
|
|
||||||
if isdir(dest_dir):
|
|
||||||
rmtree(dest_dir)
|
|
||||||
makedirs(dest_dir)
|
|
||||||
for dependency in dependencies:
|
|
||||||
applicationservice_name, release_id = generate_cache['applicationservice'][dependency]
|
|
||||||
source_name, release_name = generate_cache['release_id'][release_id]
|
|
||||||
path = join(self.source_root_path,
|
|
||||||
source_name,
|
|
||||||
release_name,
|
|
||||||
'applicationservice',
|
|
||||||
applicationservice_name,
|
|
||||||
'templates')
|
|
||||||
if isdir(path):
|
|
||||||
for template in listdir(path):
|
|
||||||
template_path = join(dest_dir, template)
|
|
||||||
if isfile(template_path):
|
|
||||||
as_names_str = '", "'.join(as_names)
|
|
||||||
raise Exception(_(f'duplicate "{template}" when copying template from "{applicationservice_name}" to "{dest_dir}" for servermodel "{servermodel_name}" (previous application services was "{as_names_str}"'))
|
|
||||||
copyfile(join(path, template), template_path)
|
|
||||||
as_names.append(applicationservice_name)
|
|
||||||
as_names_str = '", "'.join(as_names)
|
|
||||||
await log.info(risotto_context,
|
|
||||||
_(f'copy templates for "{servermodel_name}" with application services "{as_names_str}"'))
|
|
@ -1,292 +0,0 @@
|
|||||||
from shutil import rmtree
|
|
||||||
from os import listdir, makedirs
|
|
||||||
from os.path import join, isdir
|
|
||||||
from yaml import load, SafeLoader
|
|
||||||
from traceback import print_exc
|
|
||||||
from typing import Dict, List, Optional
|
|
||||||
from .generator import Generator
|
|
||||||
from ...register import register
|
|
||||||
from ...utils import _
|
|
||||||
from ...context import Context
|
|
||||||
from ...config import get_config
|
|
||||||
from ...error import ExecutionError
|
|
||||||
from ...logger import log
|
|
||||||
|
|
||||||
|
|
||||||
class Risotto(Generator):
|
|
||||||
def __init__(self,
|
|
||||||
test: bool) -> None:
|
|
||||||
self.source_root_path = get_config()['source']['root_path']
|
|
||||||
self.cache_root_path = join(get_config()['cache']['root_path'], 'servermodel')
|
|
||||||
self.internal_source_name = get_config()['servermodel']['internal_source']
|
|
||||||
self.internal_distribution_name = get_config()['servermodel']['internal_distribution']
|
|
||||||
self.internal_release_name = get_config()['servermodel']['internal_release_name']
|
|
||||||
if not isdir(self.cache_root_path):
|
|
||||||
makedirs(join(self.cache_root_path))
|
|
||||||
super().__init__(test)
|
|
||||||
|
|
||||||
async def on_join(self,
|
|
||||||
risotto_context: Context) -> None:
|
|
||||||
print('===', await self.call('v1.pki.openssh.get', risotto_context))
|
|
||||||
internal_release = await self.call('v1.setting.source.release.describe',
|
|
||||||
risotto_context,
|
|
||||||
source_name=self.internal_source_name,
|
|
||||||
release_distribution=self.internal_distribution_name)
|
|
||||||
self.internal_release_id = internal_release['release_id']
|
|
||||||
|
|
||||||
async def _servermodel_create(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
servermodel_name: str,
|
|
||||||
servermodel_description: str,
|
|
||||||
servermodel_parents: List[Dict],
|
|
||||||
dependencies: List[int],
|
|
||||||
release_id: int,
|
|
||||||
generate_cache: Dict=None) -> Dict:
|
|
||||||
if generate_cache is None:
|
|
||||||
generate_cache = {'applicationservice': {},
|
|
||||||
'release_id': {}}
|
|
||||||
servermodel_insert = """INSERT INTO Servermodel(ServermodelName, ServermodelDescription, ServermodelParentsId, ServermodelReleaseId, ServermodelApplicationServiceId)
|
|
||||||
VALUES ($1,$2,$3,$4,$5)
|
|
||||||
RETURNING ServermodelId
|
|
||||||
"""
|
|
||||||
as_name = f"local_{servermodel_name}"
|
|
||||||
as_description = f'local application service for {servermodel_name}'
|
|
||||||
servermodel_parents_id = []
|
|
||||||
for servermodel_parent in servermodel_parents:
|
|
||||||
servermodel_parents_id.append(servermodel_parent['servermodel_id'])
|
|
||||||
dependencies.append(servermodel_parent['servermodel_applicationservice_id'])
|
|
||||||
applicationservice = await self.call('v1.setting.applicationservice.create',
|
|
||||||
risotto_context,
|
|
||||||
applicationservice_name=as_name,
|
|
||||||
applicationservice_description=as_description,
|
|
||||||
applicationservice_dependencies=dependencies)
|
|
||||||
applicationservice_id = applicationservice['applicationservice_id']
|
|
||||||
generate_cache['applicationservice'][applicationservice_id] = (as_name,
|
|
||||||
self.internal_release_id)
|
|
||||||
if self.internal_release_id not in generate_cache['release_id']:
|
|
||||||
generate_cache['release_id'][self.internal_release_id] = (self.internal_source_name,
|
|
||||||
self.internal_release_name)
|
|
||||||
|
|
||||||
servermodel_id = await risotto_context.connection.fetchval(servermodel_insert,
|
|
||||||
servermodel_name,
|
|
||||||
servermodel_description,
|
|
||||||
servermodel_parents_id,
|
|
||||||
release_id,
|
|
||||||
applicationservice_id)
|
|
||||||
dest_dir = self.get_servermodel_cache(servermodel_id)
|
|
||||||
if isdir(dest_dir):
|
|
||||||
rmtree(dest_dir)
|
|
||||||
makedirs(dest_dir)
|
|
||||||
dependencies = applicationservice['applicationservice_dependencies']
|
|
||||||
# for as_release_id in dependencies.values():
|
|
||||||
# applicationservice_name, as_release_id = applicationservice_infos
|
|
||||||
# if as_release_id not in release_cache:
|
|
||||||
# release_cache[as_release_id] = await self.call('v1.setting.source.release.get_by_id',
|
|
||||||
# risotto_context,
|
|
||||||
# release_id=as_release_id)
|
|
||||||
await self.generate(risotto_context,
|
|
||||||
servermodel_name,
|
|
||||||
servermodel_id,
|
|
||||||
dependencies,
|
|
||||||
generate_cache)
|
|
||||||
sm_dict = {'servermodel_name': servermodel_name,
|
|
||||||
'servermodel_description': servermodel_description,
|
|
||||||
'servermodel_parents_id': servermodel_parents_id,
|
|
||||||
'servermodel_applicationservice_id': applicationservice_id,
|
|
||||||
'release_id': release_id,
|
|
||||||
'servermodel_id': servermodel_id}
|
|
||||||
return sm_dict
|
|
||||||
|
|
||||||
def parse_parents(self,
|
|
||||||
servermodels: Dict,
|
|
||||||
servermodel: Dict,
|
|
||||||
parents: List=None) -> List:
|
|
||||||
if parents is None:
|
|
||||||
parents = [servermodel['name']]
|
|
||||||
parent = servermodel['parent']
|
|
||||||
if parent in servermodels:
|
|
||||||
parents.append(parent)
|
|
||||||
self.parse_parents(servermodels, servermodels[parent], parents)
|
|
||||||
return parents
|
|
||||||
|
|
||||||
@register('v1.setting.applicationservice.updated')
|
|
||||||
async def applicationservice_updated(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
applicationservice_id):
|
|
||||||
# FIXME applicationservices qui depend de ce services => updated
|
|
||||||
sql = '''
|
|
||||||
SELECT ServermodelId as servermodel_id, ServermodelName as servermodel_name, ServermodelDescription as servermodel_description, ServermodelParentsId as servermodel_parents_id, ServermodelReleaseId as release_id, ServermodelApplicationServiceId as servermodel_applicationservice_id
|
|
||||||
FROM Servermodel
|
|
||||||
WHERE ServermodelApplicationServiceId = $1
|
|
||||||
'''
|
|
||||||
servermodel = await risotto_context.connection.fetchrow(sql,
|
|
||||||
applicationservice_id)
|
|
||||||
if servermodel is not None:
|
|
||||||
servermodel_name = servermodel['servermodel_name']
|
|
||||||
servermodel_id = servermodel['servermodel_id']
|
|
||||||
release_id = servermodel['release_id']
|
|
||||||
applicationservice = await self.call('v1.setting.applicationservice.get_by_id',
|
|
||||||
risotto_context,
|
|
||||||
applicationservice_id=applicationservice_id)
|
|
||||||
dependencies = applicationservice['applicationservice_dependencies']
|
|
||||||
await self.generate(risotto_context,
|
|
||||||
servermodel_name,
|
|
||||||
servermodel_id,
|
|
||||||
dependencies,
|
|
||||||
None)
|
|
||||||
await self.publish('v1.setting.servermodel.updated',
|
|
||||||
risotto_context,
|
|
||||||
**servermodel)
|
|
||||||
|
|
||||||
@register('v1.setting.servermodel.dataset.updated')
|
|
||||||
async def servermodel_dataset_updated(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
source_name: str,
|
|
||||||
release_distribution: int):
|
|
||||||
release = await self.call('v1.setting.source.release.describe',
|
|
||||||
risotto_context,
|
|
||||||
source_name=source_name,
|
|
||||||
release_distribution=release_distribution)
|
|
||||||
release_id = release['release_id']
|
|
||||||
generate_cache = {'applicationservice': {},
|
|
||||||
'release_id': {release['release_id']: (release['source_name'],
|
|
||||||
release['release_name'])}}
|
|
||||||
servermodel_path = join(self.source_root_path,
|
|
||||||
source_name,
|
|
||||||
release['release_name'],
|
|
||||||
'servermodel')
|
|
||||||
servermodels = {}
|
|
||||||
for servermodel in listdir(servermodel_path):
|
|
||||||
if not servermodel.endswith('.yml'):
|
|
||||||
continue
|
|
||||||
servermodel_description_path = join(servermodel_path, servermodel)
|
|
||||||
try:
|
|
||||||
with open(servermodel_description_path, 'r') as servermodel_yml:
|
|
||||||
servermodel_description = load(servermodel_yml,
|
|
||||||
Loader=SafeLoader)
|
|
||||||
except Exception as err:
|
|
||||||
if get_config().get('global').get('debug'):
|
|
||||||
print_exc()
|
|
||||||
raise ExecutionError(_(f'Error while reading {servermodel_description_path}: {err}'))
|
|
||||||
servermodels[servermodel_description['name']] = servermodel_description
|
|
||||||
servermodels[servermodel_description['name']]['done'] = False
|
|
||||||
|
|
||||||
for servermodel in servermodels.values():
|
|
||||||
if not servermodel['done']:
|
|
||||||
# parents need to be created before their children, so retrieve all parents
|
|
||||||
parents = self.parse_parents(servermodels,
|
|
||||||
servermodel)
|
|
||||||
parents.reverse()
|
|
||||||
servermodel_parent = []
|
|
||||||
for new_servermodel in parents:
|
|
||||||
if not servermodels[new_servermodel]['done']:
|
|
||||||
servermodel_description = servermodels[new_servermodel]
|
|
||||||
parent = servermodel_description['parent']
|
|
||||||
if not servermodel_parent and parent is not None:
|
|
||||||
servermodel_parent = [await self._servermodel_describe(risotto_context,
|
|
||||||
parent,
|
|
||||||
release_id,
|
|
||||||
source_name,
|
|
||||||
release_distribution)]
|
|
||||||
# link application service with this servermodel
|
|
||||||
dependencies = []
|
|
||||||
for depend in servermodels[new_servermodel]['applicationservices']:
|
|
||||||
applicationservice = await self.call('v1.setting.applicationservice.describe',
|
|
||||||
risotto_context,
|
|
||||||
applicationservice_name=depend,
|
|
||||||
source_name=source_name,
|
|
||||||
release_distribution=release_distribution)
|
|
||||||
dependencies.append(applicationservice['applicationservice_id'])
|
|
||||||
sm_name = servermodel_description['name']
|
|
||||||
sm_description = servermodel_description['description']
|
|
||||||
try:
|
|
||||||
servermodel_ob = await self._servermodel_create(risotto_context,
|
|
||||||
sm_name,
|
|
||||||
sm_description,
|
|
||||||
servermodel_parent,
|
|
||||||
dependencies,
|
|
||||||
release_id,
|
|
||||||
generate_cache)
|
|
||||||
await self.publish('v1.setting.servermodel.created',
|
|
||||||
risotto_context,
|
|
||||||
**servermodel_ob)
|
|
||||||
except Exception as err:
|
|
||||||
if get_config().get('global').get('debug'):
|
|
||||||
print_exc()
|
|
||||||
raise ExecutionError(_(f"Error while injecting servermodel {sm_name} in database: {err}"))
|
|
||||||
servermodel_parent = [servermodel_ob]
|
|
||||||
servermodel_description['done'] = True
|
|
||||||
return {'retcode': 0, 'returns': _('Servermodels successfully loaded')}
|
|
||||||
|
|
||||||
@register('v1.setting.servermodel.list')
|
|
||||||
async def servermodel_list(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
source_id: int):
|
|
||||||
sql = '''
|
|
||||||
SELECT ServermodelId as servermodel_id, ServermodelName as servermodel_name, ServermodelDescription as servermodel_description, ServermodelParentsId as servermodel_parents_id, ServermodelReleaseId as release_id, ServermodelApplicationServiceId as servermodel_applicationservice_id
|
|
||||||
FROM Servermodel
|
|
||||||
'''
|
|
||||||
servermodels = await risotto_context.connection.fetch(sql)
|
|
||||||
return [dict(r) for r in servermodels]
|
|
||||||
|
|
||||||
@register('v1.setting.servermodel.describe')
|
|
||||||
async def servermodel_describe(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
servermodel_name,
|
|
||||||
source_name,
|
|
||||||
release_distribution) -> Dict:
|
|
||||||
release = await self.call('v1.setting.source.release.describe',
|
|
||||||
risotto_context,
|
|
||||||
source_name=source_name,
|
|
||||||
release_distribution=release_distribution)
|
|
||||||
return await self._servermodel_describe(risotto_context,
|
|
||||||
servermodel_name,
|
|
||||||
release['release_id'],
|
|
||||||
source_name,
|
|
||||||
release_distribution)
|
|
||||||
|
|
||||||
async def _servermodel_describe(self,
|
|
||||||
risotto_context,
|
|
||||||
servermodel_name,
|
|
||||||
release_id,
|
|
||||||
source_name,
|
|
||||||
release_distribution):
|
|
||||||
sql = '''
|
|
||||||
SELECT ServermodelId as servermodel_id, ServermodelName as servermodel_name, ServermodelDescription as servermodel_description, ServermodelParentsId as servermodel_parents_id, ServermodelReleaseId as release_id, ServermodelApplicationServiceId as servermodel_applicationservice_id
|
|
||||||
FROM Servermodel
|
|
||||||
WHERE ServermodelName=$1 AND ServermodelReleaseId=$2
|
|
||||||
'''
|
|
||||||
servermodel = await risotto_context.connection.fetchrow(sql,
|
|
||||||
servermodel_name,
|
|
||||||
release_id)
|
|
||||||
if not servermodel:
|
|
||||||
raise Exception(_(f'"{servermodel_name}" is not a valid name for a servermodel in source "{source_name}" and release "{release_distribution}"'))
|
|
||||||
return dict(servermodel)
|
|
||||||
|
|
||||||
@register('v1.setting.servermodel.create', notification='v1.setting.servermodel.created')
|
|
||||||
async def create_servermodel(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
servermodel_name: str,
|
|
||||||
servermodel_description: str,
|
|
||||||
servermodel_parents_name: List[int],
|
|
||||||
servermodel_parents_source_name: str,
|
|
||||||
servermodel_parents_release_distribution: str) -> Dict:
|
|
||||||
release = await self.call('v1.setting.source.release.describe',
|
|
||||||
risotto_context,
|
|
||||||
source_name=servermodel_parents_source_name,
|
|
||||||
release_distribution=servermodel_parents_release_distribution)
|
|
||||||
release_id = release['release_id']
|
|
||||||
servermodel_parents = []
|
|
||||||
for servermodel_parent_name in servermodel_parents_name:
|
|
||||||
servermodel_parents.append(await self._servermodel_describe(risotto_context,
|
|
||||||
servermodel_parent_name,
|
|
||||||
release_id,
|
|
||||||
servermodel_parents_source_name,
|
|
||||||
servermodel_parents_release_distribution))
|
|
||||||
return await self._servermodel_create(risotto_context,
|
|
||||||
servermodel_name,
|
|
||||||
servermodel_description,
|
|
||||||
servermodel_parents,
|
|
||||||
[],
|
|
||||||
self.internal_release_id,
|
|
||||||
None)
|
|
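For readers following `servermodel_dataset_updated` above: `parse_parents` walks the declared `parent` chain so that ancestors can be created before their children. A small self-contained sketch of that walk, using hypothetical sample data:

```python
# Standalone illustration of the parent-resolution walk (sample data is made up).
from typing import Dict, List, Optional

def parse_parents(servermodels: Dict[str, Dict],
                  servermodel: Dict,
                  parents: Optional[List[str]] = None) -> List[str]:
    if parents is None:
        parents = [servermodel['name']]
    parent = servermodel['parent']
    if parent in servermodels:
        parents.append(parent)
        parse_parents(servermodels, servermodels[parent], parents)
    return parents

servermodels = {
    'base': {'name': 'base', 'parent': None},
    'web': {'name': 'web', 'parent': 'base'},
    'wordpress': {'name': 'wordpress', 'parent': 'web'},
}
print(parse_parents(servermodels, servermodels['wordpress']))
# ['wordpress', 'web', 'base'] -- reversed before creation so parents come first
```
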
@@ -1 +0,0 @@
from .session import Risotto

@ -1,316 +0,0 @@
|
|||||||
from os import urandom # , unlink
|
|
||||||
from binascii import hexlify
|
|
||||||
from traceback import print_exc
|
|
||||||
from typing import Dict, List, Optional, Any
|
|
||||||
from tiramisu import Storage
|
|
||||||
|
|
||||||
|
|
||||||
from ...http import register as register_http
|
|
||||||
from ...context import Context
|
|
||||||
from ...utils import _
|
|
||||||
from .storage import storage_server, storage_servermodel
|
|
||||||
from ...controller import Controller
|
|
||||||
from ...register import register
|
|
||||||
from ...dispatcher import dispatcher
|
|
||||||
from ...config import get_config
|
|
||||||
|
|
||||||
|
|
||||||
class Risotto(Controller):
|
|
||||||
def __init__(self,
|
|
||||||
test):
|
|
||||||
self.modify_storage = Storage(engine='dictionary')
|
|
||||||
self.internal_source_name = get_config()['servermodel']['internal_source']
|
|
||||||
self.internal_distribution_name = get_config()['servermodel']['internal_distribution']
|
|
||||||
|
|
||||||
def get_storage(self,
|
|
||||||
type: str):
|
|
||||||
if type == 'server':
|
|
||||||
return storage_server
|
|
||||||
return storage_servermodel
|
|
||||||
|
|
||||||
def get_session(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
session_id: str,
|
|
||||||
type: str) -> Dict:
|
|
||||||
""" Get session information from storage
|
|
||||||
"""
|
|
||||||
if type == 'server':
|
|
||||||
storage = storage_server
|
|
||||||
else:
|
|
||||||
storage = storage_servermodel
|
|
||||||
return storage.get_session(session_id,
|
|
||||||
risotto_context.username)
|
|
||||||
|
|
||||||
def get_session_informations(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
session_id: str,
|
|
||||||
type: str) -> Dict:
|
|
||||||
""" format session with a session ID name
|
|
||||||
"""
|
|
||||||
session = self.get_session(risotto_context,
|
|
||||||
session_id,
|
|
||||||
type)
|
|
||||||
return self.format_session(session_id,
|
|
||||||
session)
|
|
||||||
|
|
||||||
def format_session(self,
|
|
||||||
session_name: str,
|
|
||||||
session: Dict) -> Dict:
|
|
||||||
""" format session
|
|
||||||
"""
|
|
||||||
return {'session_id': session_name,
|
|
||||||
'id': session['id'],
|
|
||||||
'username': session['username'],
|
|
||||||
'timestamp': session['timestamp'],
|
|
||||||
'namespace': session['namespace'],
|
|
||||||
'mode': session['mode'],
|
|
||||||
'debug': session['debug']}
|
|
||||||
|
|
||||||
@register('v1.setting.session.server.start')
|
|
||||||
async def start_session_server(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
server_name: str) -> Dict:
|
|
||||||
""" start a new config session for a server
|
|
||||||
"""
|
|
||||||
config_module = dispatcher.get_service('config')
|
|
||||||
server = await self.call('v1.setting.server.describe',
|
|
||||||
risotto_context,
|
|
||||||
server_name=server_name)
|
|
||||||
if not server or server['server_id'] not in config_module.server:
|
|
||||||
raise Exception(_(f'cannot find server with name {server_name}'))
|
|
||||||
id = server['server_id']
|
|
||||||
config = config_module.server[id]['server_to_deploy']
|
|
||||||
|
|
||||||
storage = self.get_storage('server')
|
|
||||||
|
|
||||||
# check if a session already exists
|
|
||||||
sessions = storage.get_sessions()
|
|
||||||
for sess_id, session in sessions.items():
|
|
||||||
if session['id'] == id:
|
|
||||||
if session['username'] == risotto_context.username:
|
|
||||||
# same user so returns it
|
|
||||||
return self.format_session(sess_id,
|
|
||||||
session)
|
|
||||||
else:
raise Exception(_(f'{session["username"]} already edits this configuration'))
|
|
||||||
|
|
||||||
# create a new session
|
|
||||||
while True:
|
|
||||||
session_id = 'z' + hexlify(urandom(23)).decode()
|
|
||||||
if session_id not in sessions:
|
|
||||||
break
|
|
||||||
await storage.add_session(session_id,
|
|
||||||
config,
|
|
||||||
id,
|
|
||||||
risotto_context.username,
|
|
||||||
self.modify_storage)
|
|
||||||
|
|
||||||
# return session's information
|
|
||||||
return self.get_session_informations(risotto_context,
|
|
||||||
session_id,
|
|
||||||
'server')
|
|
||||||
|
|
||||||
@register('v1.setting.session.servermodel.start')
|
|
||||||
async def start_session_servermodel(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
servermodel_name: str) -> Dict:
|
|
||||||
""" start a new config session for a server or a servermodel
|
|
||||||
"""
|
|
||||||
config_module = dispatcher.get_service('config')
|
|
||||||
servermodel = await self.call('v1.setting.servermodel.describe',
|
|
||||||
risotto_context,
|
|
||||||
servermodel_name=servermodel_name,
|
|
||||||
source_name=self.internal_source_name,
|
|
||||||
release_distribution=self.internal_distribution_name)
|
|
||||||
if not servermodel or servermodel['servermodel_id'] not in config_module.servermodel:
|
|
||||||
raise Exception(_(f'cannot find servermodel with name {servermodel_name}'))
|
|
||||||
id = servermodel['servermodel_id']
|
|
||||||
config = config_module.servermodel[id]
|
|
||||||
|
|
||||||
storage = self.get_storage('servermodel')
|
|
||||||
|
|
||||||
# check if a session already exists
|
|
||||||
sessions = storage.get_sessions()
|
|
||||||
for sess_id, session in sessions.items():
|
|
||||||
if session['id'] == id:
|
|
||||||
if session['username'] == risotto_context.username:
|
|
||||||
# same user so returns it
|
|
||||||
return self.format_session(sess_id,
|
|
||||||
session)
|
|
||||||
else:
raise Exception(_(f'{session["username"]} already edits this configuration'))
|
|
||||||
|
|
||||||
# create a new session
|
|
||||||
while True:
|
|
||||||
session_id = 'z' + hexlify(urandom(23)).decode()
|
|
||||||
if session_id not in sessions:
|
|
||||||
break
|
|
||||||
await storage.add_session(session_id,
|
|
||||||
config,
|
|
||||||
id,
|
|
||||||
risotto_context.username,
|
|
||||||
self.modify_storage)
|
|
||||||
|
|
||||||
# return session's information
|
|
||||||
return self.get_session_informations(risotto_context,
|
|
||||||
session_id,
|
|
||||||
'servermodel')
|
|
||||||
|
|
||||||
@register(['v1.setting.session.server.list', 'v1.setting.session.servermodel.list'])
|
|
||||||
async def list_session_server(self,
|
|
||||||
risotto_context: Context) -> Dict:
|
|
||||||
type = risotto_context.message.rsplit('.', 2)[-2]
|
|
||||||
storage = self.get_storage(type)
|
|
||||||
return [self.format_session(session_id, session) for session_id, session in storage.get_sessions().items()]
|
|
||||||
|
|
||||||
|
|
||||||
@register(['v1.setting.session.server.filter', 'v1.setting.session.servermodel.filter'])
|
|
||||||
async def filter_session(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
session_id: str,
|
|
||||||
namespace: str,
|
|
||||||
mode: str,
|
|
||||||
debug: Optional[bool]):
|
|
||||||
type = risotto_context.message.rsplit('.', 2)[-2]
|
|
||||||
storage = self.get_storage(type)
|
|
||||||
# to validate the session right
|
|
||||||
storage.get_session(session_id,
|
|
||||||
risotto_context.username)
|
|
||||||
if namespace is not None:
|
|
||||||
storage.set_namespace(session_id,
|
|
||||||
namespace)
|
|
||||||
if mode is not None:
|
|
||||||
if mode not in ('basic', 'normal', 'expert'):
|
|
||||||
raise Exception(f'unknown mode {mode}')
|
|
||||||
await storage.set_config_mode(session_id,
|
|
||||||
mode)
|
|
||||||
if debug is not None:
|
|
||||||
await storage.set_config_debug(session_id,
|
|
||||||
debug)
|
|
||||||
return self.get_session_informations(risotto_context,
|
|
||||||
session_id,
|
|
||||||
type)
|
|
||||||
|
|
||||||
@register(['v1.setting.session.server.configure', 'v1.setting.session.servermodel.configure'])
|
|
||||||
async def configure_session(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
session_id: str,
|
|
||||||
action: str,
|
|
||||||
name: str,
|
|
||||||
index: int,
|
|
||||||
value: Any,
|
|
||||||
value_multi: Optional[List]) -> Dict:
|
|
||||||
type = risotto_context.message.rsplit('.', 2)[-2]
|
|
||||||
session = self.get_session(risotto_context,
|
|
||||||
session_id,
|
|
||||||
type)
|
|
||||||
# if multi and not follower the value is in fact in value_multi
|
|
||||||
# FIXME option = session['option'].option(name).option
|
|
||||||
option = session['config'].option(name).option
|
|
||||||
if await option.ismulti() and not await option.isfollower():
|
|
||||||
value = value_multi
|
|
||||||
#FIXME namespace = session['namespace']
|
|
||||||
#FIXME update = {'name': f'{namespace}.{name}',
|
|
||||||
update = {'name': name,
|
|
||||||
'action': action,
|
|
||||||
'value': value}
|
|
||||||
if index is not None:
|
|
||||||
update['index'] = index
|
|
||||||
updates = {'updates': [update]}
|
|
||||||
ret = await session['option'].updates(updates)
|
|
||||||
if update['name'] in ret:
|
|
||||||
for val in ret[update['name']][index]:
|
|
||||||
if isinstance(val, ValueError):
|
|
||||||
raise Exception(val)
|
|
||||||
ret = {'session_id': session_id,
|
|
||||||
'name': name}
|
|
||||||
if index is not None:
|
|
||||||
ret['index'] = index
|
|
||||||
return ret
|
|
||||||
|
|
||||||
@register(['v1.setting.session.server.validate', 'v1.setting.session.servermodel.validate'])
|
|
||||||
async def validate_session(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
session_id: str) -> Dict:
|
|
||||||
type = risotto_context.message.rsplit('.', 2)[-2]
|
|
||||||
session = self.get_session(risotto_context,
|
|
||||||
session_id,
|
|
||||||
type)
|
|
||||||
try:
|
|
||||||
await session['config'].forcepermissive.option(session['namespace']).value.dict()
|
|
||||||
except Exception as err:
|
|
||||||
raise Exception(str(err))
|
|
||||||
if type == 'server':
|
|
||||||
config = session['config']
|
|
||||||
await config.property.read_only()
|
|
||||||
mandatories = list(await config.value.mandatory())
|
|
||||||
await config.property.read_write()
|
|
||||||
if mandatories:
|
|
||||||
# FIXME mandatories = [mandatory.split('.', 1)[1] for mandatory in mandatories]
|
|
||||||
if len(mandatories) == 1:
|
|
||||||
mandatories = mandatories[0]
|
|
||||||
msg = _(f'the parameter "--{mandatories}" is mandatory')
|
|
||||||
else:
|
|
||||||
mandatories = '", "--'.join(mandatories)
|
|
||||||
msg = _(f'parameters "--{mandatories}" are mandatories')
|
|
||||||
raise Exception(msg)
|
|
||||||
return self.format_session(session_id,
|
|
||||||
session)
|
|
||||||
|
|
||||||
@register(['v1.setting.session.server.get', 'v1.setting.session.servermodel.get'])
|
|
||||||
async def get_session_server(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
session_id: str,
|
|
||||||
name: Optional[str]) -> Dict:
|
|
||||||
type = risotto_context.message.rsplit('.', 2)[-2]
|
|
||||||
session = self.get_session(risotto_context,
|
|
||||||
session_id,
|
|
||||||
type)
|
|
||||||
info = self.format_session(session_id, session)
|
|
||||||
if name is not None:
|
|
||||||
content = {name: await session['config'].option(name).value.get()}
|
|
||||||
else:
|
|
||||||
content = await session['option'].value.dict(fullpath=True,
|
|
||||||
leader_to_list=True)
|
|
||||||
info['content'] = content
|
|
||||||
return info
|
|
||||||
|
|
||||||
@register(['v1.setting.session.server.stop', 'v1.setting.session.servermodel.stop'])
|
|
||||||
async def stop_session(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
session_id: str,
|
|
||||||
save: bool) -> Dict:
|
|
||||||
type = risotto_context.message.rsplit('.', 2)[-2]
|
|
||||||
storage = self.get_storage(type)
|
|
||||||
session = storage.get_session(session_id,
|
|
||||||
risotto_context.username)
|
|
||||||
id_ = session['id']
|
|
||||||
config_module = dispatcher.get_service('config')
|
|
||||||
if type == 'server':
|
|
||||||
config = config_module.server[id_]['server_to_deploy']
|
|
||||||
else:
|
|
||||||
config = config_module.servermodel[id_]
|
|
||||||
if save:
|
|
||||||
modif_config = session['config']
|
|
||||||
await config.value.importation(await modif_config.value.exportation())
|
|
||||||
await config.permissive.importation(await modif_config.permissive.exportation())
|
|
||||||
await storage.del_session(session_id)
|
|
||||||
return self.format_session(session_id, session)
|
|
||||||
|
|
||||||
@register_http('v1', '/config/server/{session_id}')
|
|
||||||
async def get_server_api(self,
|
|
||||||
request,
|
|
||||||
risotto_context: Context,
|
|
||||||
session_id: str) -> Dict:
|
|
||||||
session = storage_server.get_session(session_id,
|
|
||||||
risotto_context.username)
|
|
||||||
return await session['option'].dict(remotable='all')
|
|
||||||
|
|
||||||
@register_http('v1', '/config/servermodel/{session_id}')
|
|
||||||
async def get_servermodel_api(self,
|
|
||||||
request,
|
|
||||||
risotto_context: Context,
|
|
||||||
session_id: str) -> Dict:
|
|
||||||
session = storage_servermodel.get_session(session_id,
|
|
||||||
risotto_context.username)
|
|
||||||
return await session['option'].dict(remotable='all')
|
|
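Both `start_session_server` and `start_session_servermodel` above derive their session identifiers the same way. A short self-contained sketch of that collision-checked generation:

```python
# Sketch of the session-id scheme used above: a 'z'-prefixed random hex token,
# regenerated until it does not collide with an existing session key.
from binascii import hexlify
from os import urandom

def new_session_id(existing: set) -> str:
    while True:
        session_id = 'z' + hexlify(urandom(23)).decode()
        if session_id not in existing:
            return session_id

print(new_session_id(set()))  # e.g. 'z3f9c...'
```
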
@@ -1,147 +0,0 @@
import time
from typing import Dict
from tiramisu import Config
from rougail import modes
from ...error import CallError, NotAllowedError


class StorageError(Exception):
    pass


class Storage(object):
    __slots__ = ('sessions',)

    def __init__(self):
        self.sessions = {}

    async def add_session(self,
                          session_id: str,
                          orig_config: Config,
                          server_id: int,
                          username: str,
                          config_storage):
        prefix_id = f'{session_id}_'
        config_name = self.get_config_name(server_id)
        config_id = f'{prefix_id}{config_name}'

        # copy the Config and all its parents
        meta = await orig_config.config.deepcopy(session_id=config_id,
                                                 storage=config_storage,
                                                 metaconfig_prefix=prefix_id)

        # retrieve the copied config (not the metaconfig)
        config = meta
        while True:
            try:
                children = list(await config.config.list())
                if not children:
                    # it's an empty metaconfig
                    break
                config = children[0]
            except:
                # it's a config, so no "list" method
                break
        await config.property.read_write()
        # set the default owner
        await self.set_owner(config,
                             username)

        # store it
        self.sessions[session_id] = {'config': config,
                                     # do not delete meta, so keep it!
                                     'meta': meta,
                                     'id': server_id,
                                     'timestamp': int(time.time()),
                                     'username': username}
        await self.set_config_mode(session_id,
                                   'normal')
        await self.set_config_debug(session_id,
                                    False)
        self.set_namespace(session_id,
                           'creole')

    async def set_config_mode(self,
                              id: str,
                              mode: str):
        """ Define which edition mode to select
        """
        config = self.sessions[id]['config']
        for mode_level in modes.values():
            if modes[mode] < mode_level:
                await config.property.add(mode_level.name)
            else:
                await config.property.pop(mode_level.name)
        self.sessions[id]['mode'] = mode

    async def set_config_debug(self, id_, is_debug):
        """ Enable/Disable debug mode
        """
        config = self.sessions[id_]['config']
        if is_debug:
            await config.property.pop('hidden')
        else:
            await config.property.add('hidden')
        self.sessions[id_]['debug'] = is_debug

    def set_namespace(self,
                      session_id: str,
                      namespace: str):
        self.sessions[session_id]['option'] = self.sessions[session_id]['config'].option(namespace)
        self.sessions[session_id]['namespace'] = namespace

    def get_sessions(self):
        return self.sessions

    def get_session(self,
                    session_id: str,
                    username: str) -> Dict:
        if session_id not in self.sessions:
            raise Exception(f'the session "{session_id}" does not exist')
        session = self.sessions[session_id]
        if username != session['username']:
            raise NotAllowedError()
        return session

    async def del_session(self,
                          id: str):
        config = self.sessions[id]['meta']
        while True:
            try:
                children = list(await config.config.list())
                if not children:
                    # it's an empty metaconfig
                    break
                config = children[0]
                await config.session.reset()
            except:
                # it's a config, so no "list" method
                break
        await self.sessions[id]['meta'].session.reset()
        del self.sessions[id]


class StorageServer(Storage):
    def get_config_name(self,
                        server_id: int):
        return f'std_{server_id}'

    async def set_owner(self,
                        config: Config,
                        username: str):
        await config.owner.set(username)


class StorageServermodel(Storage):
    def get_config_name(self,
                        server_id: int):
        return f'v_{server_id}'

    async def set_owner(self,
                        config: Config,
                        username: str):
        await config.owner.set('servermodel_' + username)


storage_server = StorageServer()
storage_servermodel = StorageServermodel()
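`Storage.set_config_mode` above adds the rougail mode levels that sit strictly above the selected mode as properties and pops the others. A rough illustration of that decision with a stand-in `modes` mapping (an assumption for the sketch; the real mapping comes from rougail):

```python
# Illustrative sketch only: hypothetical stand-in for rougail's `modes` mapping,
# mirroring the add/pop decision made in Storage.set_config_mode.
from collections import namedtuple

Mode = namedtuple('Mode', ['name', 'level'])
modes = {name: Mode(name, level) for level, name in enumerate(('basic', 'normal', 'expert'))}

def properties_for(mode: str):
    """Return (to_add, to_remove): levels above the chosen mode become properties
    (hiding those options), the chosen level and lower ones are popped."""
    to_add = [m.name for m in modes.values() if modes[mode].level < m.level]
    to_remove = [m.name for m in modes.values() if modes[mode].level >= m.level]
    return to_add, to_remove

print(properties_for('normal'))  # (['expert'], ['basic', 'normal'])
```
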
@@ -1 +0,0 @@
from .source import Risotto

@ -1,150 +0,0 @@
|
|||||||
from typing import Dict, List
|
|
||||||
from ...controller import Controller
|
|
||||||
from ...register import register
|
|
||||||
from ...context import Context
|
|
||||||
import requests
|
|
||||||
import yaml
|
|
||||||
import os
|
|
||||||
from ...utils import _
|
|
||||||
from ...config import get_config
|
|
||||||
|
|
||||||
|
|
||||||
class Risotto(Controller):
|
|
||||||
@register('v1.setting.source.create')
|
|
||||||
async def source_create(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
source_name: str,
|
|
||||||
source_url: str) -> Dict:
|
|
||||||
source_upsert = """INSERT INTO Source(SourceName, SourceURL) VALUES ($1, $2)
|
|
||||||
ON CONFLICT (SourceName) DO UPDATE SET SourceURL = $2
|
|
||||||
RETURNING SourceId
|
|
||||||
"""
|
|
||||||
# If given url is not 'none' (a.k.a internal source)
|
|
||||||
# Look for file releases.yml at given url
|
|
||||||
# If such a file exists, consider source a valid one and create source in database.
|
|
||||||
if source_url != 'none':
|
|
||||||
try:
|
|
||||||
releases = yaml.load(requests.get(source_url.rstrip('/') + '/releases.yml').content, Loader=yaml.SafeLoader)
|
|
||||||
except requests.exceptions.ConnectionError as err:
|
|
||||||
raise Exception(_('Invalid URL'))
|
|
||||||
except yaml.scanner.ScannerError as err:
|
|
||||||
raise Exception(_('Invalid releases.yml file'))
|
|
||||||
except:
|
|
||||||
raise Exception(_('Invalid source'))
|
|
||||||
else:
|
|
||||||
releases = {'1.0.0': {'distribution': 'last'}}
|
|
||||||
os.makedirs(os.path.join(get_config().get('source').get('root_path'), source_name), exist_ok=True)
|
|
||||||
with open(os.path.join(get_config().get('source').get('root_path'), source_name, 'releases.yml'), 'w') as release_file:
|
|
||||||
yaml.dump(releases, release_file)
|
|
||||||
source_id = await risotto_context.connection.fetchval(source_upsert,
|
|
||||||
source_name,
|
|
||||||
source_url)
|
|
||||||
return {'source_name': source_name,
|
|
||||||
'source_url': source_url,
|
|
||||||
'source_id': source_id}
|
|
||||||
|
|
||||||
@register('v1.setting.source.describe')
|
|
||||||
async def source_describe(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
source_name: str) -> Dict:
|
|
||||||
source_get = """SELECT SourceId as source_id, SourceName as source_name, SourceURL as source_url
|
|
||||||
FROM Source
|
|
||||||
WHERE SourceName = $1
|
|
||||||
"""
|
|
||||||
source = await risotto_context.connection.fetchrow(source_get,
|
|
||||||
source_name)
|
|
||||||
if not source:
|
|
||||||
raise Exception(_(f'unknown source with name {source_name}'))
|
|
||||||
return dict(source)
|
|
||||||
|
|
||||||
@register('v1.setting.source.list')
|
|
||||||
async def source_list(self,
|
|
||||||
risotto_context: Context) -> List[Dict]:
|
|
||||||
source_list = """SELECT SourceId as source_id, SourceName as source_name, SourceURL as source_url
|
|
||||||
FROM Source
|
|
||||||
"""
|
|
||||||
result = await risotto_context.connection.fetch(source_list)
|
|
||||||
return [dict(r) for r in result]
|
|
||||||
|
|
||||||
@register('v1.setting.source.dataset.update')
|
|
||||||
async def version_update(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
source_id: int,
|
|
||||||
release_name: str):
|
|
||||||
# source.release.create is an upsert, do not use it
|
|
||||||
release_insert = """INSERT INTO Release(ReleaseName, ReleaseSourceId) VALUES ($1, $2)
|
|
||||||
RETURNING ReleaseId
|
|
||||||
"""
|
|
||||||
release_id = await risotto_context.connection.fetchval(release_insert,
|
|
||||||
release_name,
|
|
||||||
source_id)
|
|
||||||
return {'release_id': release_id,
|
|
||||||
'release_name': release_name}
|
|
||||||
|
|
||||||
@register('v1.setting.source.release.create')
|
|
||||||
async def source_release_create(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
source_name: str,
|
|
||||||
release_name: str,
|
|
||||||
release_distribution: str) -> Dict:
|
|
||||||
source_get = """SELECT SourceId as source_id, SourceName as source_name, SourceURL as source_url
|
|
||||||
FROM Source
|
|
||||||
WHERE SourceName = $1
|
|
||||||
"""
|
|
||||||
release_upsert = """INSERT INTO Release(ReleaseName, ReleaseSourceId, ReleaseDistribution) VALUES ($1, $2, $3)
|
|
||||||
ON CONFLICT (ReleaseName, ReleaseSourceId) DO UPDATE SET ReleaseName = $1
|
|
||||||
RETURNING ReleaseId
|
|
||||||
"""
|
|
||||||
source_obj = await risotto_context.connection.fetchrow(source_get,
|
|
||||||
source_name)
|
|
||||||
if not source_obj:
|
|
||||||
raise Exception(_(f'unable to find a source with name {source_name}'))
|
|
||||||
source = dict(source_obj)
|
|
||||||
release_id = await risotto_context.connection.fetchval(release_upsert,
|
|
||||||
release_name,
|
|
||||||
source['source_id'],
|
|
||||||
release_distribution)
|
|
||||||
del source['source_id']
|
|
||||||
source['release_id'] = release_id
|
|
||||||
source['release_name'] = release_name
|
|
||||||
source['release_distribution'] = release_distribution
|
|
||||||
return source
|
|
||||||
|
|
||||||
@register('v1.setting.source.release.list')
|
|
||||||
async def release_list(self,
|
|
||||||
risotto_context,
|
|
||||||
source_name: str) -> Dict:
|
|
||||||
release_query = """SELECT ReleaseId as release_id, SourceName as source_name, SourceURL as source_url, ReleaseName as release_name, ReleaseDistribution as release_distribution
|
|
||||||
FROM Release, Source
|
|
||||||
WHERE Source.SourceName=$1 AND Source.SourceId=Release.ReleaseSourceId"""
|
|
||||||
result = await risotto_context.connection.fetch(release_query,
|
|
||||||
source_name)
|
|
||||||
return [dict(r) for r in result]
|
|
||||||
|
|
||||||
@register('v1.setting.source.release.describe')
|
|
||||||
async def release_describe(self,
|
|
||||||
risotto_context,
|
|
||||||
source_name: str,
|
|
||||||
release_distribution: str) -> Dict:
|
|
||||||
release_query = """SELECT ReleaseId as release_id, SourceName as source_name, SourceURL as source_url, ReleaseName as release_name, ReleaseDistribution as release_distribution
|
|
||||||
FROM Release, Source
|
|
||||||
WHERE Source.SourceName=$1 AND Source.SourceId=Release.ReleaseSourceId AND Release.ReleaseDistribution=$2"""
|
|
||||||
result = await risotto_context.connection.fetchrow(release_query,
|
|
||||||
source_name,
|
|
||||||
release_distribution)
|
|
||||||
if not result:
|
|
||||||
raise Exception(_(f'unknown release distribution {release_distribution} in source {source_name}'))
|
|
||||||
return dict(result)
|
|
||||||
|
|
||||||
@register('v1.setting.source.release.get_by_id')
|
|
||||||
async def release_get_by_id(self,
|
|
||||||
risotto_context: Context,
|
|
||||||
release_id: int) -> Dict:
|
|
||||||
release_query = """SELECT ReleaseId as release_id, SourceName as source_name, SourceURL as source_url, ReleaseName as release_name, ReleaseDistribution as release_distribution
|
|
||||||
FROM Release, Source
|
|
||||||
WHERE Release.ReleaseId = $1 AND Source.SourceId = Release.ReleaseSourceId"""
|
|
||||||
result = await risotto_context.connection.fetchrow(release_query,
|
|
||||||
release_id)
|
|
||||||
if not result:
|
|
||||||
raise Exception(_(f'unknown release id {release_id}'))
|
|
||||||
return dict(result)
|
|
@@ -1 +0,0 @@
from .template import Risotto

@@ -1,66 +0,0 @@
from os import mkdir
from os.path import isdir, join
from shutil import rmtree
from typing import Dict
from rougail.template import generate
from tiramisu import Storage
from ...config import CONFIGURATION_DIR, TMP_DIR, get_config
from ...controller import Controller
from ...register import register
from ...dispatcher import dispatcher
from ...utils import _


class Risotto(Controller):
    def __init__(self,
                 test: bool) -> None:
        self.storage = Storage(engine='dictionary')
        self.cache_root_path = join(get_config().get('cache').get('root_path'), 'servermodel')

    @register('v1.setting.template.generate')
    async def template_get(self,
                           risotto_context,
                           server_name: str) -> Dict:
        # get information about the server
        server = await self.call('v1.setting.server.describe',
                                 risotto_context,
                                 server_name=server_name)
        server_id = server['server_id']
        servermodel_id = server['server_servermodel_id']
        # verify that the server has a deployed configuration
        config_module = dispatcher.get_service('config')
        server = config_module.server[server_id]
        export = await server['server'].value.exportation()
        if not export[0]:
            raise Exception(_(f'configuration for server "{server_name}" is empty, you should deploy it first'))
        # copy the deployed configuration
        async with await server['server'].config.deepcopy(storage=self.storage) as config:
            meta = config
            while True:
                try:
                    children = list(await config.config.list())
                except:
                    break
                if children:
                    config = children[0]
                else:
                    break
            configurations_dir = join(CONFIGURATION_DIR,
                                      str(server_id))
            if isdir(configurations_dir):
                rmtree(configurations_dir)
            mkdir(configurations_dir)
            tmp_dir = join(TMP_DIR, str(server_id))
            if isdir(tmp_dir):
                rmtree(tmp_dir)
            mkdir(tmp_dir)
            templates_dir = join(self.cache_root_path, str(servermodel_id), 'templates')
            await generate(config,
                           server['funcs_file'],
                           templates_dir,
                           tmp_dir,
                           configurations_dir)
            del meta, config
        # FIXME del session !
        return {'server_name': server_name,
                'template_dir': configurations_dir}

@@ -1 +0,0 @@
from .uri import Risotto

@@ -1,117 +0,0 @@
from typing import Dict, List

from ...controller import Controller
from ...register import register
from ...context import Context
from ...utils import _


class Risotto(Controller):
    async def on_join(self,
                      risotto_context):
        for uri in ['v1.setting.applicationservice.create',
                    'v1.setting.applicationservice.dataset.updated',
                    'v1.setting.applicationservice.dependency.add',
                    'v1.setting.server.create',
                    'v1.setting.servermodel.create',
                    'v1.setting.servermodel.dataset.updated',
                    'v1.setting.session.server.start',
                    'v1.setting.source.create',
                    'v1.setting.source.dataset.update',
                    'v1.setting.source.release.create',
                    'v1.setting.template.generate',
                    'v1.setting.uri.role.join',
                    'v1.setting.uri.role.list',
                    'v1.setting.user.create',
                    'v1.setting.user.delete',
                    'v1.setting.user.list',
                    'v1.setting.user.role.create',
                    'v1.setting.config.configuration.server.get',
                    'v1.setting.user.role.list']:
            try:
                await self._uri_role_join(risotto_context,
                                          role_name='administrator',
                                          uri_name=uri)
            except:
                pass
        for uri in ['v1.setting.applicationservice.describe',
                    'v1.setting.server.describe',
                    'v1.setting.server.list',
                    'v1.setting.servermodel.list',
                    'v1.setting.session.server.configure',
                    'v1.setting.session.server.filter',
                    'v1.setting.session.server.get',
                    'v1.setting.session.server.list',
                    'v1.setting.session.servermodel.configure',
                    'v1.setting.session.servermodel.filter',
                    'v1.setting.session.servermodel.get',
                    'v1.setting.session.servermodel.list',
                    'v1.setting.session.servermodel.start',
                    'v1.setting.session.servermodel.stop',
                    'v1.setting.session.servermodel.validate',
                    'v1.setting.session.server.stop',
                    'v1.setting.session.server.validate',
                    'v1.setting.source.describe',
                    'v1.setting.source.list',
                    'v1.setting.source.release.list']:
            try:
                await self._uri_role_join(risotto_context,
                                          role_name='all',
                                          uri_name=uri)
            except:
                pass
        for uri in ['v1.setting.server.describe',
                    'v1.setting.applicationservice.dependency.add',
                    'v1.setting.config.configuration.server.get',
                    'v1.setting.config.configuration.server.deploy',
                    'v1.setting.session.server.start',
                    'v1.setting.template.generate']:
            try:
                await self._uri_role_join(risotto_context,
                                          role_name='server_rw',
                                          uri_name=uri)
            except:
                pass

    @register('v1.setting.uri.role.join')
    async def uri_role_join(self,
                            risotto_context: Context,
                            role_name: str,
                            uri_name: str) -> Dict:
        return await self._uri_role_join(risotto_context,
                                         role_name,
                                         uri_name)

    async def _uri_role_join(self,
                             risotto_context: Context,
                             role_name: str,
                             uri_name: str) -> Dict:
        # Verify that the URI exists and get its ID
        sql = '''
        SELECT URIId
        FROM URI
        WHERE URIName = $1
        '''
        uri_id = await risotto_context.connection.fetchval(sql,
                                                           uri_name)
        if uri_id is None:
            raise Exception(_(f'unable to find message {uri_name}'))
        sql = '''
        INSERT INTO RoleURI(RoleName, URIId)
        VALUES ($1,$2)
        ON CONFLICT DO NOTHING
        '''
        uri_id = await risotto_context.connection.fetchrow(sql,
                                                           role_name,
                                                           uri_id)
        return {'role_name': role_name,
                'uri_name': uri_name}

    @register('v1.setting.uri.role.list')
    async def uri_role_list(self,
                            risotto_context: Context) -> List[Dict]:
        sql = '''
        SELECT RoleName as role_name, URI.URIName as uri_name
        FROM RoleURI, URI
        WHERE RoleURI.URIId = URI.URIId
        '''
        return [dict(r) for r in await risotto_context.connection.fetch(sql)]

@@ -1 +0,0 @@
from .user import Risotto

@@ -1,211 +0,0 @@

The whole hunk removes the user/role settings service; the deleted module read as follows:

```python
from typing import Dict, Optional

from ...controller import Controller
from ...register import register
from ...context import Context
from ...utils import _
from ...config import get_config


class Risotto(Controller):
    async def on_join(self,
                      risotto_context: Context) -> None:
        """ pre-load servermodel and server
        """
        user_login = get_config()['global']['admin_user']
        sql = '''
            SELECT UserId
            FROM RisottoUser
            WHERE UserLogin = $1
        '''
        if await risotto_context.connection.fetchval(sql,
                                                     user_login) is None:
            await self._user_create(risotto_context,
                                    user_login,
                                    user_login,
                                    user_login)
            await self._user_role_create(risotto_context,
                                         user_login,
                                         'administrator',
                                         None,
                                         None)

    async def _user_create(self,
                           risotto_context: Context,
                           user_login: str,
                           user_name: str,
                           user_surname: str) -> Dict:
        user_insert = """INSERT INTO RisottoUser(UserLogin, UserName, UserSurname)
                         VALUES ($1,$2,$3)
                         RETURNING UserId
                      """
        user_id = await risotto_context.connection.fetchval(user_insert,
                                                            user_login,
                                                            user_name,
                                                            user_surname)
        await self.call('v1.setting.user.role.create',
                        risotto_context,
                        user_login=user_login,
                        role_name='all')
        return {'user_id': user_id,
                'user_login': user_login,
                'user_name': user_name,
                'user_surname': user_surname}

    @register('v1.setting.user.create')
    async def user_create(self,
                          risotto_context: Context,
                          user_login: str,
                          user_name: str,
                          user_surname: str) -> Dict:
        return await self._user_create(risotto_context,
                                       user_login,
                                       user_name,
                                       user_surname)

    @register('v1.setting.user.list')
    async def user_list(self,
                        risotto_context: Context) -> Dict:
        sql = '''
            SELECT UserId as user_id, UserLogin as user_login, UserName as user_name, UserSurname as user_surname
            FROM RisottoUser
        '''
        users = await risotto_context.connection.fetch(sql)
        return [dict(r) for r in users]

    @register('v1.setting.user.delete')
    async def user_delete(self,
                          risotto_context: Context,
                          user_login: str) -> Dict:
        sql = '''
            DELETE FROM RisottoUser
            WHERE UserLogin = $1
            RETURNING UserId as user_id, UserLogin as user_login, UserName as user_name, UserSurname as user_surname
        '''
        user = await risotto_context.connection.fetchrow(sql,
                                                         user_login)
        if user is None:
            raise Exception(_(f'unable to find user {user_login}'))
        return dict(user)

    async def _user_role_create(self,
                                risotto_context: Context,
                                user_login: str,
                                role_name: str,
                                role_attribute: str,
                                role_attribute_value: str) -> Dict:
        # Verify if user exists and get ID
        sql = '''
            SELECT UserId
            FROM RisottoUser
            WHERE UserLogin = $1
        '''
        user_id = await risotto_context.connection.fetchval(sql,
                                                            user_login)
        if user_id is None:
            raise Exception(_(f'unable to find user {user_login}'))
        if role_attribute == role_attribute_value == None:
            sql = '''SELECT RoleId
                     FROM UserRole
                     WHERE RoleUserId = $1 AND RoleName = $2
                  '''
            role_id = await risotto_context.connection.fetchval(sql,
                                                                user_id,
                                                                role_name)
        else:
            sql = '''SELECT RoleId
                     FROM UserRole
                     WHERE RoleUserId = $1 AND RoleName = $2 AND RoleAttribute = $3 AND RoleAttributeValue = $4
                  '''
            role_id = await risotto_context.connection.fetchval(sql,
                                                                user_id,
                                                                role_name,
                                                                role_attribute,
                                                                role_attribute_value)
        if role_id is None:
            sql = '''INSERT INTO UserRole(RoleUserId, RoleName, RoleAttribute, RoleAttributeValue)
                     VALUES($1,$2,$3,$4)
                     RETURNING RoleId
                  '''
            role_id = await risotto_context.connection.fetchval(sql,
                                                                user_id,
                                                                role_name,
                                                                role_attribute,
                                                                role_attribute_value)
        return {'role_id': role_id,
                'user_login': user_login,
                'role_name': role_name,
                'role_attribute': role_attribute,
                'role_attribute_value': role_attribute_value}

    @register('v1.setting.user.role.create')
    async def user_role_create(self,
                               risotto_context: Context,
                               user_login: str,
                               role_name: str,
                               role_attribute: str,
                               role_attribute_value: str) -> Dict:
        return await self._user_role_create(risotto_context,
                                            user_login,
                                            role_name,
                                            role_attribute,
                                            role_attribute_value)

    @register('v1.setting.user.role.list')
    async def user_role_list(self,
                             risotto_context: Context,
                             user_login: Optional[str]) -> Dict:
        if not user_login:
            sql = '''
                SELECT RoleId as role_id, RoleName as role_name, RoleAttribute as role_attribute, RoleAttributeValue as role_attribute_value, RisottoUser.UserLogin as user_login
                FROM UserRole, RisottoUser
                WHERE UserRole.RoleUserId = RisottoUser.UserId
            '''
            roles = await risotto_context.connection.fetch(sql)
        else:
            # Verify if user exists and get ID
            sql = '''
                SELECT UserId
                FROM RisottoUser
                WHERE UserLogin = $1
            '''
            user_id = await risotto_context.connection.fetchval(sql,
                                                                user_login)
            if user_id is None:
                raise Exception(_(f'unable to find user {user_login}'))
            sql = '''
                SELECT RoleId as role_id, RoleName as role_name, RoleAttribute as role_attribute, RoleAttributeValue as role_attribute_value, RisottoUser.UserLogin as user_login
                FROM UserRole, RisottoUser
                WHERE UserRole.RoleUserId = RisottoUser.UserId AND UserRole.RoleUserId = $1
            '''
            roles = await risotto_context.connection.fetch(sql,
                                                           user_id)
        return [dict(r) for r in roles]

    #
    # FIXME how do we know which role to delete? by attribute or just by its ID?
    # @register('v1.setting.user.role.delete')
    # async def user_role_delete(self,
    #                            risotto_context: Context,
    #                            user_login: str,
    #                            role_name: str) -> Dict:
    #     # Verify if user exists and get ID
    #     sql = '''
    #         SELECT UserId
    #         FROM RisottoUser
    #         WHERE UserLogin = $1
    #     '''
    #     user_id = await risotto_context.connection.fetchval(sql,
    #                                                         user_login)
    #     if user_id is None:
    #         raise Exception(_(f'unable to find user {user_login}'))
    #     sql = '''
    #         DELETE FROM RisottoRole
    #         WHERE RoleName = $1 AND UserId = $2
    #         RETURNING RoleId as role_id, RoleName as role_name, RoleAttribute as role_attribute, RoleAttributeValue as role_attribute_value
    #     '''
    #     role = await risotto_context.connection.fetchrow(sql,
    #                                                      role_name,
    #                                                      user_id)
    #     if role is None:
    #         raise Exception(_(f'unable to find role {role_name}'))
    #     return dict(role)
```
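The queries in this excerpt and in the RoleURI/URI code above imply the shape of the underlying tables. The sketch below is inferred from those queries only; column types and constraints are guesses, not the project's actual DDL.

```sql
-- Inferred, non-authoritative sketch of the tables used by the user/role queries.
CREATE TABLE RisottoUser (
    UserId      SERIAL PRIMARY KEY,
    UserLogin   TEXT UNIQUE NOT NULL,
    UserName    TEXT,
    UserSurname TEXT
);

CREATE TABLE UserRole (
    RoleId             SERIAL PRIMARY KEY,
    RoleUserId         INTEGER REFERENCES RisottoUser(UserId),
    RoleName           TEXT NOT NULL,
    RoleAttribute      TEXT,
    RoleAttributeValue TEXT
);

CREATE TABLE URI (
    URIId   SERIAL PRIMARY KEY,
    URIName TEXT UNIQUE NOT NULL
);

-- ON CONFLICT DO NOTHING in _uri_role_join suggests a uniqueness constraint here.
CREATE TABLE RoleURI (
    RoleName TEXT NOT NULL,
    URIId    INTEGER REFERENCES URI(URIId),
    UNIQUE (RoleName, URIId)
);
```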
```diff
@@ -1,9 +1,27 @@
 class Undefined:
     pass
+undefined = Undefined()
 
 
 def _(s):
     return s
 
 
-undefined = Undefined()
+def tiramisu_display_name(kls,
+                          dyn_name: 'Base'=None,
+                          suffix: str=None,
+                          ) -> str:
+    if dyn_name is not None:
+        name = dyn_name
+    else:
+        name = kls.impl_getname()
+    doc = kls.impl_get_information('doc', None)
+    if doc:
+        doc = str(doc)
+        if doc.endswith('.'):
+            doc = doc[:-1]
+        if suffix:
+            doc += suffix
+        if name != doc:
+            name += f'" "{doc}'
+    return name
```
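A rough illustration of what the new display-name hook produces. `FakeOption` below is invented for the example and only mimics the two tiramisu methods the function relies on; the indentation of the reconstructed body above is assumed.

```python
# Hypothetical stand-in: FakeOption is not part of risotto or tiramisu,
# it only provides the two methods tiramisu_display_name() calls.
class FakeOption:
    def impl_getname(self) -> str:
        return 'number_of_interfaces'

    def impl_get_information(self, key: str, default=None):
        return 'Number of interfaces.' if key == 'doc' else default


print(tiramisu_display_name(FakeOption()))
# -> number_of_interfaces" "Number of interfaces
```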
tests/__init__.py — new empty file (0 lines).
```diff
@@ -1,5 +1,15 @@
-from tiramisu import Storage
-from risotto.config import DATABASE_DIR
+try:
+    from tiramisu3 import Storage
+except:
+    from tiramisu import Storage
+from os.path import isfile as _isfile
+import os as _os
+_envfile = '/etc/risotto/risotto.conf'
+if _isfile(_envfile):
+    with open(_envfile, 'r') as fh_env:
+        for line in fh_env.readlines():
+            key, value = line.strip().split('=')
+            _os.environ[key] = value
 
 
-STORAGE = Storage(engine='sqlite3', dir_database=DATABASE_DIR, name='test')
+STORAGE = Storage(engine='sqlite3')
```
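The loader above expects one plain `KEY=value` pair per line and splits each line unconditionally on `=`, so blank or comment lines in `/etc/risotto/risotto.conf` would make it fail. A hypothetical example file (the key names are invented for illustration):

```
SOME_SETTING=value
ANOTHER_SETTING=other-value
```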
```diff
@@ -1,20 +1,29 @@
 from importlib import import_module
 import pytest
-from tiramisu import list_sessions, delete_session
+try:
+    from tiramisu3 import list_sessions, delete_session as _delete_session
+except:
+    from tiramisu import list_sessions, delete_session as _delete_session
+
 from .storage import STORAGE
+from risotto import services
 from risotto.context import Context
-from risotto.services import load_services
+#from risotto.services import load_services
 from risotto.dispatcher import dispatcher
 
 
+SOURCE_NAME = 'test'
+SERVERMODEL_NAME = 'sm1'
+
+
 def setup_module(module):
-    load_services(['config'],
-                  validate=False)
+#    load_services(['config'],
+#                  validate=False)
+    services.link_to_dispatcher(dispatcher, limit_services=['setting'], validate=False)
     config_module = dispatcher.get_service('config')
     config_module.save_storage = STORAGE
-    dispatcher.set_module('server', import_module(f'.server', 'fake_services'), True)
-    dispatcher.set_module('servermodel', import_module(f'.servermodel', 'fake_services'), True)
+    #dispatcher.set_module('server', import_module(f'.server', 'fake_services'), True)
+    #dispatcher.set_module('servermodel', import_module(f'.servermodel', 'fake_services'), True)
 
 
 def setup_function(function):
```
```diff
@@ -23,11 +32,11 @@ def setup_function(function):
     config_module.servermodel = {}
 
 
-def teardown_function(function):
+async def delete_session():
     # delete all sessions
     config_module = dispatcher.get_service('config')
-    for session in list_sessions(storage=config_module.save_storage):
-        delete_session(storage=config_module.save_storage, session_id=session)
+    for session in await list_sessions(storage=config_module.save_storage):
+        await _delete_session(storage=config_module.save_storage, session_id=session)
 
 
 def get_fake_context(module_name):
```
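Because the session API is now awaited, the synchronous `teardown_function` hook is replaced by an explicit coroutine that each test awaits itself. A sketch of the resulting test pattern (the test name is hypothetical; `onjoin()` and `delete_session()` are the helpers defined in this file):

```python
# Hypothetical test skeleton following the pattern used below.
@pytest.mark.asyncio
async def test_something():
    await onjoin()          # load the dispatcher and seed the test source
    ...                     # exercise a 'v1.setting.*' message
    await delete_session()  # explicit cleanup instead of a pytest teardown hook
```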
@ -38,127 +47,166 @@ def get_fake_context(module_name):
|
|||||||
return risotto_context
|
return risotto_context
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
async def onjoin(source=True):
|
||||||
async def test_on_join():
|
|
||||||
config_module = dispatcher.get_service('config')
|
config_module = dispatcher.get_service('config')
|
||||||
assert config_module.servermodel == {}
|
assert config_module.servermodel == {}
|
||||||
assert config_module.server == {}
|
assert config_module.server == {}
|
||||||
|
await delete_session()
|
||||||
#
|
#
|
||||||
|
#config_module.cache_root_path = 'tests/data'
|
||||||
|
await dispatcher.load()
|
||||||
|
await dispatcher.on_join(truncate=True)
|
||||||
|
if source:
|
||||||
|
fake_context = get_fake_context('config')
|
||||||
|
await dispatcher.call('v1',
|
||||||
|
'setting.source.create',
|
||||||
|
fake_context,
|
||||||
|
source_name=SOURCE_NAME,
|
||||||
|
source_directory='tests/data',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
INTERNAL_SOURCE = {'source_name': 'internal', 'source_directory': '/srv/risotto/seed/internal'}
|
||||||
|
TEST_SOURCE = {'source_name': 'test', 'source_directory': 'tests/data'}
|
||||||
|
|
||||||
|
|
||||||
|
##############################################################################################################################
|
||||||
|
# Source / Release
|
||||||
|
##############################################################################################################################
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_source_on_join():
|
||||||
|
# onjoin must create internal source
|
||||||
|
sources = [INTERNAL_SOURCE]
|
||||||
|
await onjoin(False)
|
||||||
fake_context = get_fake_context('config')
|
fake_context = get_fake_context('config')
|
||||||
config_module.cache_root_path = 'tests/data'
|
assert await dispatcher.call('v1',
|
||||||
await config_module.on_join(fake_context)
|
'setting.source.list',
|
||||||
assert list(config_module.servermodel.keys()) == [1, 2]
|
fake_context,
|
||||||
assert list(config_module.server) == [3]
|
) == sources
|
||||||
assert set(config_module.server[3]) == {'server', 'server_to_deploy', 'funcs_file'}
|
await delete_session()
|
||||||
assert config_module.server[3]['funcs_file'] == 'tests/data/1/funcs.py'
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_server_created():
|
async def test_source_create():
|
||||||
|
sources = [INTERNAL_SOURCE, TEST_SOURCE]
|
||||||
|
await onjoin()
|
||||||
config_module = dispatcher.get_service('config')
|
config_module = dispatcher.get_service('config')
|
||||||
|
assert list(config_module.servermodel.keys()) == ['last_base']
|
||||||
|
assert list(config_module.server) == []
|
||||||
fake_context = get_fake_context('config')
|
fake_context = get_fake_context('config')
|
||||||
config_module.cache_root_path = 'tests/data'
|
assert await dispatcher.call('v1',
|
||||||
await config_module.on_join(fake_context)
|
'setting.source.list',
|
||||||
#
|
fake_context,
|
||||||
assert list(config_module.server) == [3]
|
) == sources
|
||||||
await dispatcher.publish('v1',
|
await delete_session()
|
||||||
'server.created',
|
|
||||||
fake_context,
|
|
||||||
server_id=4,
|
|
||||||
server_name='name3',
|
|
||||||
server_description='description3',
|
|
||||||
server_servermodel_id=2)
|
|
||||||
assert list(config_module.server) == [3, 4]
|
|
||||||
assert set(config_module.server[4]) == {'server', 'server_to_deploy', 'funcs_file'}
|
|
||||||
assert config_module.server[4]['funcs_file'] == 'tests/data/2/funcs.py'
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_server_deleted():
|
async def test_source_describe():
|
||||||
config_module = dispatcher.get_service('config')
|
await onjoin()
|
||||||
fake_context = get_fake_context('config')
|
fake_context = get_fake_context('config')
|
||||||
config_module.cache_root_path = 'tests/data'
|
assert await dispatcher.call('v1',
|
||||||
await config_module.on_join(fake_context)
|
'setting.source.describe',
|
||||||
#
|
fake_context,
|
||||||
assert list(config_module.server) == [3]
|
source_name='internal',
|
||||||
await dispatcher.publish('v1',
|
) == INTERNAL_SOURCE
|
||||||
'server.created',
|
assert await dispatcher.call('v1',
|
||||||
fake_context,
|
'setting.source.describe',
|
||||||
server_id=4,
|
fake_context,
|
||||||
server_name='name4',
|
source_name=SOURCE_NAME,
|
||||||
server_description='description4',
|
) == TEST_SOURCE
|
||||||
server_servermodel_id=2)
|
await delete_session()
|
||||||
assert list(config_module.server) == [3, 4]
|
|
||||||
await dispatcher.publish('v1',
|
|
||||||
'server.deleted',
|
@pytest.mark.asyncio
|
||||||
fake_context,
|
async def test_release_internal_list():
|
||||||
server_id=4)
|
releases = [{'release_distribution': 'last',
|
||||||
assert list(config_module.server) == [3]
|
'release_name': 'none',
|
||||||
|
'source_name': 'internal'}]
|
||||||
|
|
||||||
|
await onjoin()
|
||||||
|
fake_context = get_fake_context('config')
|
||||||
|
assert await dispatcher.call('v1',
|
||||||
|
'setting.source.release.list',
|
||||||
|
fake_context,
|
||||||
|
source_name='internal',
|
||||||
|
) == releases
|
||||||
|
await delete_session()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_release_list():
|
||||||
|
releases = [{'release_distribution': 'last',
|
||||||
|
'release_name': '1',
|
||||||
|
'source_name': 'test'}]
|
||||||
|
|
||||||
|
await onjoin()
|
||||||
|
fake_context = get_fake_context('config')
|
||||||
|
assert await dispatcher.call('v1',
|
||||||
|
'setting.source.release.list',
|
||||||
|
fake_context,
|
||||||
|
source_name='test',
|
||||||
|
) == releases
|
||||||
|
await delete_session()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_release_describe():
|
||||||
|
|
||||||
|
await onjoin()
|
||||||
|
fake_context = get_fake_context('config')
|
||||||
|
assert await dispatcher.call('v1',
|
||||||
|
'setting.source.release.describe',
|
||||||
|
fake_context,
|
||||||
|
source_name='internal',
|
||||||
|
release_distribution='last',
|
||||||
|
) == {'release_distribution': 'last',
|
||||||
|
'release_name': 'none',
|
||||||
|
'source_name': 'internal'}
|
||||||
|
assert await dispatcher.call('v1',
|
||||||
|
'setting.source.release.describe',
|
||||||
|
fake_context,
|
||||||
|
source_name='test',
|
||||||
|
release_distribution='last',
|
||||||
|
) == {'release_distribution': 'last',
|
||||||
|
'release_name': '1',
|
||||||
|
'source_name': 'test'}
|
||||||
|
await delete_session()
|
||||||
|
|
||||||
|
|
||||||
|
##############################################################################################################################
|
||||||
|
# Servermodel
|
||||||
|
##############################################################################################################################
|
||||||
|
async def create_servermodel(name=SERVERMODEL_NAME,
|
||||||
|
parents_name=['base'],
|
||||||
|
):
|
||||||
|
fake_context = get_fake_context('config')
|
||||||
|
await dispatcher.call('v1',
|
||||||
|
'setting.servermodel.create',
|
||||||
|
fake_context,
|
||||||
|
servermodel_name=name,
|
||||||
|
servermodel_description='servermodel 1',
|
||||||
|
parents_name=parents_name,
|
||||||
|
source_name=SOURCE_NAME,
|
||||||
|
release_distribution='last',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_servermodel_created():
|
async def test_servermodel_created():
|
||||||
|
await onjoin()
|
||||||
config_module = dispatcher.get_service('config')
|
config_module = dispatcher.get_service('config')
|
||||||
fake_context = get_fake_context('config')
|
|
||||||
config_module.cache_root_path = 'tests/data'
|
|
||||||
await config_module.on_join(fake_context)
|
|
||||||
#
|
#
|
||||||
assert list(config_module.servermodel) == [1, 2]
|
assert list(config_module.servermodel) == ['last_base']
|
||||||
servermodel = {'servermodeid': 3,
|
await create_servermodel()
|
||||||
'servermodelname': 'name3'}
|
assert list(config_module.servermodel) == ['last_base', 'last_sm1']
|
||||||
await dispatcher.publish('v1',
|
assert not list(await config_module.servermodel['last_base'].config.parents())
|
||||||
'servermodel.created',
|
assert len(list(await config_module.servermodel['last_sm1'].config.parents())) == 1
|
||||||
fake_context,
|
await delete_session()
|
||||||
servermodel_id=3,
|
#
|
||||||
servermodel_description='name3',
|
#
|
||||||
release_id=1,
|
|
||||||
servermodel_name='name3')
|
|
||||||
assert list(config_module.servermodel) == [1, 2, 3]
|
|
||||||
assert not list(await config_module.servermodel[3].config.parents())
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_servermodel_herited_created():
|
|
||||||
config_module = dispatcher.get_service('config')
|
|
||||||
fake_context = get_fake_context('config')
|
|
||||||
config_module.cache_root_path = 'tests/data'
|
|
||||||
await config_module.on_join(fake_context)
|
|
||||||
#
|
|
||||||
assert list(config_module.servermodel) == [1, 2]
|
|
||||||
await dispatcher.publish('v1',
|
|
||||||
'servermodel.created',
|
|
||||||
fake_context,
|
|
||||||
servermodel_id=3,
|
|
||||||
servermodel_name='name3',
|
|
||||||
release_id=1,
|
|
||||||
servermodel_description='name3',
|
|
||||||
servermodel_parents_id=[1])
|
|
||||||
assert list(config_module.servermodel) == [1, 2, 3]
|
|
||||||
assert len(list(await config_module.servermodel[3].config.parents())) == 1
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_servermodel_multi_herited_created():
|
|
||||||
config_module = dispatcher.get_service('config')
|
|
||||||
fake_context = get_fake_context('config')
|
|
||||||
config_module.cache_root_path = 'tests/data'
|
|
||||||
await config_module.on_join(fake_context)
|
|
||||||
#
|
|
||||||
assert list(config_module.servermodel) == [1, 2]
|
|
||||||
await dispatcher.publish('v1',
|
|
||||||
'servermodel.created',
|
|
||||||
fake_context,
|
|
||||||
servermodel_id=3,
|
|
||||||
servermodel_name='name3',
|
|
||||||
release_id=1,
|
|
||||||
servermodel_description='name3',
|
|
||||||
servermodel_parents_id=[1, 2])
|
|
||||||
assert list(config_module.servermodel) == [1, 2, 3]
|
|
||||||
assert len(list(await config_module.servermodel[3].config.parents())) == 2
|
|
||||||
|
|
||||||
|
|
||||||
#@pytest.mark.asyncio
|
#@pytest.mark.asyncio
|
||||||
#async def test_servermodel_updated_not_exists():
|
#async def test_servermodel_herited_created():
|
||||||
# config_module = dispatcher.get_service('config')
|
# config_module = dispatcher.get_service('config')
|
||||||
# fake_context = get_fake_context('config')
|
# fake_context = get_fake_context('config')
|
||||||
# config_module.cache_root_path = 'tests/data'
|
# config_module.cache_root_path = 'tests/data'
|
||||||
@ -166,7 +214,28 @@ async def test_servermodel_multi_herited_created():
|
|||||||
# #
|
# #
|
||||||
# assert list(config_module.servermodel) == [1, 2]
|
# assert list(config_module.servermodel) == [1, 2]
|
||||||
# await dispatcher.publish('v1',
|
# await dispatcher.publish('v1',
|
||||||
# 'servermodel.updated',
|
# 'servermodel.created',
|
||||||
|
# fake_context,
|
||||||
|
# servermodel_id=3,
|
||||||
|
# servermodel_name='name3',
|
||||||
|
# release_id=1,
|
||||||
|
# servermodel_description='name3',
|
||||||
|
# servermodel_parents_id=[1])
|
||||||
|
# assert list(config_module.servermodel) == [1, 2, 3]
|
||||||
|
# assert len(list(await config_module.servermodel[3].config.parents())) == 1
|
||||||
|
# await delete_session()
|
||||||
|
#
|
||||||
|
#
|
||||||
|
#@pytest.mark.asyncio
|
||||||
|
#async def test_servermodel_multi_herited_created():
|
||||||
|
# config_module = dispatcher.get_service('config')
|
||||||
|
# fake_context = get_fake_context('config')
|
||||||
|
# config_module.cache_root_path = 'tests/data'
|
||||||
|
# await config_module.on_join(fake_context)
|
||||||
|
# #
|
||||||
|
# assert list(config_module.servermodel) == [1, 2]
|
||||||
|
# await dispatcher.publish('v1',
|
||||||
|
# 'servermodel.created',
|
||||||
# fake_context,
|
# fake_context,
|
||||||
# servermodel_id=3,
|
# servermodel_id=3,
|
||||||
# servermodel_name='name3',
|
# servermodel_name='name3',
|
||||||
@ -175,164 +244,311 @@ async def test_servermodel_multi_herited_created():
|
|||||||
# servermodel_parents_id=[1, 2])
|
# servermodel_parents_id=[1, 2])
|
||||||
# assert list(config_module.servermodel) == [1, 2, 3]
|
# assert list(config_module.servermodel) == [1, 2, 3]
|
||||||
# assert len(list(await config_module.servermodel[3].config.parents())) == 2
|
# assert len(list(await config_module.servermodel[3].config.parents())) == 2
|
||||||
|
# await delete_session()
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
# @pytest.mark.asyncio
|
##@pytest.mark.asyncio
|
||||||
# async def test_servermodel_updated1():
|
##async def test_servermodel_updated_not_exists():
|
||||||
# config_module = dispatcher.get_service('config')
|
## config_module = dispatcher.get_service('config')
|
||||||
# fake_context = get_fake_context('config')
|
## fake_context = get_fake_context('config')
|
||||||
# config_module.cache_root_path = 'tests/data'
|
## config_module.cache_root_path = 'tests/data'
|
||||||
# await config_module.on_join(fake_context)
|
## await config_module.on_join(fake_context)
|
||||||
# #
|
## #
|
||||||
# assert list(config_module.servermodel) == [1, 2]
|
## assert list(config_module.servermodel) == [1, 2]
|
||||||
# metaconfig1 = config_module.servermodel[1]
|
## await dispatcher.publish('v1',
|
||||||
# metaconfig2 = config_module.servermodel[2]
|
## 'servermodel.updated',
|
||||||
# mixconfig1 = (await metaconfig1.config.list())[0]
|
## fake_context,
|
||||||
# mixconfig2 = (await metaconfig2.config.list())[0]
|
## servermodel_id=3,
|
||||||
# assert len(list(await metaconfig1.config.parents())) == 0
|
## servermodel_name='name3',
|
||||||
# assert len(list(await metaconfig2.config.parents())) == 1
|
## release_id=1,
|
||||||
# assert len(list(await mixconfig1.config.list())) == 1
|
## servermodel_description='name3',
|
||||||
# assert len(list(await mixconfig2.config.list())) == 0
|
## servermodel_parents_id=[1, 2])
|
||||||
# #
|
## assert list(config_module.servermodel) == [1, 2, 3]
|
||||||
# await dispatcher.publish('v1',
|
## assert len(list(await config_module.servermodel[3].config.parents())) == 2
|
||||||
# 'servermodel.updated',
|
## await delete_session()
|
||||||
# fake_context,
|
##
|
||||||
# servermodel_id=1,
|
##
|
||||||
# servermodel_name='name1-1',
|
## @pytest.mark.asyncio
|
||||||
# release_id=1,
|
## async def test_servermodel_updated1():
|
||||||
# servermodel_description='name1-1')
|
## config_module = dispatcher.get_service('config')
|
||||||
# assert set(config_module.servermodel) == {1, 2}
|
## fake_context = get_fake_context('config')
|
||||||
# assert config_module.servermodel[1].information.get('servermodel_name') == 'name1-1'
|
## config_module.cache_root_path = 'tests/data'
|
||||||
# assert metaconfig1 != config_module.servermodel[1]
|
## await config_module.on_join(fake_context)
|
||||||
# assert metaconfig2 == config_module.servermodel[2]
|
## #
|
||||||
# metaconfig1 = config_module.servermodel[1]
|
## assert list(config_module.servermodel) == [1, 2]
|
||||||
# assert mixconfig1 != next(metaconfig1.config.list())
|
## metaconfig1 = config_module.servermodel[1]
|
||||||
# mixconfig1 = next(metaconfig1.config.list())
|
## metaconfig2 = config_module.servermodel[2]
|
||||||
# #
|
## mixconfig1 = (await metaconfig1.config.list())[0]
|
||||||
# assert len(list(await metaconfig1.config.parents())) == 0
|
## mixconfig2 = (await metaconfig2.config.list())[0]
|
||||||
# assert len(list(await metaconfig2.config.parents())) == 1
|
## assert len(list(await metaconfig1.config.parents())) == 0
|
||||||
# assert len(list(await mixconfig1.config.list())) == 1
|
## assert len(list(await metaconfig2.config.parents())) == 1
|
||||||
# assert len(list(await mixconfig2.config.list())) == 0
|
## assert len(list(await mixconfig1.config.list())) == 1
|
||||||
#
|
## assert len(list(await mixconfig2.config.list())) == 0
|
||||||
#
|
## #
|
||||||
# @pytest.mark.asyncio
|
## await dispatcher.publish('v1',
|
||||||
# async def test_servermodel_updated2():
|
## 'servermodel.updated',
|
||||||
# config_module = dispatcher.get_service('config')
|
## fake_context,
|
||||||
# fake_context = get_fake_context('config')
|
## servermodel_id=1,
|
||||||
# config_module.cache_root_path = 'tests/data'
|
## servermodel_name='name1-1',
|
||||||
# await config_module.on_join(fake_context)
|
## release_id=1,
|
||||||
# # create a new servermodel
|
## servermodel_description='name1-1')
|
||||||
# assert list(config_module.servermodel) == [1, 2]
|
## assert set(config_module.servermodel) == {1, 2}
|
||||||
# mixconfig1 = next(config_module.servermodel[1].config.list())
|
## assert config_module.servermodel[1].information.get('servermodel_name') == 'name1-1'
|
||||||
# mixconfig2 = next(config_module.servermodel[2].config.list())
|
## assert metaconfig1 != config_module.servermodel[1]
|
||||||
# assert len(list(mixconfig1.config.list())) == 1
|
## assert metaconfig2 == config_module.servermodel[2]
|
||||||
# assert len(list(mixconfig2.config.list())) == 0
|
## metaconfig1 = config_module.servermodel[1]
|
||||||
# await dispatcher.publish('v1',
|
## assert mixconfig1 != next(metaconfig1.config.list())
|
||||||
# 'servermodel.created',
|
## mixconfig1 = next(metaconfig1.config.list())
|
||||||
# fake_context,
|
## #
|
||||||
# servermodel_id=3,
|
## assert len(list(await metaconfig1.config.parents())) == 0
|
||||||
# servermodel_name='name3',
|
## assert len(list(await metaconfig2.config.parents())) == 1
|
||||||
# release_id=1,
|
## assert len(list(await mixconfig1.config.list())) == 1
|
||||||
# servermodel_description='name3',
|
## assert len(list(await mixconfig2.config.list())) == 0
|
||||||
# servermodel_parents_id=[1])
|
## await delete_session()
|
||||||
# assert list(config_module.servermodel) == [1, 2, 3]
|
##
|
||||||
# assert len(list(await config_module.servermodel[3].config.parents())) == 1
|
##
|
||||||
# assert await config_module.servermodel[3].information.get('servermodel_name') == 'name3'
|
## @pytest.mark.asyncio
|
||||||
# assert len(list(await mixconfig1.config.list())) == 2
|
## async def test_servermodel_updated2():
|
||||||
# assert len(list(await mixconfig2.config.list())) == 0
|
## config_module = dispatcher.get_service('config')
|
||||||
# #
|
## fake_context = get_fake_context('config')
|
||||||
# await dispatcher.publish('v1',
|
## config_module.cache_root_path = 'tests/data'
|
||||||
# 'servermodel.updated',
|
## await config_module.on_join(fake_context)
|
||||||
# fake_context,
|
## # create a new servermodel
|
||||||
# servermodel_id=3,
|
## assert list(config_module.servermodel) == [1, 2]
|
||||||
# servermodel_name='name3-1',
|
## mixconfig1 = next(config_module.servermodel[1].config.list())
|
||||||
# release_id=1,
|
## mixconfig2 = next(config_module.servermodel[2].config.list())
|
||||||
# servermodel_description='name3-1',
|
## assert len(list(mixconfig1.config.list())) == 1
|
||||||
# servermodel_parents_id=[1, 2])
|
## assert len(list(mixconfig2.config.list())) == 0
|
||||||
# assert list(config_module.servermodel) == [1, 2, 3]
|
## await dispatcher.publish('v1',
|
||||||
# assert config_module.servermodel[3].information.get('servermodel_name') == 'name3-1'
|
## 'servermodel.created',
|
||||||
# assert len(list(mixconfig1.config.list())) == 2
|
## fake_context,
|
||||||
# assert len(list(mixconfig2.config.list())) == 1
|
## servermodel_id=3,
|
||||||
#
|
## servermodel_name='name3',
|
||||||
#
|
## release_id=1,
|
||||||
# @pytest.mark.asyncio
|
## servermodel_description='name3',
|
||||||
# async def test_servermodel_updated_config():
|
## servermodel_parents_id=[1])
|
||||||
# config_module = dispatcher.get_service('config')
|
## assert list(config_module.servermodel) == [1, 2, 3]
|
||||||
# fake_context = get_fake_context('config')
|
## assert len(list(await config_module.servermodel[3].config.parents())) == 1
|
||||||
# config_module.cache_root_path = 'tests/data'
|
## assert await config_module.servermodel[3].information.get('servermodel_name') == 'name3'
|
||||||
# await config_module.on_join(fake_context)
|
## assert len(list(await mixconfig1.config.list())) == 2
|
||||||
# #
|
## assert len(list(await mixconfig2.config.list())) == 0
|
||||||
# config_module.servermodel[1].property.read_write()
|
## #
|
||||||
# assert config_module.servermodel[1].option('creole.general.mode_conteneur_actif').value.get() == 'non'
|
## await dispatcher.publish('v1',
|
||||||
# config_module.servermodel[1].option('creole.general.mode_conteneur_actif').value.set('oui')
|
## 'servermodel.updated',
|
||||||
# assert config_module.servermodel[1].option('creole.general.mode_conteneur_actif').value.get() == 'oui'
|
## fake_context,
|
||||||
# #
|
## servermodel_id=3,
|
||||||
# await dispatcher.publish('v1',
|
## servermodel_name='name3-1',
|
||||||
# 'servermodel.updated',
|
## release_id=1,
|
||||||
# fake_context,
|
## servermodel_description='name3-1',
|
||||||
# servermodel_id=1,
|
## servermodel_parents_id=[1, 2])
|
||||||
# servermodel_name='name1-1',
|
## assert list(config_module.servermodel) == [1, 2, 3]
|
||||||
# release_id=1,
|
## assert config_module.servermodel[3].information.get('servermodel_name') == 'name3-1'
|
||||||
# servermodel_description='name1-1')
|
## assert len(list(mixconfig1.config.list())) == 2
|
||||||
# assert config_module.servermodel[1].option('creole.general.mode_conteneur_actif').value.get() == 'oui'
|
## assert len(list(mixconfig2.config.list())) == 1
|
||||||
|
## await delete_session()
|
||||||
|
##
|
||||||
|
##
|
||||||
|
## @pytest.mark.asyncio
|
||||||
|
## async def test_servermodel_updated_config():
|
||||||
|
## config_module = dispatcher.get_service('config')
|
||||||
|
## fake_context = get_fake_context('config')
|
||||||
|
## config_module.cache_root_path = 'tests/data'
|
||||||
|
## await config_module.on_join(fake_context)
|
||||||
|
## #
|
||||||
|
## config_module.servermodel[1].property.read_write()
|
||||||
|
## assert config_module.servermodel[1].option('configuration.general.mode_conteneur_actif').value.get() == 'non'
|
||||||
|
## config_module.servermodel[1].option('configuration.general.mode_conteneur_actif').value.set('oui')
|
||||||
|
## assert config_module.servermodel[1].option('configuration.general.mode_conteneur_actif').value.get() == 'oui'
|
||||||
|
## #
|
||||||
|
## await dispatcher.publish('v1',
|
||||||
|
## 'servermodel.updated',
|
||||||
|
## fake_context,
|
||||||
|
## servermodel_id=1,
|
||||||
|
## servermodel_name='name1-1',
|
||||||
|
## release_id=1,
|
||||||
|
## servermodel_description='name1-1')
|
||||||
|
## assert config_module.servermodel[1].option('configuration.general.mode_conteneur_actif').value.get() == 'oui'
|
||||||
|
## await delete_session()
|
||||||
|
|
||||||
|
|
||||||
|
##############################################################################################################################
|
||||||
|
# Server
|
||||||
|
##############################################################################################################################
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_server_created_base():
|
||||||
|
await onjoin()
|
||||||
|
config_module = dispatcher.get_service('config')
|
||||||
|
fake_context = get_fake_context('config')
|
||||||
|
#
|
||||||
|
assert list(config_module.server) == []
|
||||||
|
await dispatcher.on_join(truncate=True)
|
||||||
|
server_name = 'dns.test.lan'
|
||||||
|
await dispatcher.publish('v1',
|
||||||
|
'infra.server.created',
|
||||||
|
fake_context,
|
||||||
|
server_name=server_name,
|
||||||
|
server_description='description_created',
|
||||||
|
servermodel_name='base',
|
||||||
|
release_distribution='last',
|
||||||
|
site_name='site_1',
|
||||||
|
zones_name=['zones'],
|
||||||
|
)
|
||||||
|
assert list(config_module.server) == [server_name]
|
||||||
|
assert set(config_module.server[server_name]) == {'server', 'server_to_deploy', 'funcs_file'}
|
||||||
|
assert config_module.server[server_name]['funcs_file'] == '/var/cache/risotto/servermodel/last/base/funcs.py'
|
||||||
|
await delete_session()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_server_created_own_sm():
|
||||||
|
await onjoin()
|
||||||
|
config_module = dispatcher.get_service('config')
|
||||||
|
fake_context = get_fake_context('config')
|
||||||
|
await create_servermodel()
|
||||||
|
#
|
||||||
|
assert list(config_module.server) == []
|
||||||
|
await dispatcher.on_join(truncate=True)
|
||||||
|
server_name = 'dns.test.lan'
|
||||||
|
await dispatcher.publish('v1',
|
||||||
|
'infra.server.created',
|
||||||
|
fake_context,
|
||||||
|
server_name=server_name,
|
||||||
|
server_description='description_created',
|
||||||
|
servermodel_name=SERVERMODEL_NAME,
|
||||||
|
source_name=SOURCE_NAME,
|
||||||
|
release_distribution='last',
|
||||||
|
site_name='site_1',
|
||||||
|
zones_name=['zones'],
|
||||||
|
)
|
||||||
|
assert list(config_module.server) == [server_name]
|
||||||
|
assert set(config_module.server[server_name]) == {'server', 'server_to_deploy', 'funcs_file'}
|
||||||
|
assert config_module.server[server_name]['funcs_file'] == '/var/cache/risotto/servermodel/last/sm1/funcs.py'
|
||||||
|
await delete_session()
|
||||||
|
|
||||||
|
|
||||||
|
#@pytest.mark.asyncio
|
||||||
|
#async def test_server_deleted():
|
||||||
|
# config_module = dispatcher.get_service('config')
|
||||||
|
# config_module.cache_root_path = 'tests/data'
|
||||||
|
# await config_module.on_join(fake_context)
|
||||||
|
# #
|
||||||
|
# assert list(config_module.server) == [3]
|
||||||
|
# await dispatcher.publish('v1',
|
||||||
|
# 'server.created',
|
||||||
|
# fake_context,
|
||||||
|
# server_id=4,
|
||||||
|
# server_name='name4',
|
||||||
|
# server_description='description4',
|
||||||
|
# server_servermodel_id=2)
|
||||||
|
# assert list(config_module.server) == [3, 4]
|
||||||
|
# await dispatcher.publish('v1',
|
||||||
|
# 'server.deleted',
|
||||||
|
# fake_context,
|
||||||
|
# server_id=4)
|
||||||
|
# assert list(config_module.server) == [3]
|
||||||
|
# await delete_session()
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_server_configuration_get():
|
async def test_server_configuration_get():
|
||||||
|
await onjoin()
|
||||||
config_module = dispatcher.get_service('config')
|
config_module = dispatcher.get_service('config')
|
||||||
fake_context = get_fake_context('config')
|
fake_context = get_fake_context('config')
|
||||||
config_module.cache_root_path = 'tests/data'
|
await create_servermodel()
|
||||||
await config_module.on_join(fake_context)
|
await dispatcher.on_join(truncate=True)
|
||||||
|
server_name = 'dns.test.lan'
|
||||||
|
await dispatcher.publish('v1',
|
||||||
|
'infra.server.created',
|
||||||
|
fake_context,
|
||||||
|
server_name=server_name,
|
||||||
|
server_description='description_created',
|
||||||
|
servermodel_name=SERVERMODEL_NAME,
|
||||||
|
source_name=SOURCE_NAME,
|
||||||
|
release_distribution='last',
|
||||||
|
site_name='site_1',
|
||||||
|
zones_name=['zones'],
|
||||||
|
)
|
||||||
#
|
#
|
||||||
await config_module.server[3]['server_to_deploy'].property.read_write()
|
await config_module.server[server_name]['server'].property.read_write()
|
||||||
assert await config_module.server[3]['server_to_deploy'].option('creole.general.mode_conteneur_actif').value.get() == 'non'
|
assert await config_module.server[server_name]['server'].option('configuration.general.number_of_interfaces').value.get() == 1
|
||||||
await config_module.server[3]['server_to_deploy'].option('creole.general.mode_conteneur_actif').value.set('oui')
|
await config_module.server[server_name]['server'].option('configuration.general.number_of_interfaces').value.set(2)
|
||||||
assert await config_module.server[3]['server_to_deploy'].option('creole.general.mode_conteneur_actif').value.get() == 'oui'
|
assert await config_module.server[server_name]['server'].option('configuration.general.number_of_interfaces').value.get() == 2
|
||||||
assert await config_module.server[3]['server'].option('creole.general.mode_conteneur_actif').value.get() == 'non'
|
assert await config_module.server[server_name]['server_to_deploy'].option('configuration.general.number_of_interfaces').value.get() == 1
|
||||||
#
|
#
|
||||||
|
configuration = {'server_name': server_name,
|
||||||
|
'deployed': False,
|
||||||
|
'configuration': {'configuration.general.number_of_interfaces': 1,
|
||||||
|
'configuration.general.interfaces_list': [0],
|
||||||
|
'configuration.interface_0.domain_name_eth0': 'dns.test.lan'
|
||||||
|
}
|
||||||
|
}
|
||||||
values = await dispatcher.call('v1',
|
values = await dispatcher.call('v1',
|
||||||
'config.configuration.server.get',
|
'setting.config.configuration.server.get',
|
||||||
fake_context,
|
fake_context,
|
||||||
server_id=3)
|
server_name=server_name,
|
||||||
configuration = {'configuration':
|
deployed=False,
|
||||||
{'creole.general.mode_conteneur_actif': 'non',
|
)
|
||||||
'creole.general.master.master': [],
|
|
||||||
'creole.general.master.slave1': [],
|
|
||||||
'creole.general.master.slave2': [],
|
|
||||||
'containers.container0.files.file0.mkdir': False,
|
|
||||||
'containers.container0.files.file0.name': '/etc/mailname',
|
|
||||||
'containers.container0.files.file0.rm': False,
|
|
||||||
'containers.container0.files.file0.source': 'mailname',
|
|
||||||
'containers.container0.files.file0.activate': True},
|
|
||||||
'server_id': 3,
|
|
||||||
'deployed': True}
|
|
||||||
assert values == configuration
|
assert values == configuration
|
||||||
#
|
#
|
||||||
values = await dispatcher.call('v1',
|
await delete_session()
|
||||||
'config.configuration.server.get',
|
|
||||||
fake_context,
|
|
||||||
server_id=3,
|
|
||||||
deployed=False)
|
|
||||||
configuration['configuration']['creole.general.mode_conteneur_actif'] = 'oui'
|
|
||||||
configuration['deployed'] = False
|
|
||||||
assert values == configuration
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_config_deployed():
|
async def test_server_configuration_deployed():
|
||||||
|
await onjoin()
|
||||||
config_module = dispatcher.get_service('config')
|
config_module = dispatcher.get_service('config')
|
||||||
fake_context = get_fake_context('config')
|
fake_context = get_fake_context('config')
|
||||||
config_module.cache_root_path = 'tests/data'
|
await create_servermodel()
|
||||||
await config_module.on_join(fake_context)
|
await dispatcher.on_join(truncate=True)
|
||||||
|
server_name = 'dns.test.lan'
|
||||||
|
await dispatcher.publish('v1',
|
||||||
|
'infra.server.created',
|
||||||
|
fake_context,
|
||||||
|
server_name=server_name,
|
||||||
|
server_description='description_created',
|
||||||
|
servermodel_name=SERVERMODEL_NAME,
|
||||||
|
source_name=SOURCE_NAME,
|
||||||
|
release_distribution='last',
|
||||||
|
site_name='site_1',
|
||||||
|
zones_name=['zones'],
|
||||||
|
)
|
||||||
#
|
#
|
||||||
await config_module.server[3]['server_to_deploy'].property.read_write()
|
await config_module.server[server_name]['server'].property.read_write()
|
||||||
assert await config_module.server[3]['server_to_deploy'].option('creole.general.mode_conteneur_actif').value.get() == 'non'
|
assert await config_module.server[server_name]['server'].option('configuration.general.number_of_interfaces').value.get() == 1
|
||||||
await config_module.server[3]['server_to_deploy'].option('creole.general.mode_conteneur_actif').value.set('oui')
|
await config_module.server[server_name]['server'].option('configuration.general.number_of_interfaces').value.set(2)
|
||||||
assert await config_module.server[3]['server_to_deploy'].option('creole.general.mode_conteneur_actif').value.get() == 'oui'
|
assert await config_module.server[server_name]['server'].option('configuration.general.number_of_interfaces').value.get() == 2
|
||||||
assert await config_module.server[3]['server'].option('creole.general.mode_conteneur_actif').value.get() == 'non'
|
assert await config_module.server[server_name]['server_to_deploy'].option('configuration.general.number_of_interfaces').value.get() == 1
|
||||||
values = await dispatcher.publish('v1',
|
#
|
||||||
'config.configuration.server.deploy',
|
configuration = {'server_name': server_name,
|
||||||
fake_context,
|
'deployed': False,
|
||||||
server_id=3)
|
'configuration': {'configuration.general.number_of_interfaces': 1,
|
||||||
assert await config_module.server[3]['server_to_deploy'].option('creole.general.mode_conteneur_actif').value.get() == 'oui'
|
'configuration.general.interfaces_list': [0],
|
||||||
assert await config_module.server[3]['server'].option('creole.general.mode_conteneur_actif').value.get() == 'oui'
|
'configuration.interface_0.domain_name_eth0': 'dns.test.lan'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
try:
|
||||||
|
await dispatcher.call('v1',
|
||||||
|
'setting.config.configuration.server.get',
|
||||||
|
fake_context,
|
||||||
|
server_name=server_name,
|
||||||
|
)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
raise Exception('should raise propertyerror')
|
||||||
|
|
||||||
|
values = await dispatcher.call('v1',
|
||||||
|
'setting.config.configuration.server.deploy',
|
||||||
|
fake_context,
|
||||||
|
server_name=server_name,
|
||||||
|
)
|
||||||
|
assert values == {'server_name': 'dns.test.lan', 'deployed': True}
|
||||||
|
await dispatcher.call('v1',
|
||||||
|
'setting.config.configuration.server.get',
|
||||||
|
fake_context,
|
||||||
|
server_name=server_name,
|
||||||
|
)
|
||||||
|
|
||||||
|
#
|
||||||
|
await delete_session()
|
||||||
|
```diff
@@ -2,7 +2,7 @@ from importlib import import_module
 import pytest
 from .storage import STORAGE
 from risotto.context import Context
-from risotto.services import load_services
+#from risotto.services import load_services
 from risotto.dispatcher import dispatcher
 from risotto.services.session.storage import storage_server, storage_servermodel
 
@@ -16,9 +16,9 @@ def get_fake_context(module_name):
 
 
 def setup_module(module):
-    load_services(['config', 'session'],
-                  validate=False,
-                  test=True)
+    #load_services(['config', 'session'],
+    #              validate=False,
+    #              test=True)
     config_module = dispatcher.get_service('config')
     config_module.save_storage = STORAGE
     dispatcher.set_module('server', import_module(f'.server', 'fake_services'), True)
```
Block a user