Compare commits

...

170 Commits

Author SHA1 Message Date
wpetit daa6987159 feat: add gitea-package.sh script 2024-05-13 09:16:20 +02:00
wpetit 90587b05a9 feat(standard-make): use details for large test outputs 2024-04-11 14:27:17 +02:00
wpetit 170f143b00 feat: add utils.hasChanges() utility method 2023-11-29 12:20:39 +01:00
wpetit fca4ac23d6 feat(container): use .trivyignore file when validation image with trivy 2023-11-17 14:01:45 +01:00
wpetit 0ac529eac7 feat(standard-make): set MKT_PROJECT_VERSION_BRANCH_NAME value by default 2023-11-16 14:00:32 +01:00
wpetit b98e8ad73b fix(standard-make): fix typo in commit link 2023-11-16 12:36:49 +01:00
wpetit f6d9255c5c feat(container): add floating latest tag when publishing images 2023-11-16 11:27:10 +01:00
wpetit 758f37e8e4 feat(standard-make-pipeline): use host network for test environment 2023-11-15 16:53:20 +01:00
wpetit 2849a3d8d0 feat(container): publish image with multiple tags 2023-11-15 12:54:35 +01:00
wpetit 2c182df6a6 feat(gitea-download): allow anonymous download 2023-10-19 13:14:38 +02:00
wpetit 3bf8e64888 fix(version): return command output 2023-08-24 08:42:27 -06:00
wpetit 7da0dd95b1 feat(utils): use commit timestamp to generate project version tags 2023-08-24 07:51:05 -06:00
wpetit efbdad226c feat(standard-make): add staging and master to default release branches 2023-08-23 15:16:33 -06:00
wpetit 1d956d60b1 doc: fix typo 2023-08-18 11:02:13 -06:00
wpetit cdaff5d8db Merge pull request 'Création d'un pipeline "standard" basé sur les tâches Make' (#6) from standard-make-pipeline into master
Reviewed-on: #6
2023-08-18 18:55:54 +02:00
wpetit 4d0070040a feat(standard-make): create standard make-based pipeline 2023-08-18 10:48:58 -06:00
Philippe Caseiro 120d5d5fb5 fix(kube): using make scan instead of trivy check 2023-06-21 09:34:12 +02:00
Philippe Caseiro 92bec0f127 fix(kube): use make scan for trivy scan 2023-06-19 16:59:05 +02:00
Philippe Caseiro d252924f22 fix(kube): replace special caracters in branch name again 2023-06-19 16:44:24 +02:00
Philippe Caseiro bad27aa934 fix(kube): replace special caracters in branch name 2023-06-19 16:37:59 +02:00
Philippe Caseiro 418891082a fix(kube): using current branch name in tags 2023-06-19 16:33:09 +02:00
Philippe Caseiro bdaef91251 feat(kube): adding new groovy script for kube projects 2023-06-19 16:22:43 +02:00
wpetit 83867e3a98 feat(gitea-download): accept self-signed certificates 2023-05-16 13:06:11 +02:00
wpetit 6ebceae3b9 feat(gitea,release): use multisort to bypass gitea created_at bug 2023-05-10 15:47:48 +02:00
Matthieu Lamalle abe8dd5f1b update sshkey for sonarqube 2023-05-03 15:58:38 +02:00
Laurent Gourvenec 7afabcedde feat(debian packaging): update pipeline 2023-04-27 14:45:28 +02:00
Matthieu Lamalle 1d35892e87 podman correction 2023-04-18 16:09:00 +02:00
wpetit 21c7fb0e61 feat(gitea,download): allow attachments filtering with regex 2023-03-29 14:35:00 +02:00
wpetit 199b0bf5d1 feat(gitea,release): auto cleanup prereleases 2023-03-29 13:05:48 +02:00
wpetit 3f1b8bcda1 feat(gitea,release): add retry flag and remove verbose one 2023-03-06 14:13:34 +01:00
wpetit 6c5c580156 Merge pull request 'Fixe erreur script sous manjaro' (#5) from issue-4 into master
Reviewed-on: #5
2023-03-01 10:12:33 +01:00
Benjamin Gaudé 2fbe2ab0b0 Gestion des chemins de destination via un tableau 2023-03-01 10:10:55 +01:00
Benjamin Gaudé 85cb43b272 Fixe erreur script sous manjaro 2023-02-28 15:04:41 +01:00
wpetit 13dc4cbd13 Utilisation du proxy cache Cadoles 2023-02-15 16:28:57 +01:00
wpetit 50310eee17 gitea: publication d'une notification sur le canal jenkins lors d'une release 2022-12-01 12:38:14 -06:00
wpetit c5fb279a11 pa11y: ajout variable PA11Y_IGNORE 2022-11-21 16:45:38 -06:00
wpetit 2c514cf61b pa11y: extraction des variables par défaut liées au MSE 2022-11-21 12:19:14 -06:00
Rudy Masson 67b6e21093 maj pa11y: ajout des variables d'environnement groovy 2022-11-18 16:17:22 +01:00
Rudy Masson 7ac1644563 maj pa11y: ajout de cookie poour choix de langue 2022-11-18 15:18:23 +01:00
Rudy Masson 11014fd443 alpine 3.16, reporter par défaut 2022-11-17 10:54:17 +01:00
Rudy Masson 4abd2c5fef modification Dockerfile pa11y, update pa11y@6.2.3 2022-11-17 09:14:46 +01:00
Rudy Masson 6d48542bfc test cli native 2022-11-16 13:31:05 +01:00
Rudy Masson 92fea37170 test reporter html 2022-11-16 13:07:23 +01:00
wpetit 1d8b6495b2 feat(cpkg): remove 'b' marker in tag generation 2022-10-26 07:48:36 -05:00
wpetit 9b58ab6971 feat(letsencrypt): RedHat compatible script 2022-10-25 12:37:38 -05:00
wpetit 190b01fa6d feat(gitea): add download() method 2022-10-22 00:49:11 +02:00
wpetit 522deb3c6a feat(gitea): use ipv4 to contact api 2022-10-21 23:29:04 +02:00
wpetit 13c89a3cde Merge branch 'podman' 2022-10-20 23:23:39 +02:00
wpetit 77de05e856 feat: add cleanup-worker script 2022-10-20 21:22:17 +02:00
wpetit d093542dc2 feat(podman): add method to generate podman based docker image package 2022-10-20 16:19:09 +02:00
wpetit 1ddb5691ca fix(cpkg): reuse latest version number 2022-10-17 23:48:47 +02:00
wpetit 77a7c46d3f doc(container): add buildAndPublishImage parameters documentation 2022-10-17 16:35:03 +02:00
wpetit 67f18d806d feat(container): publish on cadoles registry by default 2022-10-17 16:04:23 +02:00
wpetit 14220ae4e3 fix: publish on custom registry 2022-10-12 16:53:23 +02:00
wpetit 8e1b257144 feat(container): add image build validation steps 2022-10-10 17:34:12 +02:00
wpetit e670fb8bf6 feat: add container + nfpm utilities 2022-10-04 15:32:26 +02:00
wpetit 57194c84eb gitea.release(): allow release name customization 2022-09-30 17:27:42 +02:00
wpetit 14bf4665f1 symfonyAppPipeline: use 'symfony php' command ton run tools 2022-09-27 14:45:41 +02:00
wpetit 37445a815e cpkg: handles cpkg tags 2022-09-27 11:02:57 +02:00
wpetit 9c79307ef9 hook: check for null object before introspection 2022-09-21 11:33:58 +02:00
wpetit b28d9f9e69 hook: use workspace as root directory 2022-09-21 11:26:40 +02:00
wpetit a6b8706753 symfonyAppPipeline: fix hook stages 2022-09-20 16:28:03 +02:00
wpetit ff382b25ba symfonyAppPipeline: add build-symfony-image hook 2022-09-20 16:26:44 +02:00
wpetit 12bed86b97 symfonyAppPipeline: add actionable pre/post hooks 2022-09-20 16:24:05 +02:00
wpetit 3272427766 mse-rgaa: utilisation de credentials pour l'authentification basic auth 2022-09-14 09:14:34 +02:00
wpetit cff97b7e02 cpkg: ajout numéro de build en suffixe du numéro de version 2022-09-12 18:37:32 +02:00
wpetit bc8b1ec7d3 cpkg: force tag 2022-09-12 18:28:09 +02:00
wpetit 1d1a679d62 Passage du payload via un fichier temporaire pour la création de publication Gitea 2022-09-08 17:03:54 +02:00
wpetit c77c020be0 Utilisation de --rawfile pour l'injection du corps de texte de la release 2022-09-08 16:30:27 +02:00
wpetit 356dc6d16c Suppression automatique d'une release gitea pré-existante 2022-09-08 15:27:33 +02:00
wpetit bcf34404a2 Génération automatique de version avec incrément piloté par git 2022-09-08 11:32:50 +02:00
wpetit 45ff14ce10 Ajout d'une méthode gitea.release() 2022-09-05 14:39:45 +02:00
Matthieu Lamalle 97352aad87 correction commande apt 2022-09-02 12:03:15 +02:00
Benjamin Gaudé 502c2c7ed9 retrait remontee junit publishing checks 2022-07-19 08:55:11 +02:00
wpetit 9cf903ce41 feat(debian): possibilité d'interpreter ou non la version attendue comme un patron 2022-06-16 17:20:47 +02:00
wpetit cf66210f4e feat(debian): ajout d'info de debug sur la recherche de version de paquets 2022-06-16 16:52:52 +02:00
wpetit 83688cc56c pipeline(mse-rgaa): correction installation/execution pa11y
Voir CNOUS/mse#664
2022-06-16 12:56:31 +02:00
wpetit 8699ec0a9d pipeline(symfony): fix php-cs-fixer-stage 2022-05-18 17:33:25 +02:00
wpetit ed1fb84ea9 pipeline(symfony): correction exécution php-cs-fixer 2022-05-18 17:25:49 +02:00
wpetit d50a9c6b77 pipeline: add symfony app generic integration pipeline 2022-05-18 16:49:05 +02:00
Benjamin Bohard ce2c30003e Fonction pour l’envoi de paquets sur pulp (domaine par défaut à modifier) 2022-01-25 15:11:54 +01:00
wpetit f2602a8d27 debian: increase packages waiting timeout 2021-12-16 09:24:39 +01:00
wpetit 15adc72606 cpkg: ignore known hosts in git operations 2021-09-14 15:23:16 +02:00
wpetit 62615af5e6 cpkg: allow use of ssh credentials for git 2021-09-09 16:50:53 +02:00
wpetit c5684aafea cpkg: loose ssh host key checking 2021-09-09 15:29:16 +02:00
wpetit 2222c30054 Ajout pipeline SonarQube 2021-06-11 09:50:30 +02:00
wpetit b39c380368 Use curl | bash command to fetch LE install script 2021-06-01 12:35:08 +02:00
wpetit 0dd899a291 Set timeout to 30s for LE certificates downloading 2021-05-31 16:42:05 +02:00
wpetit a7a820ac6f Retry download LE certificates before failing 2021-05-31 16:36:53 +02:00
wpetit ce5192d1d9 Update lighthouse pipeline 2021-02-22 17:06:58 +01:00
wpetit 94abda3f1a Create common script to download LetsEncrypt CA in pipeline environments 2021-02-22 14:46:22 +01:00
wpetit 8f0d37213b Update Lighthouse Docker environment 2021-02-17 11:27:28 +01:00
wpetit d69ee2368a Cleanup Sentry pipeline workspace after run 2020-12-18 12:48:37 +01:00
Philippe Caseiro 8c23bc688e Updating docker image for letsencrypt CA and Alpine 3.12 2020-11-24 10:41:13 +01:00
wpetit 81017f0b3c More gifs 2020-11-13 09:41:33 +01:00
wpetit 18bf648aa7 Update w3af docker recipe 2020-11-06 11:59:15 +01:00
wpetit a021d96ca6 Add 'Sentry release' pipeline 2020-10-23 10:57:25 +02:00
Benjamin Bohard 499aaca632 Revert "Force commit to invalidate tamarin cache"
This reverts commit 61f5eb8d3d.
2020-09-16 17:08:15 +02:00
Benjamin Bohard 61f5eb8d3d Force commit to invalidate tamarin cache 2020-09-16 14:33:44 +02:00
Benjamin Bohard bfbef60b9d Revert "Force commit to invalidate tamarin cache"
This reverts commit 1ef8151a51.
2020-09-16 14:27:40 +02:00
Benjamin Bohard 1ef8151a51 Force commit to invalidate tamarin cache 2020-09-16 14:12:06 +02:00
Benjamin Bohard 279223b6f6 Revert "Force commit to invalidate tamarin cache"
This reverts commit 385a83d9ef.
2020-09-16 12:21:21 +02:00
Benjamin Bohard 385a83d9ef Force commit to invalidate tamarin cache 2020-09-16 11:59:51 +02:00
Benjamin Bohard 7b0159c351 Revert "Force commit to invalidate tamarin cache"
This reverts commit f09f349189.
2020-09-16 11:45:47 +02:00
Benjamin Bohard f09f349189 Force commit to invalidate tamarin cache 2020-09-16 11:33:18 +02:00
Benjamin Bohard b52cac4f42 Revert "Force commit to invalidate tamarin cache"
This reverts commit 7c80e8c6cc.
2020-09-16 10:32:07 +02:00
Benjamin Bohard 7c80e8c6cc Force commit to invalidate tamarin cache 2020-09-16 10:27:18 +02:00
Benjamin Bohard c653c09fbc Revert "Force commit to invalidate tamarin cache"
This reverts commit 14a7bef425.
2020-09-16 10:02:56 +02:00
Benjamin Bohard 14a7bef425 Force commit to invalidate tamarin cache 2020-09-16 09:58:49 +02:00
wpetit cd8525c8b1 Ajout pipeline audit RGAA MSE
Voir CNOUS/mse#664
2020-08-11 14:32:06 +02:00
wpetit 407fe87318 Correction message audit Lighthouse 2020-08-11 14:22:03 +02:00
wpetit 83893e65d4 pa11y: possibilité d'activer/désactiver les notices/warnings 2020-08-11 10:57:36 +02:00
wpetit 11f3ddc835 pa11y: possibilité de spécifier le standard d'audit 2020-08-11 10:22:57 +02:00
wpetit ffe16b7927 Pa11y: Injection des identifiants Basic Auth dans la configuration 2020-08-10 16:56:35 +02:00
wpetit f3e5f7b9ef Librairie d'audit d'accessibilité 2020-08-10 16:26:48 +02:00
wpetit 7f2ef25102 Séparation de pa11y et lighthouse 2020-08-10 15:03:40 +02:00
wpetit b2f0a6c182 Debian packaging: fix environment daily preparation 2020-05-27 12:02:16 +02:00
wpetit 1c8d53788f Fix spacing 2020-05-06 10:22:42 +02:00
wpetit ea7f44f0f1 debian packaging: wait for package availability before sending
Rocket.Chat notification
2020-05-06 10:22:06 +02:00
wpetit c76ca6cd5c Tamarin: fix packages copy 2020-03-30 11:35:08 +02:00
wpetit 073995cb1b tamarin: add prepareEnvironment() method 2020-03-27 14:40:12 +01:00
wpetit 3890170351 cpkg: trim git outputs 2020-03-24 10:52:32 +01:00
wpetit 8a89c4b3ae debian.waitForRepoPackage(): increse default timeout to 20m 2020-03-24 10:50:15 +01:00
wpetit ea2412eeb2 cpkg: set git identity before merge 2020-03-24 10:36:07 +01:00
wpetit 2f0f2c43ab cpkg: add git username/email to allow push 2020-03-24 10:15:56 +01:00
wpetit 22047c685a Fix cpkg skipCi/skipPush options handling 2020-03-24 10:00:48 +01:00
wpetit 7ac2319ae1 Add git.withHTTPCredentials() helper 2020-03-20 17:15:15 +01:00
wpetit 63bb0dcf08 cpkg: fix random failure with GIT_ASKPASS script 2020-03-20 17:02:41 +01:00
wpetit 0bef6cf450 Allow skip with [ci skip] marker in tag message 2020-03-19 17:01:17 +01:00
wpetit 297ead2d7e Add debian repository helpers 2020-03-19 17:00:36 +01:00
wpetit 31aa194399 Add CPKG port to trigger package from Jenkins
See https://forge.cadoles.com/Cadoles/cpkg
2020-03-19 14:38:06 +01:00
wpetit 43d24e4c99 Moaaar gifs 2020-03-18 23:18:27 +01:00
wpetit e6d59aa1bc Ajout Gif livraison de paquets 2020-03-12 09:47:39 +01:00
wpetit 095780e3ce Mise à jour pipeline de packaging 2020-03-04 16:55:46 +01:00
wpetit 8e2b7a214a Ajout librairie LoLOps 2020-03-04 16:51:30 +01:00
wpetit 81b4a52142 Tamarin: use forge.cadoles.com 2020-01-06 15:35:11 +01:00
wpetit d42e44c44c w3af: mode non verbeux par défaut 2019-12-26 14:47:58 +01:00
wpetit 5a1ece29c3 w3af: fix timeout default value 2019-12-26 13:58:42 +01:00
wpetit c6f1a2a2ef w3af: injection de BUILD_USER_ID en cas d'échec du pipeline 2019-12-26 13:50:00 +01:00
wpetit 161fe3092c w3af: fix targetUrl parameter 2019-12-26 13:47:28 +01:00
wpetit d9799626f1 Fix W3AF pipeline docker image build 2019-12-26 13:42:51 +01:00
wpetit 19884e6719 Lighthouse: execution de pa11y pour compléter l'audit d'accessibilité 2019-12-26 13:03:23 +01:00
wpetit 4fe6feb1a1 Ajout d'un pipeline Lighthouse
+ améliorations/corrections sur le pipeline d'audit W3AF
2019-12-24 15:37:22 +01:00
wpetit 5b1abee466 Modification temporaire de l'adresse de la forge 2019-09-30 11:31:45 +02:00
wpetit 84a27ae9f4 WebAudit: Use verbose mode 2019-07-26 10:54:08 +02:00
wpetit 2d26708e3b Add timeout to web audit pipeline 2019-07-24 09:46:00 +02:00
wpetit 02d67901c8 Fix w3af dockerfile 2019-07-22 22:33:52 +02:00
wpetit 50fb92339f Update w3af dependencies 2019-07-22 22:31:21 +02:00
wpetit 7d9bad2d18 fixup! Web security audit base pipeline 2019-04-29 17:41:07 +02:00
wpetit 6fb77f4e51 fixup! Web security audit base pipeline 2019-04-29 16:58:53 +02:00
wpetit 36165719d4 fixup! Web security audit base pipeline 2019-04-29 16:46:17 +02:00
wpetit a61c9485c3 fixup! Web security audit base pipeline 2019-04-15 21:33:26 +02:00
wpetit a58c546d38 fixup! Web security audit base pipeline 2019-04-15 21:32:30 +02:00
wpetit 875a083e23 fixup! Web security audit base pipeline 2019-04-15 21:30:57 +02:00
wpetit 3b8431e501 fixup! Web security audit base pipeline 2019-04-15 21:30:05 +02:00
wpetit 21b7f75124 fixup! Web security audit base pipeline 2019-04-15 21:13:23 +02:00
wpetit b424895f0a fixup! Web security audit base pipeline 2019-04-15 21:04:56 +02:00
wpetit 828d9ce4e8 fixup! Web security audit base pipeline 2019-04-15 21:02:07 +02:00
wpetit 002b1b22a3 fixup! Web security audit base pipeline 2019-04-15 20:56:52 +02:00
wpetit 3d3d43e029 fixup! Web security audit base pipeline 2019-04-15 20:50:44 +02:00
wpetit f1a2ff3b0c fixup! Web security audit base pipeline 2019-04-15 20:50:20 +02:00
wpetit 5b57e78e9e fixup! Web security audit base pipeline 2019-04-15 18:40:32 +02:00
wpetit 1246eb6943 Web security audit base pipeline 2019-04-15 18:35:34 +02:00
wpetit 27e49f4722 Fix git postBuffer configuration 2019-04-15 10:45:48 +02:00
wpetit eef92cec62 Add hooks to debian packages test pipeline 2019-04-10 12:10:08 +02:00
wpetit 97aa9b61dd Add hook utility basic implementation 2019-03-19 16:43:04 +01:00
wpetit ec523ddfb4 Nebula: add VM connection timeout 2019-03-15 12:18:39 +01:00
wpetit 78c8a9e74e Split packages by \n 2019-03-12 16:20:51 +01:00
wpetit 4cb30fa836 Refactorisation de l'API pour les opérations sur OpenNebula 2019-03-01 15:31:23 +01:00
wpetit 3b6e1e0f72 Séparation des jobs d'empaquetage en 2 phases 2019-03-01 15:16:42 +01:00
60 changed files with 7429 additions and 195 deletions

1
.gitignore vendored Normal file
View File

@ -0,0 +1 @@
/data

106
Makefile Normal file
View File

@ -0,0 +1,106 @@
# Makefile driving the local build and interactive use of the Jenkins
# audit Docker images (w3af security scan, Lighthouse and Pa11y audits).
#
# All variables below can be overridden from the command line, e.g.:
#   make PA11Y_URL=https://example.net audit-pa11y

DOCKER_ARGS ?=
W3AF_COMMAND ?=
LIGHTHOUSE_COMMAND ?=
LIGHTHOUSE_URL ?=
PA11Y_COMMAND ?=
PA11Y_URL ?=
PA11Y_REPORTER ?=
PA11Y_USERNAME ?=
PA11Y_PASSWORD ?=
PA11Y_STANDARD ?=
PA11Y_COOKIE ?=
PA11Y_IGNORE ?=

# Build the w3af audit image (proxy settings are forwarded to the build).
image-w3af:
	docker build \
		--build-arg=HTTP_PROXY=$(HTTP_PROXY) \
		--build-arg=HTTPS_PROXY=$(HTTPS_PROXY) \
		--build-arg=http_proxy=$(http_proxy) \
		--build-arg=https_proxy=$(https_proxy) \
		-t jenkins-w3af \
		./resources/com/cadoles/w3af

# Open a shell inside the w3af container.
interactive-w3af:
	$(MAKE) W3AF_COMMAND="/bin/sh" w3af

# Run the w3af audit entry point.
audit-w3af:
	$(MAKE) W3AF_COMMAND="/usr/local/bin/run-audit" w3af

# Run the w3af container with the configured command.
w3af:
	docker run \
		-it --rm \
		--net host \
		-v "$(PWD)/resources/com/cadoles/w3af/audit.w3af.tmpl:/home/w3af/w3af/audit.w3af.tmpl:ro" \
		$(DOCKER_ARGS) \
		jenkins-w3af:latest \
		$(W3AF_COMMAND)

# Build the Lighthouse audit image.
image-lighthouse:
	docker build \
		--build-arg=HTTP_PROXY=$(HTTP_PROXY) \
		--build-arg=HTTPS_PROXY=$(HTTPS_PROXY) \
		--build-arg=http_proxy=$(http_proxy) \
		--build-arg=https_proxy=$(https_proxy) \
		-t jenkins-lighthouse \
		./resources/com/cadoles/lighthouse

# Open a shell inside the Lighthouse container.
interactive-lighthouse:
	$(MAKE) LIGHTHOUSE_COMMAND="/bin/sh" lighthouse

# Run the Lighthouse audit entry point.
audit-lighthouse:
	$(MAKE) LIGHTHOUSE_COMMAND="/usr/local/bin/run-audit" lighthouse

# Run the Lighthouse container; reports land in ./data/lighthouse/reports.
# SYS_ADMIN is required by the sandboxed headless Chrome.
lighthouse:
	mkdir -p "$(PWD)/data/lighthouse/reports"
	docker run \
		-it --rm \
		--net host \
		--cap-add=SYS_ADMIN \
		-e HTTP_PROXY=$(HTTP_PROXY) \
		-e HTTPS_PROXY=$(HTTPS_PROXY) \
		-e http_proxy=$(http_proxy) \
		-e https_proxy=$(https_proxy) \
		-e LIGHTHOUSE_URL='$(LIGHTHOUSE_URL)' \
		-u $(shell id -u $(USER)):$(shell id -g $(USER)) \
		-v "$(PWD)/data/lighthouse/reports:/home/lighthouse/reports" \
		$(DOCKER_ARGS) \
		jenkins-lighthouse:latest \
		$(LIGHTHOUSE_COMMAND)

# Run the Pa11y container; reports land in ./data/pa11y/reports.
pa11y:
	mkdir -p "$(PWD)/data/pa11y/reports"
	docker run \
		-it --rm \
		--net host \
		--cap-add=SYS_ADMIN \
		-e HTTP_PROXY=$(HTTP_PROXY) \
		-e HTTPS_PROXY=$(HTTPS_PROXY) \
		-e http_proxy=$(http_proxy) \
		-e https_proxy=$(https_proxy) \
		-e PA11Y_URL='$(PA11Y_URL)' \
		-e PA11Y_REPORTER='$(PA11Y_REPORTER)' \
		-e PA11Y_USERNAME='$(PA11Y_USERNAME)' \
		-e PA11Y_PASSWORD='$(PA11Y_PASSWORD)' \
		-e PA11Y_STANDARD='$(PA11Y_STANDARD)' \
		-e PA11Y_COOKIE='$(PA11Y_COOKIE)' \
		-e PA11Y_IGNORE='$(PA11Y_IGNORE)' \
		-u $(shell id -u $(USER)):$(shell id -g $(USER)) \
		-v "$(PWD)/data/pa11y/reports:/home/pa11y/reports" \
		$(DOCKER_ARGS) \
		jenkins-pa11y:latest \
		$(PA11Y_COMMAND)

# Build the Pa11y audit image.
image-pa11y:
	docker build \
		--build-arg=HTTP_PROXY=$(HTTP_PROXY) \
		--build-arg=HTTPS_PROXY=$(HTTPS_PROXY) \
		--build-arg=http_proxy=$(http_proxy) \
		--build-arg=https_proxy=$(https_proxy) \
		-t jenkins-pa11y \
		./resources/com/cadoles/pa11y

# Open a shell inside the Pa11y container.
interactive-pa11y:
	$(MAKE) PA11Y_COMMAND="/bin/sh" pa11y

# Run the Pa11y audit entry point.
audit-pa11y:
	$(MAKE) PA11Y_COMMAND="/usr/local/bin/run-audit" pa11y

# None of these targets produce a file with the target's name.
.PHONY: image-w3af interactive-w3af audit-w3af w3af \
	image-lighthouse interactive-lighthouse audit-lighthouse lighthouse \
	image-pa11y interactive-pa11y audit-pa11y pa11y

View File

@ -2,29 +2,9 @@
Utilitaires pour la création de pipeline Jenkins dans l'environnement Cadoles.
## Pipelines
## Documentation
- [Pipeline d'empaquetage Debian](./pipelines/debian-packaging.jenkinsfile)
## Librairie
### Méthodes exposées
#### Création de paquets
- [`tamarin.buildPackage()`](./vars/tamarin.groovy#L48)
- [`tamarin.buildPackageWithCPKG()`](./vars/tamarin.groovy#L1)
#### Publication de paquets
- [`vulcain.publish()`](./vars/vulcain.groovy#L1)
#### Pilotage d'OpenNebula
- [`nebula.initWithCredentials()`](./vars/nebula.groovy#L125)
- [`nebula.runInNewVM() { client -> ... }`](./vars/nebula.groovy#L135)
- [`client.findVMTemplate()`](./vars/nebula.groovy#L65)
- [`client.withNewVM()`](./vars/nebula.groovy#L79)
Voir le répertoire [`./doc`](./doc)
## Licence

29
doc/README.md Normal file
View File

@ -0,0 +1,29 @@
# Documentation
## Tutoriels
- [Utilisation du pipeline `standardMakePipeline()`](./tutorials/standard-make-pipeline.md)
## Pipelines
- [Pipeline d'empaquetage Debian](../pipelines/debian-packaging.jenkinsfile)
## Librairie
### Méthodes exposées
#### Création de paquets
- [`tamarin.buildPackage()`](../vars/tamarin.groovy#L48)
- [`tamarin.buildPackageWithCPKG()`](../vars/tamarin.groovy#L1)
#### Publication de paquets
- [`vulcain.publish()`](../vars/vulcain.groovy#L1)
#### Pilotage d'OpenNebula
- [`nebula.initWithCredentials()`](../vars/nebula.groovy#L125)
- [`nebula.runInNewVM() { client -> ... }`](../vars/nebula.groovy#L135)
- [`client.findVMTemplate()`](../vars/nebula.groovy#L65)
- [`client.withNewVM()`](../vars/nebula.groovy#L79)

View File

@ -0,0 +1,123 @@
# Utilisation du pipeline `standardMakePipeline()`
> **Note** Vous travaillez sur un projet Symfony ? Dans ce cas référez-vous au tutoriel ["Utiliser le pipeline Symfony"](https://forge.cadoles.com/Cadoles/Jenkins/wiki/Utiliser-le-pipeline-%22Symfony%22).
Le pipeline [`standardMakePipeline()`](../../vars/standardMakePipeline.groovy) a pour objectif de permettre d'obtenir simplement et rapidement un pipeline générique pour un projet de développement ou d'intégration en utilisant et respectant quelques conventions de nommage dans ses tâches `Make`.
Globalement, le pipeline exécute les opérations suivantes:
- Il exécute la commande `make build` sur votre projet;
- Il exécute la commande `make test` sur votre projet et si votre branche est une PR, il crée un commentaire sur celle-ci avec la sortie de ces tests;
- Si votre branche est une branche de "release" (par défaut les branches `develop`, `testing` et `stable`) il exécute la commande `make release` puis diffuse une notification sur le canal `#cadoles-jenkins`.
Le pipeline ne présume pas des opérations réalisées par ces 3 tâches. Il ne fait que les exécuter en partant du principe que votre projet suit un cycle conventionnel de développement. Mais globalement ces tâches devraient:
- `make build`: Construire votre projet (installer les dépendances, générer les assets, compiler le code source le cas échéant, etc);
- `make test`: Exécuter les tests automatisés associés à votre projet (unitaire, intégration, etc);
- `make release`: Diffuser une nouvelle version de votre projet (construire et déployer des artefacts comme des paquets ou des images de conteneur, exécuter un déploiement Ansible, etc).
> **Note:** La gestion des dépendances des tâches est à la charge du développeur (voir "Comment installer les dépendances NPM avant une tâche ?" dans la FAQ pour un exemple).
## Utilisation
Afin d'utiliser le pipeline, vous devez effectuer les opérations suivantes à l'initialisation de votre projet:
1. Créer votre fichier `Jenkinsfile` à la racine de votre projet
```groovy
@Library("cadoles") _
standardMakePipeline()
```
2. Créer votre fichier `Makefile` à la racine de votre projet
```makefile
test:
echo "Testing my project..."
build:
echo "Building my project..."
release:
echo "Releasing my project..."
```
3. Ajouter les deux fichiers à votre historique Git (`commit`) et pousser sur la branche de développement.
4. Accéder à [Jenkins](https://jenkins.cadol.es/) puis à l'organisation contenant votre projet. Dans la barre de gauche cliquer sur le bouton "Scan Gitea Organization Now"
> **Note:** Globalement un projet doit être partagé avec l'équipe "Bots" sur la forge afin que Jenkins puisse accéder aux sources de votre projet. Dans la majorité des organisations pré-existantes ce partage est déjà configuré.
5. Votre pipeline devrait s'exécuter sur Jenkins !
## Variables d'environnement pré-disponibles
Le pipeline injecte directement dans l'environnement d'exécution une série de variables d'environnement:
|Variable|Description|Valeurs possibles|
|--------|-----------|-----------------|
|`PROJECT_VERSION_TAG`|Tag conventionnel de la version du projet|Voir ["R14. Respecter le schéma d'identification des images publiées"](https://forge.cadoles.com/CadolesKube/KubeRules/wiki/Bonnes-pratiques-de-d%C3%A9veloppement-applicatif-en-vue-d%27un-d%C3%A9ploiement-sur-Kubernetes#r14-respecter-le-sch%C3%A9ma-d-identification-des-images-publi%C3%A9es)|
|`PROJECT_VERSION_SHORT_TAG`|Tag court conventionnel de la version du projet|Voir ["R14. Respecter le schéma d'identification des images publiées"](https://forge.cadoles.com/CadolesKube/KubeRules/wiki/Bonnes-pratiques-de-d%C3%A9veloppement-applicatif-en-vue-d%27un-d%C3%A9ploiement-sur-Kubernetes#r14-respecter-le-sch%C3%A9ma-d-identification-des-images-publi%C3%A9es)|
|`BRANCH_NAME`|Nom de la branche courante|Nom de la branche courante (préfixé par `PR-` le cas échéant)|
|`IS_PR`|Est ce que l'exécution courante s'effectue pour une PR ?|`true` ou `false`|
|`CI`|Est ce que l'exécution courante s'exécute sur le serveur d'intégration continue ?|`true`|
## FAQ
### Comment installer des dépendances supplémentaires dans l'environnement d'exécution ?
Par défaut l'environnement d'exécution du pipeline est un conteneur basé sur une image Ubuntu LTS (22.04 à ce jour). Dans cette image sont installées [des dépendances de base](../../resources/com/cadoles/standard-make/Dockerfile) généralement utilisées par les projets de développement.
Cependant si vous avez besoin d'autres dépendances systèmes il est possible d'étendre le fichier `Dockerfile` par défaut. Pour ce faire, éditer votre fichier `Jenkinsfile`:
```groovy
@Library("cadoles") _
// Exemple: installation du paquet ansible-lint
// dans l'environnement d'exécution
standardMakePipeline([
'dockerfileExtension': '''
RUN apt-get update -y \
&& apt-get install -y ansible-lint
'''
])
```
### Comment injecter des secrets dans l'environnement d'exécution ?
Parfois vous aurez besoin d'utiliser des secrets afin d'accéder soit à des projets privés sur la forge, soit pour publier des paquets ou des images de conteneur. Jenkins intègre [une gestion des secrets](https://jenkins.cadol.es/manage/credentials/) et ceux ci peuvent être récupérés dans votre environnement d'exécution sous diverses formes (variable d'environnement, fichiers, etc).
Pour ce faire, éditer votre fichier `Jenkinsfile`:
```groovy
@Library("cadoles") _
// Exemple: récupération des identifiants du compte
// "jenkins" sur la forge sous la forme des variables
// d'environnement FORGE_USERNAME et FORGE_PASSWORD
standardMakePipeline([
'credentials': [
usernamePassword([
credentialsId: 'forge-jenkins',
usernameVariable: 'FORGE_USERNAME',
passwordVariable: 'FORGE_PASSWORD',
]),
]
])
```
Les différents types d'entrées possibles pour le tableau `credentials` sont décrits [sur cette page](https://www.jenkins.io/doc/pipeline/steps/credentials-binding/).
### Comment installer les dépendances NPM avant une tâche ?
Pour cela vous pouvez utiliser les mécanismes de gestion des dépendances intégrées à Make. Par exemple:
```makefile
test: node_modules
npm run test
node_modules:
npm ci
```
De cette manière Make exécutera la commande `npm ci` si et seulement si le répertoire `node_modules` n'existe pas déjà.

161
misc/build-package-with-tamarin Executable file
View File

@ -0,0 +1,161 @@
#!/usr/bin/env bash
set -e
# build-package-from-git - Job handler for Marang
# Author: William Petit <wpetit@cadoles.com>
#
# Builds Debian packages with Tamarin from the Git repository described in
# the Marang webhook payload (JSON expected in the MARANG_PAYLOAD
# environment variable), then optionally copies the resulting .deb files to
# an output directory and pushes them to the matching aptly repository.
#
# Dependencies:
#
# - jq - https://stedolan.github.io/jq/ - apt-get install jq
# - git
#
# Arguments:
#
# - $1: Docker distribution to use for building package - Default: debian:jessie
# - $2: Limit the build for commits in this specified branch - Default: No branch limitation
# - $3: Optional output directory where built packages are copied
#---------Config---------
# Tamarin is expected to live as a sibling checkout of this repository.
TAMARIN_PATH=$(readlink -f ../../tamarin)
#------------------------
#---------Functions---------
# Extract a value from the Marang JSON payload using the jq filter in $1.
function get_payload_opt {
echo "$MARANG_PAYLOAD" | jq -r "$1"
}
# Remove the temporary workspace (also registered as the EXIT trap).
function clean_workspace {
# Clean workspace
echo "Cleaning workspace $workdir..."
rm -rf "$workdir"
}
#---------------------------
# Get handlers arguments
DISTRIB=$1
LIMIT_TO_BRANCH=$2
OUTPUT_DIR=$3
# Create temporary workspace
workdir=$(mktemp -d)
cd "$workdir"
# Ensure the workspace is removed whatever the exit path (set -e included).
trap clean_workspace EXIT
echo "Using workspace $workdir..."
# Create temporary dist directory
mkdir -p "$workdir/dist"
# Extract project info from the webhook payload.
# NOTE(review): project name is lowercased to match repository naming
# conventions on the package repositories.
project_name=$(get_payload_opt ".repository.name" | tr '[:upper:]' '[:lower:]')
repo_url=$(get_payload_opt ".repository.clone_url")
commit=$(get_payload_opt ".ref")
# Fetch project sources (TLS verification disabled for the internal forge).
GIT_SSL_NO_VERIFY=true git clone "$repo_url" "$project_name"
cd "$project_name"
# Limit the build to a specific branch if needed: skip the build entirely
# when the pushed commit is not reachable from that branch.
if [ ! -z "$LIMIT_TO_BRANCH" ]; then
echo "The build processus is limited to $LIMIT_TO_BRANCH..."
git checkout "$LIMIT_TO_BRANCH"
branches_containing_commit=$(git branch --contains $commit)
if [[ ! "${branches_containing_commit[@]}" =~ "$LIMIT_TO_BRANCH" ]]; then
echo "The commit $commit is not part of $LIMIT_TO_BRANCH !"
clean_workspace
exit
fi
fi
# Checkout specified commit
git checkout "$commit"
git submodule init
git submodule update
echo "Building package..."
# Build on package per version: collect the tags attached to the commit.
# set +e because `git describe --exact-match` fails when the commit is
# untagged, which is a normal "nothing to build" outcome here.
set +e
COMMIT_TAGS=$(git describe --exact-match --abbrev=0) #git tag -l --contains HEAD | grep "^pkg")
set -e
if [[ -z ${COMMIT_TAGS} ]]
then
echo "Nothing to build :"
echo " - No build build tags on last commit"
clean_workspace
exit
fi
# One package build per tag. Tags are presumably of the form
# pkg/<env>/<distrib>/<version> (fields extracted with cut below) —
# TODO confirm against the tagging convention.
for tag in ${COMMIT_TAGS}
do
PACKAGE_ENV=$(echo ${tag} | cut -d '/' -f 2)
PACKAGE_DISTRIB=$(echo ${tag} | cut -d '/' -f 3)
PKGVERSION=$(echo ${tag} | cut -d '/' -f 4)
# Setting package version (fallback marker when the tag carries none).
[[ -z ${PKGVERSION} ]] && PKGVERSION="NO-VERSION"
cd ${workdir}/${project_name}
# Inject the version into the project's .tamarinrc, creating it if absent
# and replacing any pre-existing project_version entry.
tamarinrc="${workdir}/${project_name}/.tamarinrc"
[[ ! -f ${tamarinrc} ]] && touch ${tamarinrc}
set +e
grep -q "^project_version=.*" ${tamarinrc}
if [[ ${?} -eq 0 ]]
then
sed -i -e "s/^project_version=.*/project_version=${PKGVERSION}/" ${workdir}/${project_name}/.tamarinrc
else
echo "project_version=${PKGVERSION}" > ${tamarinrc}
fi
# staging/stable builds get a clean version (no dev suffix appended).
if [[ ${PACKAGE_ENV} =~ ^(staging|stable) ]]
then
grep -q "^no_version_suffix=.*" ${tamarinrc}
if [[ ${?} -eq 0 ]]
then
sed -i -e "s/no_version_suffix=.*/no_version_suffix=yes/" ${tamarinrc}
else
echo "no_version_suffix=yes" >> ${tamarinrc}
fi
fi
set -e
# Build package with Tamarin for specified distrib
echo
echo "Building package with $TAMARIN_PATH/package (${tag})"
echo
"$TAMARIN_PATH/package" "$workdir/$project_name" -o "$workdir/dist" -b "$DISTRIB"
# Copy debian packages to destination directory if arguments is specified
if [ ! -z $OUTPUT_DIR ]; then
DEST_DIR="$OUTPUT_DIR/$LIMIT_TO_BRANCH/$project_name"
mkdir -p "$DEST_DIR"
echo "Copying packages to $DEST_DIR/..."
cp $workdir/dist/*.deb "$DEST_DIR/"
else
echo "No output directory specified."
fi
# Deploy packages automatically based on tags
# Check that the package environment matches the expected ones
if [[ "$PACKAGE_ENV" =~ ^(dev|staging|stable)$ ]]; then
echo
echo "Pushing packages to matching '$PACKAGE_ENV' repository ..."
echo
ssh aptly@vulcain.cadoles.com mkdir -p "/home/aptly/packages/$PACKAGE_ENV/$LIMIT_TO_BRANCH"
scp -r $workdir/dist/*.deb "aptly@vulcain.cadoles.com:/home/aptly/packages/$PACKAGE_ENV/$LIMIT_TO_BRANCH/"
# Clear the dist directory so the next tag's packages are pushed alone.
echo "Cleaning builded package"
rm -rf $workdir/dist/*
else
echo "Packaging tag prefix 'pkg' found but the environment token does not match any of 'dev', 'staging' or 'stable'. Ignoring..."
fi
done

26
misc/clean-worker.sh Normal file
View File

@ -0,0 +1,26 @@
#!/bin/sh

# Jenkins worker housekeeping: reclaim Docker resources and drop stale
# build workspaces.

# Stop every running container, prune all Docker resources (containers,
# images, volumes, networks) and restart the Docker daemon.
cleanup_docker() {
	running=$(docker ps -q)
	if [ -n "$running" ]; then
		# Word splitting is intentional: one argument per container ID.
		docker stop $running
	fi
	docker system prune -f -a --volumes
	docker network prune -f
	service docker restart
}

# Delete workspaces whose last modification is older than 7 days.
cleanup_old_workspaces() {
	find /workspace -maxdepth 1 -type d -mtime +7 -exec rm -rf {} \;
}

main() {
	cleanup_docker
	cleanup_old_workspaces
}

main

9
misc/creolelint.html Normal file
View File

@ -0,0 +1,9 @@
<!-- saved from url=(0093)https://paste.cadol.es/zerobin/?470ce84f85952743#7MFADgBkp4AWfELYBOPusM6y9Zq8dXF4e7e7D6uV358= -->
<html><head><meta http-equiv="Content-Type" content="text/html; charset=UTF-8"></head><body><pre>root@eolebase-2:~# CreoleLint -t haproxy_00-global.cfg
Template Non valide (valid_parse_tmpl:ERROR:error)
\-- /usr/share/eole/creole/distrib/haproxy_00-global.cfg
('/usr/share/eole/creole/distrib/haproxy_00-global.cfg', ':', Exception('Il manque une option', NotFound(u"cannot find 'haStatPort'",), 'avec le dictionnaire', {'activer_haproxy': u'oui', 'haIpPublique': u'192.168.0.1', 'activer_haproxy_stats': u'non'}))
root@eolebase-2:~#
</pre></body></html>

2897
misc/templates_formatted.xml Normal file
View File

@ -0,0 +1,2897 @@
<?xml version="1.0"?>
<VMTEMPLATE_POOL>
<VMTEMPLATE>
<ID>0</ID>
<UID>2</UID>
<GID>1</GID>
<UNAME>eoleone</UNAME>
<GNAME>users</GNAME>
<NAME>Install-eole-2.6.0-10G</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1485254775</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.02]]></CPU>
<CPU_COST><![CDATA[1]]></CPU_COST>
<DESCRIPTION><![CDATA[Modèle pour installer EOLE 2.6.0 amd64 avec un disque de 10G]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[iso-eole-2.6.0-amd64]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<DISK>
<IMAGE><![CDATA[DD-VIDE-10G]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<DISK_COST><![CDATA[6]]></DISK_COST>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<LOGO><![CDATA[images/logos/linux.png]]></LOGO>
<MEMORY><![CDATA[1048576]]></MEMORY>
<MEMORY_COST><![CDATA[2]]></MEMORY_COST>
<NIC>
<NETWORK><![CDATA[CR_openspace]]></NETWORK>
<NETWORK_UNAME><![CDATA[oneadmin]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0,disk1]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.01..1|0.02]]></CPU>
<MEMORY><![CDATA[M|list||1024,2048,3072,4096|1048576]]></MEMORY>
<VCPU><![CDATA[O|range||1..8|2]]></VCPU>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>1</ID>
<UID>2</UID>
<GID>1</GID>
<UNAME>eoleone</UNAME>
<GNAME>users</GNAME>
<NAME>eolebase-2.6.0-amd64-fresh</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1485261232</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.04]]></CPU>
<CPU_COST><![CDATA[0.0001]]></CPU_COST>
<DESCRIPTION><![CDATA[Fresh install d'une EOLEBASE 2.6.0]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[eolebase-2.6.0-amd64-fresh]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<DISK_COST><![CDATA[0.0003]]></DISK_COST>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<LOGO><![CDATA[images/logos/linux.png]]></LOGO>
<MEMORY><![CDATA[524288]]></MEMORY>
<MEMORY_COST><![CDATA[0.0002]]></MEMORY_COST>
<NIC>
<NETWORK><![CDATA[CR_openspace]]></NETWORK>
<NETWORK_UNAME><![CDATA[oneadmin]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.01..0.5|0.04]]></CPU>
<MEMORY><![CDATA[M|range||512..2048|524288]]></MEMORY>
<VCPU><![CDATA[O|range||1..8|1]]></VCPU>
</USER_INPUTS>
<VCPU><![CDATA[1]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>4</ID>
<UID>2</UID>
<GID>1</GID>
<UNAME>eoleone</UNAME>
<GNAME>users</GNAME>
<NAME>Alpine Linux 3.5.2</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1488810222</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.02]]></CPU>
<CPU_COST><![CDATA[0]]></CPU_COST>
<DESCRIPTION><![CDATA[Alpine linux fresh install image
root/eole]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[FI-Alpine-Virt-3.5.2]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<DISK_COST><![CDATA[0]]></DISK_COST>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<LOGO><![CDATA[images/logos/linux.png]]></LOGO>
<MEMORY><![CDATA[524288]]></MEMORY>
<MEMORY_COST><![CDATA[0]]></MEMORY_COST>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.01..2|0.02]]></CPU>
<MEMORY><![CDATA[M|range||512..2048|524288]]></MEMORY>
<VCPU><![CDATA[O|range||2..8|2]]></VCPU>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>6</ID>
<UID>3</UID>
<GID>1</GID>
<UNAME>pcaseiro</UNAME>
<GNAME>users</GNAME>
<NAME>test</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1488882614</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.01]]></CPU>
<DESCRIPTION><![CDATA[test]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[FI-Alpine-Virt-3.5.2]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<LOGO><![CDATA[images/logos/linux.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<NIC>
<NETWORK><![CDATA[CR_devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[oneadmin]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>9</ID>
<UID>5</UID>
<GID>1</GID>
<UNAME>wpetit</UNAME>
<GNAME>users</GNAME>
<NAME>faketools</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1490622404</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.02]]></CPU>
<DESCRIPTION><![CDATA[Image de base: Alpine 3.5.2
Services: Faketools, SSH, Docker
Identifiants: root / eole
]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[alpine-3.5.2-docker-faketools]]></IMAGE>
<IMAGE_UNAME><![CDATA[wpetit]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[ps2]]></BUS>
<TYPE><![CDATA[mouse]]></TYPE>
</INPUT>
<LOGO><![CDATA[images/logos/linux.png]]></LOGO>
<MEMORY><![CDATA[512]]></MEMORY>
<NIC>
<NETWORK><![CDATA[CR_devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[oneadmin]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
</OS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>10</ID>
<UID>3</UID>
<GID>1</GID>
<UNAME>pcaseiro</UNAME>
<GNAME>users</GNAME>
<NAME>SETH</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1496931671</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.08]]></CPU>
<DISK>
<IMAGE><![CDATA[DD-SETH-2.6.0-FI]]></IMAGE>
<IMAGE_UNAME><![CDATA[pcaseiro]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO>
<MEMORY><![CDATA[2048]]></MEMORY>
<NIC>
<NETWORK><![CDATA[CR_devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[oneadmin]]></NETWORK_UNAME>
</NIC>
<NIC>
<NETWORK><![CDATA[CR_openspace]]></NETWORK>
<NETWORK_UNAME><![CDATA[oneadmin]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0,nic0]]></BOOT>
</OS>
<VCPU><![CDATA[4]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>17</ID>
<UID>3</UID>
<GID>1</GID>
<UNAME>pcaseiro</UNAME>
<GNAME>users</GNAME>
<NAME>Alpine Linux 3.5.2-copy</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1499181433</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.02]]></CPU>
<CPU_COST><![CDATA[0]]></CPU_COST>
<DESCRIPTION><![CDATA[Alpine Pcaseiro
root/eole]]></DESCRIPTION>
<DISK>
<DEV_PREFIX><![CDATA[vd]]></DEV_PREFIX>
<DRIVER><![CDATA[qcow2]]></DRIVER>
<IMAGE_ID><![CDATA[24]]></IMAGE_ID>
<READONLY><![CDATA[no]]></READONLY>
<SIZE><![CDATA[10240]]></SIZE>
</DISK>
<DISK>
<IMAGE><![CDATA[dd-data-pcaseiro]]></IMAGE>
<IMAGE_UNAME><![CDATA[pcaseiro]]></IMAGE_UNAME>
</DISK>
<DISK_COST><![CDATA[0]]></DISK_COST>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<LOGO><![CDATA[images/logos/linux.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<MEMORY_COST><![CDATA[0]]></MEMORY_COST>
<NIC>
<NETWORK_ID><![CDATA[2]]></NETWORK_ID>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.01..2|0.02]]></CPU>
<MEMORY><![CDATA[M|range||512..2048|524288]]></MEMORY>
<VCPU><![CDATA[O|range||2..8|2]]></VCPU>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>20</ID>
<UID>10</UID>
<GID>1</GID>
<UNAME>bbohard</UNAME>
<GNAME>users</GNAME>
<NAME>Windows10</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1500477980</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DISK>
<IMAGE><![CDATA[DD-Windows-50G]]></IMAGE>
<IMAGE_UNAME><![CDATA[bbohard]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[mouse]]></TYPE>
</INPUT>
<MEMORY><![CDATA[2048]]></MEMORY>
<NIC>
<NETWORK><![CDATA[interne]]></NETWORK>
<NETWORK_UNAME><![CDATA[bbohard]]></NETWORK_UNAME>
</NIC>
<OS>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>21</ID>
<UID>10</UID>
<GID>1</GID>
<UNAME>bbohard</UNAME>
<GNAME>users</GNAME>
<NAME>amonecole-2.4.2.1</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1500480420</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DISK>
<IMAGE><![CDATA[amonecole-2.4.2.1-fi-50G]]></IMAGE>
<IMAGE_UNAME><![CDATA[bbohard]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<MEMORY><![CDATA[2048]]></MEMORY>
<NIC>
<NETWORK><![CDATA[CR_devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[oneadmin]]></NETWORK_UNAME>
</NIC>
<NIC>
<NETWORK><![CDATA[intranet]]></NETWORK>
<NETWORK_UNAME><![CDATA[bbohard]]></NETWORK_UNAME>
</NIC>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>22</ID>
<UID>10</UID>
<GID>1</GID>
<UNAME>bbohard</UNAME>
<GNAME>users</GNAME>
<NAME>Windows10-1703-Pro</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1500535603</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DISK>
<IMAGE><![CDATA[Windows10-1703-Pro]]></IMAGE>
<IMAGE_UNAME><![CDATA[bbohard]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[mouse]]></TYPE>
</INPUT>
<MEMORY><![CDATA[2048]]></MEMORY>
<NIC>
<NETWORK><![CDATA[intranet]]></NETWORK>
<NETWORK_UNAME><![CDATA[bbohard]]></NETWORK_UNAME>
</NIC>
<OS>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>23</ID>
<UID>10</UID>
<GID>1</GID>
<UNAME>bbohard</UNAME>
<GNAME>users</GNAME>
<NAME>Amonecole-2.5.2</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1500562959</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.5]]></CPU>
<DISK>
<IMAGE><![CDATA[Amonecole-2.5.2-fi]]></IMAGE>
<IMAGE_UNAME><![CDATA[bbohard]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<MEMORY><![CDATA[4096]]></MEMORY>
<NIC>
<NETWORK><![CDATA[CR_devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[oneadmin]]></NETWORK_UNAME>
</NIC>
<NIC>
<NETWORK><![CDATA[intranet]]></NETWORK>
<NETWORK_UNAME><![CDATA[bbohard]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
</OS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>25</ID>
<UID>2</UID>
<GID>1</GID>
<UNAME>eoleone</UNAME>
<GNAME>users</GNAME>
<NAME>Ubuntu 16.04.2</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1500651324</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.02]]></CPU>
<DESCRIPTION><![CDATA[Fresh install Ubuntu 16.04.2
cadoles/cadoles;21]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[DD-Ubuntu-16.04.2-FI-AMD64]]></IMAGE>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<NIC>
<NETWORK><![CDATA[CR_devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[oneadmin]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.01..0.6|0.02]]></CPU>
<MEMORY><![CDATA[M|range||256..2048|1024]]></MEMORY>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>26</ID>
<UID>10</UID>
<GID>1</GID>
<UNAME>bbohard</UNAME>
<GNAME>users</GNAME>
<NAME>Amonecole-2.6.1</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1500878834</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.5]]></CPU>
<DISK>
<IMAGE><![CDATA[Amonecole-2.6.1-fi]]></IMAGE>
<IMAGE_UNAME><![CDATA[bbohard]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<MEMORY><![CDATA[4096]]></MEMORY>
<NIC>
<NETWORK><![CDATA[CR_devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[oneadmin]]></NETWORK_UNAME>
</NIC>
<NIC>
<NETWORK><![CDATA[intranet]]></NETWORK>
<NETWORK_UNAME><![CDATA[bbohard]]></NETWORK_UNAME>
</NIC>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>27</ID>
<UID>10</UID>
<GID>1</GID>
<UNAME>bbohard</UNAME>
<GNAME>users</GNAME>
<NAME>install_pitaya_os</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1500982221</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.1]]></CPU>
<DISK>
<IMAGE><![CDATA[Pitaya_OS]]></IMAGE>
<IMAGE_UNAME><![CDATA[bbohard]]></IMAGE_UNAME>
</DISK>
<DISK>
<IMAGE><![CDATA[pitaya_os-i386]]></IMAGE>
<IMAGE_UNAME><![CDATA[bbohard]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<MEMORY><![CDATA[1024]]></MEMORY>
<NIC>
<NETWORK><![CDATA[intranet]]></NETWORK>
<NETWORK_UNAME><![CDATA[bbohard]]></NETWORK_UNAME>
</NIC>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>33</ID>
<UID>2</UID>
<GID>1</GID>
<UNAME>eoleone</UNAME>
<GNAME>users</GNAME>
<NAME>ubuntu-server-16.04-cadoles</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1501059937</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Ubuntu Server 16.04 avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/wpetit/cadoles-vm]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[ubuntu-server-16.04-cadoles-20170802]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||512..4096|2048]]></MEMORY>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>34</ID>
<UID>2</UID>
<GID>1</GID>
<UNAME>eoleone</UNAME>
<GNAME>users</GNAME>
<NAME>alpine-3.6-cadoles</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1503412403</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Alpine Linux 3.6 avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/Cadoles/cadoles-vm]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[alpine-3.6-cadoles-20180202]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<LOGO><![CDATA[images/logos/linux.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||..4096|1024]]></MEMORY>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>35</ID>
<UID>3</UID>
<GID>1</GID>
<UNAME>pcaseiro</UNAME>
<GNAME>users</GNAME>
<NAME>FreeIPA</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1511946345</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.02]]></CPU>
<CPU_COST><![CDATA[0]]></CPU_COST>
<DESCRIPTION><![CDATA[Alpine Pcaseiro
root/eole]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[FreeIPA]]></IMAGE>
<IMAGE_UNAME><![CDATA[pcaseiro]]></IMAGE_UNAME>
</DISK>
<DISK_COST><![CDATA[0]]></DISK_COST>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<LOGO><![CDATA[images/logos/linux.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<MEMORY_COST><![CDATA[0]]></MEMORY_COST>
<NIC>
<NETWORK_ID><![CDATA[2]]></NETWORK_ID>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.01..2|0.02]]></CPU>
<MEMORY><![CDATA[M|range||512..2048|524288]]></MEMORY>
<VCPU><![CDATA[O|range||2..8|2]]></VCPU>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>36</ID>
<UID>7</UID>
<GID>1</GID>
<UNAME>vfebvre</UNAME>
<GNAME>users</GNAME>
<NAME>Etherpad</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>1</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1512639936</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.02]]></CPU>
<DESCRIPTION><![CDATA[test mise en place etherpad]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[DS-Etherpad]]></IMAGE>
<IMAGE_UNAME><![CDATA[vfebvre]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<MEMORY><![CDATA[1024]]></MEMORY>
<NIC>
<NETWORK><![CDATA[CR_devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[oneadmin]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
</OS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>37</ID>
<UID>13</UID>
<GID>1</GID>
<UNAME>afornerot</UNAME>
<GNAME>users</GNAME>
<NAME>Demo</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1513949907</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<DISK>
<IMAGE><![CDATA[DS-Demo]]></IMAGE>
<IMAGE_UNAME><![CDATA[afornerot]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<MEMORY><![CDATA[2048]]></MEMORY>
<NIC>
<NETWORK><![CDATA[CR_devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[oneadmin]]></NETWORK_UNAME>
</NIC>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>39</ID>
<UID>6</UID>
<GID>1</GID>
<UNAME>gloaec</UNAME>
<GNAME>users</GNAME>
<NAME>Etherpad</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1515425634</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Ubuntu Server 16.04 avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/wpetit/cadoles-vm]]></DESCRIPTION>
<DISK>
<CLONE><![CDATA[YES]]></CLONE>
<CLONE_TARGET><![CDATA[SYSTEM]]></CLONE_TARGET>
<CLUSTER_ID><![CDATA[0]]></CLUSTER_ID>
<DATASTORE><![CDATA[images-disques]]></DATASTORE>
<DATASTORE_ID><![CDATA[101]]></DATASTORE_ID>
<DEV_PREFIX><![CDATA[vd]]></DEV_PREFIX>
<DISK_ID><![CDATA[0]]></DISK_ID>
<DISK_SNAPSHOT_TOTAL_SIZE><![CDATA[0]]></DISK_SNAPSHOT_TOTAL_SIZE>
<DRIVER><![CDATA[qcow2]]></DRIVER>
<IMAGE_ID><![CDATA[63]]></IMAGE_ID>
<IMAGE_STATE><![CDATA[2]]></IMAGE_STATE>
<LN_TARGET><![CDATA[NONE]]></LN_TARGET>
<READONLY><![CDATA[NO]]></READONLY>
<SAVE><![CDATA[NO]]></SAVE>
<SIZE><![CDATA[10000]]></SIZE>
<SOURCE><![CDATA[/var/lib/one//datastores/101/b7d2c121be8f578b07b5847697f72755]]></SOURCE>
<TM_MAD><![CDATA[shared]]></TM_MAD>
<TYPE><![CDATA[FILE]]></TYPE>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<NIC>
<NETWORK_ID><![CDATA[5]]></NETWORK_ID>
<SECURITY_GROUPS><![CDATA[0]]></SECURITY_GROUPS>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||512..4096|2048]]></MEMORY>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>41</ID>
<UID>13</UID>
<GID>1</GID>
<UNAME>afornerot</UNAME>
<GNAME>users</GNAME>
<NAME>sso.cadol.es</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1515764929</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.04]]></CPU>
<CPU_COST><![CDATA[0.0001]]></CPU_COST>
<DESCRIPTION><![CDATA[Fresh install d'une EOLEBASE 2.6.0]]></DESCRIPTION>
<DISK>
<CLONE><![CDATA[YES]]></CLONE>
<CLONE_TARGET><![CDATA[SYSTEM]]></CLONE_TARGET>
<CLUSTER_ID><![CDATA[0]]></CLUSTER_ID>
<DATASTORE><![CDATA[images-disques]]></DATASTORE>
<DATASTORE_ID><![CDATA[101]]></DATASTORE_ID>
<DEV_PREFIX><![CDATA[vd]]></DEV_PREFIX>
<DISK_ID><![CDATA[0]]></DISK_ID>
<DISK_SNAPSHOT_TOTAL_SIZE><![CDATA[0]]></DISK_SNAPSHOT_TOTAL_SIZE>
<DRIVER><![CDATA[qcow2]]></DRIVER>
<IMAGE_ID><![CDATA[65]]></IMAGE_ID>
<IMAGE_STATE><![CDATA[2]]></IMAGE_STATE>
<LN_TARGET><![CDATA[NONE]]></LN_TARGET>
<READONLY><![CDATA[NO]]></READONLY>
<SAVE><![CDATA[NO]]></SAVE>
<SIZE><![CDATA[10240]]></SIZE>
<SOURCE><![CDATA[/var/lib/one//datastores/101/bd5c35bf320cd492816212c5d74aaef8]]></SOURCE>
<TM_MAD><![CDATA[shared]]></TM_MAD>
<TYPE><![CDATA[FILE]]></TYPE>
</DISK>
<DISK_COST><![CDATA[0.0003]]></DISK_COST>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<LOGO><![CDATA[images/logos/linux.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<MEMORY_COST><![CDATA[0.0002]]></MEMORY_COST>
<NIC>
<NETWORK_ID><![CDATA[2]]></NETWORK_ID>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.01..0.5|0.04]]></CPU>
<MEMORY><![CDATA[M|range||512..2048|524288]]></MEMORY>
<VCPU><![CDATA[O|range||1..8|1]]></VCPU>
</USER_INPUTS>
<VCPU><![CDATA[1]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>44</ID>
<UID>3</UID>
<GID>1</GID>
<UNAME>pcaseiro</UNAME>
<GNAME>users</GNAME>
<NAME>Desktop Ubuntu</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1517220763</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.04]]></CPU>
<DESCRIPTION><![CDATA[Desktop Ubuntu]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[DD-VIDE-50G]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<DISK>
<IMAGE><![CDATA[ISO Ubuntu Desktop 17.10 amd64]]></IMAGE>
<IMAGE_UNAME><![CDATA[pcaseiro]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[SPICE]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO>
<MEMORY><![CDATA[2048]]></MEMORY>
<NIC>
<NETWORK><![CDATA[CR_openspace]]></NETWORK>
<NETWORK_UNAME><![CDATA[oneadmin]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0,disk1]]></BOOT>
</OS>
<VCPU><![CDATA[4]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>45</ID>
<UID>6</UID>
<GID>1</GID>
<UNAME>gloaec</UNAME>
<GNAME>users</GNAME>
<NAME>Ateliers</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1517221877</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Ubuntu Server 16.04 avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/wpetit/cadoles-vm]]></DESCRIPTION>
<DISK>
<CLONE><![CDATA[YES]]></CLONE>
<CLONE_TARGET><![CDATA[SYSTEM]]></CLONE_TARGET>
<CLUSTER_ID><![CDATA[0]]></CLUSTER_ID>
<DATASTORE><![CDATA[images-disques]]></DATASTORE>
<DATASTORE_ID><![CDATA[101]]></DATASTORE_ID>
<DEV_PREFIX><![CDATA[vd]]></DEV_PREFIX>
<DISK_ID><![CDATA[0]]></DISK_ID>
<DISK_SNAPSHOT_TOTAL_SIZE><![CDATA[0]]></DISK_SNAPSHOT_TOTAL_SIZE>
<DRIVER><![CDATA[qcow2]]></DRIVER>
<IMAGE_ID><![CDATA[67]]></IMAGE_ID>
<IMAGE_STATE><![CDATA[2]]></IMAGE_STATE>
<LN_TARGET><![CDATA[NONE]]></LN_TARGET>
<READONLY><![CDATA[NO]]></READONLY>
<SAVE><![CDATA[NO]]></SAVE>
<SIZE><![CDATA[10000]]></SIZE>
<SOURCE><![CDATA[/var/lib/one//datastores/101/b7d2c121be8f578b07b5847697f72755]]></SOURCE>
<TM_MAD><![CDATA[shared]]></TM_MAD>
<TYPE><![CDATA[FILE]]></TYPE>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<NIC>
<NETWORK_ID><![CDATA[5]]></NETWORK_ID>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||512..4096|2048]]></MEMORY>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>47</ID>
<UID>6</UID>
<GID>1</GID>
<UNAME>gloaec</UNAME>
<GNAME>users</GNAME>
<NAME>Polynum</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1519316601</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Ubuntu Server 16.04 avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/wpetit/cadoles-vm]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[UbuntuPolynum]]></IMAGE>
<IMAGE_UNAME><![CDATA[gremond]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||512..4096|2048]]></MEMORY>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>48</ID>
<UID>6</UID>
<GID>1</GID>
<UNAME>gloaec</UNAME>
<GNAME>users</GNAME>
<NAME>Polynum-copy</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1519316656</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Ubuntu Server 16.04 avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/wpetit/cadoles-vm]]></DESCRIPTION>
<DISK>
<CLONE><![CDATA[YES]]></CLONE>
<CLONE_TARGET><![CDATA[SYSTEM]]></CLONE_TARGET>
<CLUSTER_ID><![CDATA[0]]></CLUSTER_ID>
<DATASTORE><![CDATA[images-disques]]></DATASTORE>
<DATASTORE_ID><![CDATA[101]]></DATASTORE_ID>
<DEV_PREFIX><![CDATA[vd]]></DEV_PREFIX>
<DISK_ID><![CDATA[0]]></DISK_ID>
<DISK_SNAPSHOT_TOTAL_SIZE><![CDATA[0]]></DISK_SNAPSHOT_TOTAL_SIZE>
<DRIVER><![CDATA[raw]]></DRIVER>
<IMAGE_ID><![CDATA[72]]></IMAGE_ID>
<IMAGE_STATE><![CDATA[1]]></IMAGE_STATE>
<LN_TARGET><![CDATA[NONE]]></LN_TARGET>
<READONLY><![CDATA[NO]]></READONLY>
<SAVE><![CDATA[NO]]></SAVE>
<SIZE><![CDATA[16384]]></SIZE>
<SOURCE><![CDATA[/var/lib/one//datastores/101/56c890c328e63d8fc8f309e41ef7c61e]]></SOURCE>
<TM_MAD><![CDATA[shared]]></TM_MAD>
<TYPE><![CDATA[FILE]]></TYPE>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<NIC>
<NETWORK_ID><![CDATA[5]]></NETWORK_ID>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||512..4096|2048]]></MEMORY>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>49</ID>
<UID>2</UID>
<GID>1</GID>
<UNAME>eoleone</UNAME>
<GNAME>users</GNAME>
<NAME>mse-portal-cadoles</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1519656420</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Machine mse-portal avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/Cadoles/cadoles-vm]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[mse-portal-cadoles-201807232351]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||..4096|1024]]></MEMORY>
<VCPU><![CDATA[O|range||2..4|2]]></VCPU>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>50</ID>
<UID>2</UID>
<GID>1</GID>
<UNAME>eoleone</UNAME>
<GNAME>users</GNAME>
<NAME>mse-mysql-cadoles</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1519727803</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Machine mse-mysql avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/Cadoles/cadoles-vm]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[mse-mysql-cadoles-201807232357]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||..4096|1024]]></MEMORY>
<VCPU><![CDATA[O|range||2..4|2]]></VCPU>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>51</ID>
<UID>2</UID>
<GID>1</GID>
<UNAME>eoleone</UNAME>
<GNAME>users</GNAME>
      <NAME>mse-ldap-cadoles</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1519727854</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Machine mse-ldap avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/Cadoles/cadoles-vm]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[mse-ldap-cadoles-201807240004]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||..4096|1024]]></MEMORY>
<VCPU><![CDATA[O|range||2..4|2]]></VCPU>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>52</ID>
<UID>6</UID>
<GID>1</GID>
<UNAME>gloaec</UNAME>
<GNAME>users</GNAME>
<NAME>rhel-polynum</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1519808942</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Polynum sur Red Hat Entreprise Linux 7.4 x86_64]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[DD-VIDE-20G]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<DISK>
<IMAGE><![CDATA[RHEL 7.4 x86_64 DVD ISO]]></IMAGE>
<IMAGE_UNAME><![CDATA[gloaec]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/redhat.png]]></LOGO>
<MEMORY><![CDATA[2048]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk1,disk0]]></BOOT>
</OS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>60</ID>
<UID>6</UID>
<GID>1</GID>
<UNAME>gloaec</UNAME>
<GNAME>users</GNAME>
<NAME>rhel-polynum-copy</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1519814383</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Polynum sur Red Hat Entreprise Linux 7.4 x86_64]]></DESCRIPTION>
<DISK>
<ALLOW_ORPHANS><![CDATA[NO]]></ALLOW_ORPHANS>
<CLONE><![CDATA[YES]]></CLONE>
<CLONE_TARGET><![CDATA[SYSTEM]]></CLONE_TARGET>
<CLUSTER_ID><![CDATA[0]]></CLUSTER_ID>
<DATASTORE><![CDATA[images-disques]]></DATASTORE>
<DATASTORE_ID><![CDATA[101]]></DATASTORE_ID>
<DEV_PREFIX><![CDATA[vd]]></DEV_PREFIX>
<DISK_ID><![CDATA[0]]></DISK_ID>
<DISK_SNAPSHOT_TOTAL_SIZE><![CDATA[0]]></DISK_SNAPSHOT_TOTAL_SIZE>
<DRIVER><![CDATA[qcow2]]></DRIVER>
<IMAGE_ID><![CDATA[93]]></IMAGE_ID>
<IMAGE_STATE><![CDATA[1]]></IMAGE_STATE>
<LN_TARGET><![CDATA[NONE]]></LN_TARGET>
<ORIGINAL_SIZE><![CDATA[20480]]></ORIGINAL_SIZE>
<READONLY><![CDATA[NO]]></READONLY>
<SAVE><![CDATA[NO]]></SAVE>
<SIZE><![CDATA[20480]]></SIZE>
<SOURCE><![CDATA[/var/lib/one//datastores/101/f95f90bbc00217cc3cccb322c41e3ecd]]></SOURCE>
<TM_MAD><![CDATA[shared]]></TM_MAD>
<TYPE><![CDATA[FILE]]></TYPE>
</DISK>
<DISK>
<ALLOW_ORPHANS><![CDATA[NO]]></ALLOW_ORPHANS>
<CLONE><![CDATA[NO]]></CLONE>
<CLONE_TARGET><![CDATA[SYSTEM]]></CLONE_TARGET>
<CLUSTER_ID><![CDATA[0]]></CLUSTER_ID>
<DATASTORE><![CDATA[iso-images]]></DATASTORE>
<DATASTORE_ID><![CDATA[102]]></DATASTORE_ID>
<DEV_PREFIX><![CDATA[hd]]></DEV_PREFIX>
<DISK_ID><![CDATA[1]]></DISK_ID>
<DISK_SNAPSHOT_TOTAL_SIZE><![CDATA[0]]></DISK_SNAPSHOT_TOTAL_SIZE>
<DRIVER><![CDATA[raw]]></DRIVER>
<IMAGE_ID><![CDATA[94]]></IMAGE_ID>
<IMAGE_STATE><![CDATA[1]]></IMAGE_STATE>
<LN_TARGET><![CDATA[NONE]]></LN_TARGET>
<ORIGINAL_SIZE><![CDATA[3871]]></ORIGINAL_SIZE>
<READONLY><![CDATA[YES]]></READONLY>
<SAVE><![CDATA[NO]]></SAVE>
<SIZE><![CDATA[3871]]></SIZE>
<SOURCE><![CDATA[/var/lib/one//datastores/102/7c1d2e00558c8fa8671654e8f024aec4]]></SOURCE>
<TM_MAD><![CDATA[shared]]></TM_MAD>
<TYPE><![CDATA[CDROM]]></TYPE>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/redhat.png]]></LOGO>
<MEMORY><![CDATA[2048]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK_ID><![CDATA[5]]></NETWORK_ID>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk1,disk0]]></BOOT>
</OS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>61</ID>
<UID>2</UID>
<GID>1</GID>
<UNAME>eoleone</UNAME>
<GNAME>users</GNAME>
<NAME>eolebase-2.6.2-cadoles</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1519981626</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Machine eolebase-2.6.2 avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/Cadoles/cadoles-vm]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[eolebase-2.6.2-cadoles]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||..4096|1024]]></MEMORY>
<VCPU><![CDATA[O|range||2..4|2]]></VCPU>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>63</ID>
<UID>7</UID>
<GID>1</GID>
<UNAME>vfebvre</UNAME>
<GNAME>users</GNAME>
<NAME>draaf-annuaire-2.6.2</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>1</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1519983537</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Machine eolebase-2.6.2 avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/Cadoles/cadoles-vm]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[annuaire-draaf]]></IMAGE>
<IMAGE_UNAME><![CDATA[vfebvre]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||..4096|1024]]></MEMORY>
<VCPU><![CDATA[O|range||2..4|2]]></VCPU>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>64</ID>
<UID>7</UID>
<GID>1</GID>
<UNAME>vfebvre</UNAME>
<GNAME>users</GNAME>
<NAME>draaf-lemonLDAP-2.6.2</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1520347845</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Machine eolebase-2.6.2 avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/Cadoles/cadoles-vm]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[lemonLDAP-draaf-2.6.2]]></IMAGE>
<IMAGE_UNAME><![CDATA[vfebvre]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||..4096|1024]]></MEMORY>
<VCPU><![CDATA[O|range||2..4|2]]></VCPU>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>65</ID>
<UID>2</UID>
<GID>1</GID>
<UNAME>eoleone</UNAME>
<GNAME>users</GNAME>
<NAME>alpine-virt-3.7-cadoles</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1520501483</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Alpine Linux 3.7 avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/Cadoles/cadoles-vm]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[alpine-virt-3.7-cadoles-201902181003]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/linux.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||..4096|1024]]></MEMORY>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>67</ID>
<UID>6</UID>
<GID>1</GID>
<UNAME>gloaec</UNAME>
<GNAME>users</GNAME>
<NAME>Tuleap</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1522313641</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.3]]></CPU>
<DISK>
<IMAGE><![CDATA[centos-7-mini64]]></IMAGE>
<IMAGE_UNAME><![CDATA[gloaec]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/centos.png]]></LOGO>
<MEMORY><![CDATA[2048]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[]]></BOOT>
</OS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>71</ID>
<UID>6</UID>
<GID>1</GID>
<UNAME>gloaec</UNAME>
<GNAME>users</GNAME>
<NAME>Tuleap-copy</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1522314193</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.3]]></CPU>
<DISK>
<ALLOW_ORPHANS><![CDATA[NO]]></ALLOW_ORPHANS>
<CLONE><![CDATA[YES]]></CLONE>
<CLONE_TARGET><![CDATA[SYSTEM]]></CLONE_TARGET>
<CLUSTER_ID><![CDATA[0]]></CLUSTER_ID>
<DATASTORE><![CDATA[images-disques]]></DATASTORE>
<DATASTORE_ID><![CDATA[101]]></DATASTORE_ID>
<DEV_PREFIX><![CDATA[vd]]></DEV_PREFIX>
<DISK_ID><![CDATA[0]]></DISK_ID>
<DISK_SNAPSHOT_TOTAL_SIZE><![CDATA[0]]></DISK_SNAPSHOT_TOTAL_SIZE>
<DRIVER><![CDATA[qcow2]]></DRIVER>
<IMAGE_ID><![CDATA[154]]></IMAGE_ID>
<IMAGE_STATE><![CDATA[1]]></IMAGE_STATE>
<LN_TARGET><![CDATA[NONE]]></LN_TARGET>
<ORIGINAL_SIZE><![CDATA[10000]]></ORIGINAL_SIZE>
<READONLY><![CDATA[NO]]></READONLY>
<SAVE><![CDATA[NO]]></SAVE>
<SIZE><![CDATA[10000]]></SIZE>
<SOURCE><![CDATA[/var/lib/one//datastores/101/4aaf4998e3843b924d9c392157deef11]]></SOURCE>
<TARGET><![CDATA[vda]]></TARGET>
<TM_MAD><![CDATA[shared]]></TM_MAD>
<TYPE><![CDATA[FILE]]></TYPE>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/centos.png]]></LOGO>
<MEMORY><![CDATA[2048]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK_ID><![CDATA[5]]></NETWORK_ID>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[]]></BOOT>
</OS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>72</ID>
<UID>14</UID>
<GID>1</GID>
<UNAME>gdemedeiros</UNAME>
<GNAME>users</GNAME>
<NAME>Odoo</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1522315655</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Ubuntu Server 16.04 avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/wpetit/cadoles-vm]]></DESCRIPTION>
<DISK>
<ALLOW_ORPHANS><![CDATA[NO]]></ALLOW_ORPHANS>
<CLONE><![CDATA[YES]]></CLONE>
<CLONE_TARGET><![CDATA[SYSTEM]]></CLONE_TARGET>
<CLUSTER_ID><![CDATA[0]]></CLUSTER_ID>
<DATASTORE><![CDATA[images-disques]]></DATASTORE>
<DATASTORE_ID><![CDATA[101]]></DATASTORE_ID>
<DEV_PREFIX><![CDATA[vd]]></DEV_PREFIX>
<DISK_ID><![CDATA[0]]></DISK_ID>
<DISK_SNAPSHOT_TOTAL_SIZE><![CDATA[0]]></DISK_SNAPSHOT_TOTAL_SIZE>
<DRIVER><![CDATA[qcow2]]></DRIVER>
<IMAGE_ID><![CDATA[155]]></IMAGE_ID>
<IMAGE_STATE><![CDATA[2]]></IMAGE_STATE>
<LN_TARGET><![CDATA[NONE]]></LN_TARGET>
<ORIGINAL_SIZE><![CDATA[10000]]></ORIGINAL_SIZE>
<READONLY><![CDATA[NO]]></READONLY>
<SAVE><![CDATA[NO]]></SAVE>
<SIZE><![CDATA[10000]]></SIZE>
<SOURCE><![CDATA[/var/lib/one//datastores/101/b7d2c121be8f578b07b5847697f72755]]></SOURCE>
<TM_MAD><![CDATA[shared]]></TM_MAD>
<TYPE><![CDATA[FILE]]></TYPE>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||512..4096|2048]]></MEMORY>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>73</ID>
<UID>8</UID>
<GID>1</GID>
<UNAME>bgaude</UNAME>
<GNAME>users</GNAME>
<NAME>MSE-eole-2.5.2</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1525422156</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.02]]></CPU>
<DISK>
<IMAGE><![CDATA[DS-MSE-2.5.2]]></IMAGE>
<IMAGE_UNAME><![CDATA[bgaude]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/linux.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||..1|0.02]]></CPU>
<MEMORY><![CDATA[M|range||1024..1024|1024]]></MEMORY>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>74</ID>
<UID>2</UID>
<GID>1</GID>
<UNAME>eoleone</UNAME>
<GNAME>users</GNAME>
<NAME>ubuntu-16.04-light-cadoles</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1526482854</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Ubuntu 16.04 "light" avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/Cadoles/cadoles-vm]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[ubuntu-16.04-light-cadoles-201902181020]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/linux.png]]></LOGO>
<MEMORY><![CDATA[512]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||..4096|512]]></MEMORY>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>75</ID>
<UID>2</UID>
<GID>1</GID>
<UNAME>eoleone</UNAME>
<GNAME>users</GNAME>
<NAME>crous-sympa-cadoles</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1526569782</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[CROUS Sympa avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/Cadoles/cadoles-vm]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[crous-sympa-cadoles-201902181021]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/linux.png]]></LOGO>
<MEMORY><![CDATA[512]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||..4096|512]]></MEMORY>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>76</ID>
<UID>2</UID>
<GID>1</GID>
<UNAME>eoleone</UNAME>
<GNAME>users</GNAME>
<NAME>centos-7-mini-cadoles</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1526634955</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[CentOS 7 "mini" avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/Cadoles/cadoles-vm]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[centos-7-mini64-cadoles-201902181021]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/linux.png]]></LOGO>
<MEMORY><![CDATA[512]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||..4096|512]]></MEMORY>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>77</ID>
<UID>7</UID>
<GID>1</GID>
<UNAME>vfebvre</UNAME>
<GNAME>users</GNAME>
<NAME>scribe-2.6.2-FI</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1530608517</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.02]]></CPU>
<DISK>
<IMAGE><![CDATA[EOLE-Scribe-2.6.2-FI]]></IMAGE>
<IMAGE_UNAME><![CDATA[vfebvre]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO>
<MEMORY><![CDATA[2048]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>78</ID>
<UID>7</UID>
<GID>1</GID>
<UNAME>vfebvre</UNAME>
<GNAME>users</GNAME>
<NAME>Wallis-scribe-ldap1</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1530621192</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.02]]></CPU>
<DISK>
<IMAGE><![CDATA[DS-Wallis-scribe-ldap1]]></IMAGE>
<IMAGE_UNAME><![CDATA[vfebvre]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO>
<MEMORY><![CDATA[2048]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>83</ID>
<UID>2</UID>
<GID>1</GID>
<UNAME>eoleone</UNAME>
<GNAME>users</GNAME>
<NAME>jenkins-master-cadoles</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1532273024</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Machine Jenkins (master) avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Compte administrateur Jenkins: admin
Généré via https://forge.cadoles.com/Cadoles/cadoles-vm]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[jenkins-master-201902181003]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/linux.png]]></LOGO>
<MEMORY><![CDATA[512]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||..4096|512]]></MEMORY>
<VCPU><![CDATA[O|range||2..4|2]]></VCPU>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>84</ID>
<UID>2</UID>
<GID>1</GID>
<UNAME>eoleone</UNAME>
<GNAME>users</GNAME>
<NAME>jenkins-slave-cadoles</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1532273111</REGTIME>
<TEMPLATE>
<CONTEXT>
<JENKINS_MASTER_URL><![CDATA[$JENKINS_MASTER_URL]]></JENKINS_MASTER_URL>
<JENKINS_SLAVE_LABELS><![CDATA[$JENKINS_SLAVE_LABELS]]></JENKINS_SLAVE_LABELS>
<JENKINS_SLAVE_PASSWORD><![CDATA[$JENKINS_SLAVE_PASSWORD]]></JENKINS_SLAVE_PASSWORD>
<JENKINS_SLAVE_USERNAME><![CDATA[$JENKINS_SLAVE_USERNAME]]></JENKINS_SLAVE_USERNAME>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Machine Jenkins (slave) avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/Cadoles/cadoles-vm]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[jenkins-slave-201902181003]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[JENKINS_MASTER_URL,JENKINS_SLAVE_USERNAME,JENKINS_SLAVE_PASSWORD,JENKINS_SLAVE_LABELS]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/linux.png]]></LOGO>
<MEMORY><![CDATA[512]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<JENKINS_MASTER_URL><![CDATA[M|text|URL du "master" Jenkins pour l'enrôlement]]></JENKINS_MASTER_URL>
<JENKINS_SLAVE_LABELS><![CDATA[O|text|Label(s) à associer à l'exécuteur Jenkins (séparés par des espaces)]]></JENKINS_SLAVE_LABELS>
<JENKINS_SLAVE_PASSWORD><![CDATA[M|password|Mot de passe de l'utilisateur Jenkins]]></JENKINS_SLAVE_PASSWORD>
<JENKINS_SLAVE_USERNAME><![CDATA[M|text|Nom de l'utilisateur Jenkins]]></JENKINS_SLAVE_USERNAME>
<MEMORY><![CDATA[M|range||..4096|512]]></MEMORY>
<VCPU><![CDATA[O|range||2..4|2]]></VCPU>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>85</ID>
<UID>2</UID>
<GID>1</GID>
<UNAME>eoleone</UNAME>
<GNAME>users</GNAME>
<NAME>zephir2-cadoles</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1534845479</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Machine "Zephir2" avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/Cadoles/cadoles-vm]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[zephir2-dev-201811050001]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/linux.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||..4096|1024]]></MEMORY>
</USER_INPUTS>
<VCPU><![CDATA[4]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>88</ID>
<UID>19</UID>
<GID>1</GID>
<UNAME>egarette</UNAME>
<GNAME>users</GNAME>
<NAME>VM-Zephir-PVE</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1548861248</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.1]]></CPU>
<DESCRIPTION><![CDATA[Zéphir de test pour le PVE]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[DISK-Zephir-PVE]]></IMAGE>
<IMAGE_UNAME><![CDATA[egarette]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<MEMORY><![CDATA[4096]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[CR_devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[oneadmin]]></NETWORK_UNAME>
</NIC>
<OS>
<BOOT><![CDATA[]]></BOOT>
</OS>
<VCPU><![CDATA[1]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
<VMTEMPLATE>
<ID>91</ID>
<UID>2</UID>
<GID>1</GID>
<UNAME>eoleone</UNAME>
<GNAME>users</GNAME>
<NAME>eolebase-2.7.0-cadoles</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>1</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<REGTIME>1549554116</REGTIME>
<TEMPLATE>
<CONTEXT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[$USER[SSH_PUBLIC_KEY]]]></SSH_PUBLIC_KEY>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DESCRIPTION><![CDATA[Machine eolebase-2.7.0 avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/Cadoles/cadoles-vm]]></DESCRIPTION>
<DISK>
<IMAGE><![CDATA[eole-2.7.0-cadoles-201902181021]]></IMAGE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<INPUTS_ORDER><![CDATA[]]></INPUTS_ORDER>
<LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO>
<MEMORY><![CDATA[1024]]></MEMORY>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<NIC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||..4096|1024]]></MEMORY>
<VCPU><![CDATA[O|range||2..4|2]]></VCPU>
</USER_INPUTS>
<VCPU><![CDATA[2]]></VCPU>
</TEMPLATE>
</VMTEMPLATE>
</VMTEMPLATE_POOL>

3
misc/vm.xml Normal file
View File

@ -0,0 +1,3 @@
<VM><ID>346</ID><UID>20</UID><GID>1</GID><UNAME>****</UNAME><GNAME>users</GNAME><NAME>one-346</NAME><PERMISSIONS><OWNER_U>1</OWNER_U><OWNER_M>1</OWNER_M><OWNER_A>0</OWNER_A><GROUP_U>0</GROUP_U><GROUP_M>0</GROUP_M><GROUP_A>0</GROUP_A><OTHER_U>0</OTHER_U><OTHER_M>0</OTHER_M><OTHER_A>0</OTHER_A></PERMISSIONS><LAST_POLL>0</LAST_POLL><STATE>1</STATE><LCM_STATE>0</LCM_STATE><PREV_STATE>0</PREV_STATE><PREV_LCM_STATE>0</PREV_LCM_STATE><RESCHED>0</RESCHED><STIME>1550570705</STIME><ETIME>0</ETIME><DEPLOY_ID></DEPLOY_ID><MONITORING></MONITORING><TEMPLATE><AUTOMATIC_DS_REQUIREMENTS><![CDATA["CLUSTERS/ID" @> 0]]></AUTOMATIC_DS_REQUIREMENTS><AUTOMATIC_REQUIREMENTS><![CDATA[(CLUSTER_ID = 0) & !(PUBLIC_CLOUD = YES)]]></AUTOMATIC_REQUIREMENTS><CONTEXT><DISK_ID><![CDATA[1]]></DISK_ID><ETH0_CONTEXT_FORCE_IPV4><![CDATA[]]></ETH0_CONTEXT_FORCE_IPV4><ETH0_DNS><![CDATA[192.168.5.253]]></ETH0_DNS><ETH0_GATEWAY><![CDATA[192.168.30.254]]></ETH0_GATEWAY><ETH0_GATEWAY6><![CDATA[]]></ETH0_GATEWAY6><ETH0_IP><![CDATA[192.168.30.143]]></ETH0_IP><ETH0_IP6><![CDATA[]]></ETH0_IP6><ETH0_IP6_PREFIX_LENGTH><![CDATA[]]></ETH0_IP6_PREFIX_LENGTH><ETH0_IP6_ULA><![CDATA[]]></ETH0_IP6_ULA><ETH0_MAC><![CDATA[02:00:c0:a8:1e:8f]]></ETH0_MAC><ETH0_MASK><![CDATA[255.255.255.0]]></ETH0_MASK><ETH0_MTU><![CDATA[]]></ETH0_MTU><ETH0_NETWORK><![CDATA[192.168.30.0]]></ETH0_NETWORK><ETH0_SEARCH_DOMAIN><![CDATA[]]></ETH0_SEARCH_DOMAIN><ETH0_VLAN_ID><![CDATA[30]]></ETH0_VLAN_ID><ETH0_VROUTER_IP><![CDATA[]]></ETH0_VROUTER_IP><ETH0_VROUTER_IP6><![CDATA[]]></ETH0_VROUTER_IP6><ETH0_VROUTER_MANAGEMENT><![CDATA[]]></ETH0_VROUTER_MANAGEMENT><NETWORK><![CDATA[YES]]></NETWORK><SSH_PUBLIC_KEY><![CDATA[]]></SSH_PUBLIC_KEY><TARGET><![CDATA[hda]]></TARGET></CONTEXT><CPU><![CDATA[0.2]]></CPU><DISK><ALLOW_ORPHANS><![CDATA[NO]]></ALLOW_ORPHANS><CLONE><![CDATA[YES]]></CLONE><CLONE_TARGET><![CDATA[SYSTEM]]></CLONE_TARGET><CLUSTER_ID><![CDATA[0]]></CLUSTER_ID><DATASTORE><![CDATA[images-disques]]></DATASTORE><DATASTORE_ID><![CDATA[101]]></DATA
STORE_ID><DEV_PREFIX><![CDATA[vd]]></DEV_PREFIX><DISK_ID><![CDATA[0]]></DISK_ID><DISK_SNAPSHOT_TOTAL_SIZE><![CDATA[0]]></DISK_SNAPSHOT_TOTAL_SIZE><DRIVER><![CDATA[qcow2]]></DRIVER><IMAGE><![CDATA[eole-2.7.0-cadoles-201902191048]]></IMAGE><IMAGE_ID><![CDATA[2153]]></IMAGE_ID><IMAGE_STATE><![CDATA[2]]></IMAGE_STATE><IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME><LN_TARGET><![CDATA[NONE]]></LN_TARGET><ORDER><![CDATA[1]]></ORDER><ORIGINAL_SIZE><![CDATA[10240]]></ORIGINAL_SIZE><READONLY><![CDATA[NO]]></READONLY><SAVE><![CDATA[NO]]></SAVE><SIZE><![CDATA[10240]]></SIZE><SOURCE><![CDATA[/var/lib/one//datastores/101/2da48b00b19e0d0b7be3db413ae1f19b]]></SOURCE><TARGET><![CDATA[vda]]></TARGET><TM_MAD><![CDATA[shared]]></TM_MAD><TYPE><![CDATA[FILE]]></TYPE></DISK><GRAPHICS><KEYMAP><![CDATA[fr]]></KEYMAP><LISTEN><![CDATA[0.0.0.0]]></LISTEN><TYPE><![CDATA[VNC]]></TYPE></GRAPHICS><INPUT><BUS><![CDATA[usb]]></BUS><TYPE><![CDATA[tablet]]></TYPE></INPUT><MEMORY><![CDATA[1024]]></MEMORY><NIC><AR_ID><![CDATA[0]]></AR_ID><BRIDGE><![CDATA[vswitch]]></BRIDGE><CLUSTER_ID><![CDATA[0]]></CLUSTER_ID><IP><![CDATA[192.168.30.143]]></IP><MAC><![CDATA[02:00:c0:a8:1e:8f]]></MAC><NETWORK><![CDATA[devel]]></NETWORK><NETWORK_ID><![CDATA[5]]></NETWORK_ID><NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME><NIC_ID><![CDATA[0]]></NIC_ID><SECURITY_GROUPS><![CDATA[0]]></SECURITY_GROUPS><TARGET><![CDATA[one-346-0]]></TARGET><VLAN_ID><![CDATA[30]]></VLAN_ID><VN_MAD><![CDATA[ovswitch]]></VN_MAD></NIC><OS><ARCH><![CDATA[x86_64]]></ARCH><BOOT><![CDATA[disk0]]></BOOT></OS><SECURITY_GROUP_RULE><PROTOCOL><![CDATA[ALL]]></PROTOCOL><RULE_TYPE><![CDATA[OUTBOUND]]></RULE_TYPE><SECURITY_GROUP_ID><![CDATA[0]]></SECURITY_GROUP_ID><SECURITY_GROUP_NAME><![CDATA[default]]></SECURITY_GROUP_NAME></SECURITY_GROUP_RULE><SECURITY_GROUP_RULE><PROTOCOL><![CDATA[ALL]]></PROTOCOL><RULE_TYPE><![CDATA[INBOUND]]></RULE_TYPE><SECURITY_GROUP_ID><![CDATA[0]]></SECURITY_GROUP_ID><SECURITY_GROUP_NAME><![CDATA[default]]></SECURITY_GROUP_NAME><
/SECURITY_GROUP_RULE><VCPU><![CDATA[2]]></VCPU><VMID><![CDATA[346]]></VMID></TEMPLATE><USER_TEMPLATE><DESCRIPTION><![CDATA[Machine eolebase-2.7.0 avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/Cadoles/cadoles-vm]]></DESCRIPTION><HYPERVISOR><![CDATA[kvm]]></HYPERVISOR><LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO><MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST><USER_INPUTS><CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU><MEMORY><![CDATA[M|range||..4096|1024]]></MEMORY><VCPU><![CDATA[O|range||2..4|2]]></VCPU></USER_INPUTS></USER_TEMPLATE><HISTORY_RECORDS/></VM>

141
misc/vm_formatted.xml Normal file
View File

@ -0,0 +1,141 @@
<?xml version="1.0"?>
<VM>
<ID>346</ID>
<UID>20</UID>
<GID>1</GID>
<UNAME>****</UNAME>
<GNAME>users</GNAME>
<NAME>one-346</NAME>
<PERMISSIONS>
<OWNER_U>1</OWNER_U>
<OWNER_M>1</OWNER_M>
<OWNER_A>0</OWNER_A>
<GROUP_U>0</GROUP_U>
<GROUP_M>0</GROUP_M>
<GROUP_A>0</GROUP_A>
<OTHER_U>0</OTHER_U>
<OTHER_M>0</OTHER_M>
<OTHER_A>0</OTHER_A>
</PERMISSIONS>
<LAST_POLL>0</LAST_POLL>
<STATE>1</STATE>
<LCM_STATE>0</LCM_STATE>
<PREV_STATE>0</PREV_STATE>
<PREV_LCM_STATE>0</PREV_LCM_STATE>
<RESCHED>0</RESCHED>
<STIME>1550570705</STIME>
<ETIME>0</ETIME>
<DEPLOY_ID/>
<MONITORING/>
<TEMPLATE>
<AUTOMATIC_DS_REQUIREMENTS><![CDATA["CLUSTERS/ID" @> 0]]></AUTOMATIC_DS_REQUIREMENTS>
<AUTOMATIC_REQUIREMENTS><![CDATA[(CLUSTER_ID = 0) & !(PUBLIC_CLOUD = YES)]]></AUTOMATIC_REQUIREMENTS>
<CONTEXT>
<DISK_ID><![CDATA[1]]></DISK_ID>
<ETH0_CONTEXT_FORCE_IPV4><![CDATA[]]></ETH0_CONTEXT_FORCE_IPV4>
<ETH0_DNS><![CDATA[192.168.5.253]]></ETH0_DNS>
<ETH0_GATEWAY><![CDATA[192.168.30.254]]></ETH0_GATEWAY>
<ETH0_GATEWAY6><![CDATA[]]></ETH0_GATEWAY6>
<ETH0_IP><![CDATA[192.168.30.143]]></ETH0_IP>
<ETH0_IP6><![CDATA[]]></ETH0_IP6>
<ETH0_IP6_PREFIX_LENGTH><![CDATA[]]></ETH0_IP6_PREFIX_LENGTH>
<ETH0_IP6_ULA><![CDATA[]]></ETH0_IP6_ULA>
<ETH0_MAC><![CDATA[02:00:c0:a8:1e:8f]]></ETH0_MAC>
<ETH0_MASK><![CDATA[255.255.255.0]]></ETH0_MASK>
<ETH0_MTU><![CDATA[]]></ETH0_MTU>
<ETH0_NETWORK><![CDATA[192.168.30.0]]></ETH0_NETWORK>
<ETH0_SEARCH_DOMAIN><![CDATA[]]></ETH0_SEARCH_DOMAIN>
<ETH0_VLAN_ID><![CDATA[30]]></ETH0_VLAN_ID>
<ETH0_VROUTER_IP><![CDATA[]]></ETH0_VROUTER_IP>
<ETH0_VROUTER_IP6><![CDATA[]]></ETH0_VROUTER_IP6>
<ETH0_VROUTER_MANAGEMENT><![CDATA[]]></ETH0_VROUTER_MANAGEMENT>
<NETWORK><![CDATA[YES]]></NETWORK>
<SSH_PUBLIC_KEY><![CDATA[]]></SSH_PUBLIC_KEY>
<TARGET><![CDATA[hda]]></TARGET>
</CONTEXT>
<CPU><![CDATA[0.2]]></CPU>
<DISK>
<ALLOW_ORPHANS><![CDATA[NO]]></ALLOW_ORPHANS>
<CLONE><![CDATA[YES]]></CLONE>
<CLONE_TARGET><![CDATA[SYSTEM]]></CLONE_TARGET>
<CLUSTER_ID><![CDATA[0]]></CLUSTER_ID>
<DATASTORE><![CDATA[images-disques]]></DATASTORE>
<DATASTORE_ID><![CDATA[101]]></DATASTORE_ID>
<DEV_PREFIX><![CDATA[vd]]></DEV_PREFIX>
<DISK_ID><![CDATA[0]]></DISK_ID>
<DISK_SNAPSHOT_TOTAL_SIZE><![CDATA[0]]></DISK_SNAPSHOT_TOTAL_SIZE>
<DRIVER><![CDATA[qcow2]]></DRIVER>
<IMAGE><![CDATA[eole-2.7.0-cadoles-201902191048]]></IMAGE>
<IMAGE_ID><![CDATA[2153]]></IMAGE_ID>
<IMAGE_STATE><![CDATA[2]]></IMAGE_STATE>
<IMAGE_UNAME><![CDATA[eoleone]]></IMAGE_UNAME>
<LN_TARGET><![CDATA[NONE]]></LN_TARGET>
<ORDER><![CDATA[1]]></ORDER>
<ORIGINAL_SIZE><![CDATA[10240]]></ORIGINAL_SIZE>
<READONLY><![CDATA[NO]]></READONLY>
<SAVE><![CDATA[NO]]></SAVE>
<SIZE><![CDATA[10240]]></SIZE>
<SOURCE><![CDATA[/var/lib/one//datastores/101/2da48b00b19e0d0b7be3db413ae1f19b]]></SOURCE>
<TARGET><![CDATA[vda]]></TARGET>
<TM_MAD><![CDATA[shared]]></TM_MAD>
<TYPE><![CDATA[FILE]]></TYPE>
</DISK>
<GRAPHICS>
<KEYMAP><![CDATA[fr]]></KEYMAP>
<LISTEN><![CDATA[0.0.0.0]]></LISTEN>
<TYPE><![CDATA[VNC]]></TYPE>
</GRAPHICS>
<INPUT>
<BUS><![CDATA[usb]]></BUS>
<TYPE><![CDATA[tablet]]></TYPE>
</INPUT>
<MEMORY><![CDATA[1024]]></MEMORY>
<NIC>
<AR_ID><![CDATA[0]]></AR_ID>
<BRIDGE><![CDATA[vswitch]]></BRIDGE>
<CLUSTER_ID><![CDATA[0]]></CLUSTER_ID>
<IP><![CDATA[192.168.30.143]]></IP>
<MAC><![CDATA[02:00:c0:a8:1e:8f]]></MAC>
<NETWORK><![CDATA[devel]]></NETWORK>
<NETWORK_ID><![CDATA[5]]></NETWORK_ID>
<NETWORK_UNAME><![CDATA[eoleone]]></NETWORK_UNAME>
<NIC_ID><![CDATA[0]]></NIC_ID>
<SECURITY_GROUPS><![CDATA[0]]></SECURITY_GROUPS>
<TARGET><![CDATA[one-346-0]]></TARGET>
<VLAN_ID><![CDATA[30]]></VLAN_ID>
<VN_MAD><![CDATA[ovswitch]]></VN_MAD>
</NIC>
<OS>
<ARCH><![CDATA[x86_64]]></ARCH>
<BOOT><![CDATA[disk0]]></BOOT>
</OS>
<SECURITY_GROUP_RULE>
<PROTOCOL><![CDATA[ALL]]></PROTOCOL>
<RULE_TYPE><![CDATA[OUTBOUND]]></RULE_TYPE>
<SECURITY_GROUP_ID><![CDATA[0]]></SECURITY_GROUP_ID>
<SECURITY_GROUP_NAME><![CDATA[default]]></SECURITY_GROUP_NAME>
</SECURITY_GROUP_RULE>
<SECURITY_GROUP_RULE>
<PROTOCOL><![CDATA[ALL]]></PROTOCOL>
<RULE_TYPE><![CDATA[INBOUND]]></RULE_TYPE>
<SECURITY_GROUP_ID><![CDATA[0]]></SECURITY_GROUP_ID>
<SECURITY_GROUP_NAME><![CDATA[default]]></SECURITY_GROUP_NAME>
</SECURITY_GROUP_RULE>
<VCPU><![CDATA[2]]></VCPU>
<VMID><![CDATA[346]]></VMID>
</TEMPLATE>
<USER_TEMPLATE>
<DESCRIPTION><![CDATA[Machine eolebase-2.7.0 avec contextualisation OpenNebula
Mot de passe par défaut: cadoles
Généré via https://forge.cadoles.com/Cadoles/cadoles-vm]]></DESCRIPTION>
<HYPERVISOR><![CDATA[kvm]]></HYPERVISOR>
<LOGO><![CDATA[images/logos/ubuntu.png]]></LOGO>
<MEMORY_UNIT_COST><![CDATA[MB]]></MEMORY_UNIT_COST>
<USER_INPUTS>
<CPU><![CDATA[M|range-float||0.1..1|0.2]]></CPU>
<MEMORY><![CDATA[M|range||..4096|1024]]></MEMORY>
<VCPU><![CDATA[O|range||2..4|2]]></VCPU>
</USER_INPUTS>
</USER_TEMPLATE>
<HISTORY_RECORDS/>
</VM>

View File

@ -3,158 +3,192 @@
pipeline {
agent {
label 'common'
label 'docker'
}
environment {
projectDir = "${env.project_name}_${env.BUILD_ID}"
}
triggers {
// Execute pipeline every day at 7h30 to prepare docker images
cron('30 7 * * 1-5')
}
stages {
stage("Clone repository") {
steps {
checkout scm:
[
$class: 'GitSCM',
userRemoteConfigs: [[url: env.repository_url, credentialsId: 'forge-jenkins']],
branches: [[name: env.ref]],
extensions: [
[$class: 'RelativeTargetDirectory', relativeTargetDir: env.projectDir ],
[$class: 'CloneOption', noTags: false, shallow: false, depth: 0, reference: ''],
[$class: 'WipeWorkspace' ]
]
],
changelog: false,
poll: false
stage("Prepare build environment") {
when {
anyOf {
triggeredBy cause: "UserIdCause", detail: "wpetit"
triggeredBy 'TimerTrigger'
}
}
}
stage("Ensure packaging branch") {
steps {
script {
dir(env.projectDir) {
sh 'git checkout "${packageBranch}"'
def commitOrRef = env.commit ? env.commit : env.ref
def branchesWithCommitOrRef = sh(script: "git branch --contains '${commitOrRef}'", returnStdout: true).split(' ')
if (branchesWithCommitOrRef.findAll{env.packageBranch.contains(it)}.any{true}) {
currentBuild.result = 'ABORTED'
error("La référence `${env.ref}` ne fait pas partie de la branche `${env.packageBranch}` !")
tamarin.prepareEnvironment()
}
}
}
stage("Package project") {
when {
not {
triggeredBy 'TimerTrigger'
}
}
steps {
script {
stage("Clone repository") {
checkout scm:
[
$class: 'GitSCM',
userRemoteConfigs: [[url: env.repository_url, credentialsId: 'jenkins-ssh-mse']],
branches: [[name: env.ref]],
extensions: [
[$class: 'RelativeTargetDirectory', relativeTargetDir: env.projectDir ],
[$class: 'CloneOption', noTags: false, shallow: false, depth: 0, reference: ''],
[$class: 'WipeWorkspace' ]
]
],
changelog: false,
poll: false
}
stage("Ensure packaging branch") {
dir(env.projectDir) {
sh 'git checkout "${packageBranch}"'
def commitOrRef = env.commit ? env.commit : env.ref
def branchesWithCommitOrRef = sh(script: "git branch --contains '${commitOrRef}'", returnStdout: true).split(' ')
if (branchesWithCommitOrRef.findAll{env.packageBranch.contains(it)}.any{true}) {
currentBuild.result = 'ABORTED'
error("La référence `${env.ref}` ne fait pas partie de la branche `${env.packageBranch}` !")
}
}
}
}
}
}
stage("Checkout ref") {
steps {
dir(env.projectDir) {
sh """
git checkout ${env.ref}
"""
}
}
}
stage("Build package") {
steps {
script {
dir(env.projectDir) {
// On construit les paquets à partir des informations
// de contexte provenant de CPKG et du webhook
def result = tamarin.buildPackageWithCPKG(
env.packageProfile ? env.packageProfile : "debian",
env.packageArch ? env.packageArch : "",
env.packageBranch ? env.packageBranch : "",
env.baseImage ? env.baseImage : ""
)
// On publie chacun des paquets construits
result.each { r ->
vulcain.publish(
r.packages,
r.env,
env.packageBranch
)
}
// On liste l'ensemble des paquets construits
def publishedPackages = result.collect { r ->
return r.packages.collect { p ->
def file = new File(p)
return "- Paquet `${file.getName()}`, Dépôt `${r.env}`, Distribution `${r.distrib}`"
}
}.transpose().collectMany { it }
// On notifie le canal Rocket.Chat de la publication des paquets
rocketSend (
avatar: 'https://jenkins.cadol.es/static/b5f67753/images/headshot.png',
message: """
Les paquets suivants ont été publiés pour le projet ${env.project_name}:
${publishedPackages.join('\n')}
[Visualiser le job](${env.RUN_DISPLAY_URL})
@${env.sender_login}
""".stripIndent(),
rawMessage: true
)
if (env.testPackageInstall == 'no') {
println "Test d'intallation des paquets désactivé."
return
}
// Pour chaque construction de paquets...
result.each { r ->
// On essaye de trouver un template de VM compatible
// avec la distribution cible de la construction
def vmTemplate = findMatchingVMTemplate(r.distrib)
if (vmTemplate == null) {
println "Aucun template de VM n'a été trouvé correspondant à la distribution `${r.distrib}`."
return
}
// Pour chaque paquets construits...
r.packages.each { p ->
def packageName = new File(p).getName()
stage("Test package '${packageName}' installation") {
try {
// On démarre une nouvelle VM et on lance l'installation du paquet publié
testPackageInstallation(vmTemplate, r.distrib, r.env, packageName)
} catch(e) {
currentBuild.result = 'UNSTABLE'
rocketSend (
avatar: 'https://jenkins.cadol.es/static/b5f67753/images/headshot.png',
message: """
[Installation du paquet `${packageName}` échouée sur `${vmTemplate}`](${env.RUN_DISPLAY_URL})
@${env.sender_login}
""".stripIndent(),
rawMessage: true
)
error("Installation du paquet `${packageName}` échouée.")
}
stage("Check [ci skip] in tag message") {
dir(env.projectDir) {
sh 'git checkout "${packageBranch}"'
def commitTags = sh(script: 'git describe --exact-match --abbrev=0', returnStdout: true).split(' ')
for (tag in commitTags) {
tag = tag.trim()
def tagMessage = sh(script: "git tag --format='%(subject)' -l '${tag}'", returnStdout: true).trim()
println("Tag '${tag}' message is: '${tagMessage}'")
if (tagMessage.contains('[ci skip]')) {
currentBuild.result = 'ABORTED'
error("Le message du tag '${tag}' contient le marqueur '[ci-skip]' !")
}
}
}
}
stage("Checkout ref") {
dir(env.projectDir) {
sh """
git checkout ${env.ref}
"""
}
}
stage("Build package") {
dir(env.projectDir) {
// On construit les paquets à partir des informations
// de contexte provenant de CPKG et du webhook
def result = tamarin.buildPackageWithCPKG(
env.packageProfile ? env.packageProfile : "debian",
env.packageArch ? env.packageArch : "",
env.packageBranch ? env.packageBranch : "",
env.baseImage ? env.baseImage : ""
)
// On publie chacun des paquets construits
result.each { r ->
vulcain.publish(
r.packages,
r.env,
env.packageBranch
)
}
// On attend que les paquets soient disponibles
// sur Vulcain pour envoyer la notification de diffusion
// si la distribution est spécifiée
waitForPackages(env.ref, result)
// On liste l'ensemble des paquets construits
def publishedPackages = result.collect { r ->
return r.packages.collect { p ->
def file = new File(p)
return "- Paquet `${file.getName()}`, Dépôt `${r.env}`, Distribution `${r.distrib}`"
}
}.transpose().collectMany { it }
// On notifie le canal Rocket.Chat de la publication des paquets
rocketSend (
avatar: 'https://jenkins.cadol.es/static/b5f67753/images/headshot.png',
message: """
Les paquets suivants ont été publiés pour le projet ${env.project_name}:
${publishedPackages.join('\n')}
[Visualiser le job](${env.RUN_DISPLAY_URL})
@${env.sender_login}
""".stripIndent(),
rawMessage: true,
attachments: lolops.getRandomDeliveryAttachment()
)
if (env.testPackageInstall != 'yes') {
println "Test d'intallation des paquets désactivé."
return
}
// Pour chaque construction de paquets...
result.each { r ->
// On essaye de trouver un template de VM compatible
// avec la distribution cible de la construction
def vmTemplate = findMatchingVMTemplate(r.distrib)
if (vmTemplate == null) {
println "Aucun template de VM n'a été trouvé correspondant à la distribution `${r.distrib}`."
return
}
// Pour chaque paquets construits...
r.packages.each { p ->
def packageFullName = new File(p).getName()
def packageRepository = r.distrib.split('-')[1] + '-' + r.env
def packageNameParts = packageFullName.split('_')
def packageName = packageNameParts[0]
def packageVersion = packageNameParts[1]
stage("Test package '${packageName}' installation") {
build job: 'Test de paquet Debian', wait: false, parameters: [
[$class: 'StringParameterValue', name: 'packageName', value: packageName],
[$class: 'StringParameterValue', name: 'packageVersion', value: packageVersion],
[$class: 'StringParameterValue', name: 'packageRepository', value: packageRepository],
[$class: 'StringParameterValue', name: 'vmTemplate', value: vmTemplate]
]
}
}
}
}
}
}
}
post {
always {
sh "rm -rf '${env.projectDir}'"
cleanWs()
}
}
}
}
post {
always {
sh "rm -rf '${env.projectDir}'"
}
}
}
// Cette fonction fait un simple "mapping"
@ -168,19 +202,27 @@ def findMatchingVMTemplate(String distrib) {
return vmTemplatesMap.get(distrib, null)
}
// Cette fonction démarre une nouvelle VM, configure les dépôts tiers pour ajouter
// celui de Cadoles correspondant à la cible du paquet et tente d'installer celui ci.
def testPackageInstallation(String vmTemplate, String distrib, String env, String packageName) {
def version = distrib.split('-')[1]
def packageNameParts = packageName.split('_')
nebula.runInNewVM([
vmTemplate: vmTemplate,
script: """
set -xeo pipefail
wget -qO - https://vulcain.cadoles.com/cadoles.gpg | apt-key add -
echo 'deb https://vulcain.cadoles.com ${version}-${env} main' > /etc/apt/sources.list.d/${version}-${env}.list
apt-get update -y
apt-get install -y ${packageNameParts[0]}=${packageNameParts[1]}
"""
])
}
def waitForPackages(String tagRef, buildResults) {
def packageVersion = tagRef.split('/')[3];
def packageDistrib = env.packageBranch.split('/')[2];
buildResults.each { r ->
def distrib = "${packageDistrib}-${r.env}"
r.packages.each { p ->
def file = new File(p)
def fileNameParts = file.getName().take(file.getName().lastIndexOf('.')).split('_')
def packageName = fileNameParts[0]
def packageArch = fileNameParts[2]
debian.waitForRepoPackage(packageName, [
baseURL: 'https://vulcain.cadoles.com',
distrib: distrib,
component: 'main',
type: 'binary',
arch: packageArch,
expectedVersion: packageVersion
])
}
}
}

View File

@ -0,0 +1,132 @@
@Library("cadoles") _

// Pipeline de test d'installation d'un paquet Debian depuis le dépôt Vulcain,
// dans une VM OpenNebula jetable. Si le template de VM est basé sur EOLE,
// les dicos et templates Creole distribués par le paquet sont aussi validés.
pipeline {
    agent {
        label 'common'
    }
    parameters {
        string(
            name: 'packageName',
            description: 'Nom du paquet à installer'
        )
        string(
            name: 'packageVersion',
            defaultValue: '',
            description: 'Version du paquet à installer'
        )
        string(
            name: 'packageRepository',
            description: 'Dépôt de paquets à utiliser sur Vulcain'
        )
        string(
            name: 'vmTemplate',
            description: 'Template OpenNebula de la VM à utiliser pour le test d\'installation'
        )
    }
    stages {
        stage("Check parameters") {
            steps {
                script {
                    // Validation des paramètres obligatoires avant de démarrer une VM.
                    if (!params.packageName?.trim()) {
                        error("Le nom du paquet n'est pas défini !")
                    }
                    if (!params.vmTemplate?.trim()) {
                        error("Le template de VM n'est pas défini !")
                    }
                    if (!params.packageRepository?.trim()) {
                        error("Le dépôt de paquets n'est pas défini !")
                    }
                }
            }
        }
        stage("Test package installation") {
            steps {
                script {
                    nebula.runInNewVM([vmTemplate: params.vmTemplate]) { shell ->
                        // On ajoute le dépôt Vulcain sur la machine et on met
                        // à jour la liste des paquets
                        stage("Add Vulcain '${params.packageRepository}' repository") {
                            shell("""
                            set -xeo pipefail
                            wget -qO - https://vulcain.cadoles.com/cadoles.gpg | apt-key add -
                            echo 'deb https://vulcain.cadoles.com ${params.packageRepository} main' > /etc/apt/sources.list.d/${params.packageRepository}.list
                            apt-get update -y
                            """)
                        }
                        // On installe le paquet dans la version demandée (si celle ci est fournie)
                        stage("Install package '${params.packageName}${ params.packageVersion?.trim() ? '=' + params.packageVersion?.trim() : ''}'") {
                            hook("pre-install")
                            shell("""
                            set -xeo pipefail
                            apt-get install -y ${params.packageName}${ params.packageVersion?.trim() ? '=' + params.packageVersion?.trim() : ''}
                            """)
                            hook("post-install")
                        }
                        // Si le template de VM est basé sur EOLE, alors on tente
                        // d'appliquer un CreoleLint sur les dicos/templates qui pourraient être
                        // distribués par le paquet nouvellement installé
                        if (params.vmTemplate.contains('eole')) {
                            stage("Lint Creole templates") {
                                shell("""
                                set -xeo pipefail
                                TEMPLATES=\$(dpkg -L ${params.packageName} | grep /usr/share/eole/creole/distrib/ || true)
                                if [ -z "\$TEMPLATES" ]; then
                                    echo "Aucun template Creole dans le paquet."
                                    exit 0
                                fi
                                for tmpl in \$TEMPLATES; do
                                    CreoleLint -t \$(basename \$tmpl)
                                done
                                """)
                            }
                            stage("Lint Creole dictionnaries") {
                                shell("""
                                set -xeo pipefail
                                DICOS=\$(dpkg -L ${params.packageName} | grep /usr/share/eole/creole/dicos/ || true)
                                if [ -z "\$DICOS" ]; then
                                    echo "Aucun dictionnaire Creole dans le paquet."
                                    exit 0
                                fi
                                for dico in \$DICOS; do
                                    # Correctif: on passe chaque dictionnaire a basename
                                    # (l'ancienne version passait la liste complete \$DICOS)
                                    CreoleLint -d \$(basename \$dico)
                                done
                                """)
                            }
                        }
                    }
                }
            }
        }
    }
    post {
        failure {
            // Notification Rocket.Chat en mentionnant l'utilisateur ayant lancé le job.
            wrap([$class: 'BuildUser']) {
                rocketSend (
                    avatar: 'https://jenkins.cadol.es/static/b5f67753/images/headshot.png',
                    message: """
Le test d'installation du paquet `${params.packageName}` a échoué:
- Version `${params.packageVersion}`
- Dépôt `${params.packageRepository}`
- Template de la VM `${params.vmTemplate}`
[Voir le job](${env.RUN_DISPLAY_URL})
@${env.BUILD_USER_ID ? env.BUILD_USER_ID : 'here'}
""".stripIndent(),
                    rawMessage: true
                )
            }
        }
    }
}

View File

@ -0,0 +1,93 @@
import hudson.tasks.test.AbstractTestResultAction
@Library('cadoles') _
// Pipeline d'audit d'accessibilité (RGAA) : exécute pa11y sur une liste
// d'URLs, publie les rapports au format JUnit puis envoie un résumé des
// résultats sur Rocket.Chat.
pipeline {
parameters {
// Une URL par ligne ; chaque URL donne lieu à un stage d'audit dédié.
text(name: 'URLS', defaultValue: 'https://msedev.crous-toulouse.fr\nhttps://msedev.crous-toulouse.fr/envole/enregistrement\nhttps://msedev.crous-toulouse.fr/envole/page/faq\nhttps://msedev.crous-toulouse.fr/envole/page/?t=liens_utiles\nhttps://msedev.crous-toulouse.fr/envole/page/?t=mentions_legales\nhttps://msedev.crous-toulouse.fr/envole/message/new\nhttps://msedev.crous-toulouse.fr/envole/recuperation/email\nhttps://msedev.crous-toulouse.fr/envole/courriel/raz', description: 'Liste des URLs à tester, une par ligne')
booleanParam(name: 'INCLUDE_WARNINGS', defaultValue: false, description: 'Inclure les avertissements')
booleanParam(name: 'INCLUDE_NOTICES', defaultValue: false, description: 'Inclure les notifications')
// Cookie transmis par pa11y lors des requêtes (ex: langue du site).
text(name: 'COOKIE_VALUE', defaultValue: 'mselang=fr_FR')
}
options {
disableConcurrentBuilds()
}
agent {
node {
label 'docker'
}
}
stages {
stage('Run RGAA audit') {
steps {
script {
def urls = params.URLS.split('\n')
def count = 0
urls.each { u ->
stage("Audit page '${u}'") {
// Identifiants HTTP basic-auth du site audité.
withCredentials([
usernamePassword(
credentialsId: 'msedev-basic-auth',
usernameVariable: 'MSEDEV_USERNAME',
passwordVariable: 'MSEDEV_PASSWORD'
)
]) {
// Audit de la page via la librairie partagée "pa11y" ;
// le rapport est produit au format JUnit.
def report = pa11y.audit(u.trim(), [
reporter: 'junit',
username: env.MSEDEV_USERNAME,
password: env.MSEDEV_PASSWORD,
standard: 'WCAG2AA',
includeNotices: params.INCLUDE_NOTICES,
includeWarnings: params.INCLUDE_WARNINGS,
cookie_value: params.COOKIE_VALUE
])
// Un fichier de rapport par URL auditée.
writeFile file:"./report_${count}.xml", text:report
count++
}
}
}
// Publication de l'ensemble des rapports générés.
junit testResults: '*.xml', skipPublishingChecks: true
rocketSend(
channel: '#cnous-mse',
avatar: 'https://jenkins.cadol.es/static/b5f67753/images/headshot.png',
message: """
Audit RGAA | ${testStatuses()}
- [Voir les tests](${env.RUN_DISPLAY_URL})
@here
""".stripIndent(),
rawMessage: true,
)
}
}
}
}
post {
always {
// Nettoyage du workspace quel que soit le résultat.
cleanWs()
}
}
}
@NonCPS
// Build a human-readable summary (in French) of the current build's test
// results, or an empty string when no test results are attached to the build.
def testStatuses() {
    def summary = ''
    AbstractTestResultAction action = currentBuild.rawBuild.getAction(AbstractTestResultAction.class)
    if (action == null) {
        return summary
    }
    def failedCount = action.failCount
    def skippedCount = action.skipCount
    def passedCount = action.totalCount - failedCount - skippedCount
    summary = "Passant(s): ${passedCount}, Échoué(s): ${failedCount} ${action.failureDiffString}, Désactivé(s): ${skippedCount}"
    return summary
}

View File

@ -0,0 +1,76 @@
// Pipeline de création d'une "release" Sentry pour le commit courant :
// clone complet du dépôt puis création/finalisation de la release via sentry-cli.
pipeline {
agent {
docker {
image "getsentry/sentry-cli"
// L'entrypoint par défaut de l'image est sentry-cli : on le neutralise
// pour pouvoir exécuter les étapes du pipeline dans le conteneur.
args "--entrypoint="
}
}
environment {
// Répertoire de travail unique par build pour éviter les collisions.
projectDir = "${env.project_name}_${env.BUILD_ID}"
}
stages {
stage("Clone repository") {
steps {
// Clone complet (tags inclus, non shallow) : sentry-cli s'appuie
// sur l'historique git pour proposer la version de la release.
checkout scm:
[
$class: 'GitSCM',
userRemoteConfigs: [[url: env.repository_url, credentialsId: 'jenkins-forge-ssh']],
branches: [[name: env.ref]],
extensions: [
[$class: 'RelativeTargetDirectory', relativeTargetDir: env.projectDir ],
[$class: 'CloneOption', noTags: false, shallow: false, depth: 0, reference: ''],
[$class: 'WipeWorkspace' ]
]
],
changelog: false,
poll: false
}
}
stage('Create sentry release') {
steps {
dir(env.projectDir) {
withCredentials([
string(credentialsId: 'sentry-url', variable: 'SENTRY_URL'),
string(credentialsId: 'sentry-release-auth-token', variable: 'SENTRY_AUTH_TOKEN')
]) {
// 1. "propose-version" déduit la version depuis le dépôt git ;
// 2. "new" crée la release, "set-commits" l'associe aux commits
//    (toléré en échec via le sous-shell "|| exit 0") ;
// 3. "finalize" publie la release.
sh '''
SENTRY_CMD="sentry-cli --auth-token \"${SENTRY_AUTH_TOKEN}\" --url \"${SENTRY_URL}\""
PROJECT_VERSION=$(sentry-cli releases propose-version)
$SENTRY_CMD \
releases \
--org "${sentry_org}" \
new \
-p "${sentry_project}" ${PROJECT_VERSION}
(
$SENTRY_CMD \
releases \
--org "${sentry_org}" \
set-commits --local \
${PROJECT_VERSION} || exit 0
)
$SENTRY_CMD \
releases \
--org "${sentry_org}" \
finalize \
${PROJECT_VERSION}
'''
}
}
}
}
}
post {
always {
// Nettoyage du workspace quel que soit le résultat.
cleanWs()
}
}
}

View File

@ -0,0 +1,42 @@
#!/bin/bash
# Download the Let's Encrypt CA certificates and register them in the system
# trust store. Supports Debian-like, Arch-like and RedHat-like layouts.
#
# NOTE: this script relies on bash-only features (arrays, pipefail), so the
# shebang must be bash — the previous "#!/bin/sh" was incorrect.
set -eo pipefail

# Candidate trust store directories, by distribution family
# (Debian/Ubuntu, Arch, RedHat/CentOS).
declare -a DESTDIR_PATHS=(
    "/usr/local/share/ca-certificates"
    "/etc/ca-certificates/trust-source/anchors"
    "/etc/pki/ca-trust/source/anchors"
)

DESTDIR=""
for path in "${DESTDIR_PATHS[@]}"; do
    if [ -d "$path" ]; then
        DESTDIR=$path
        break
    fi
done

# Fail explicitly when no known trust store directory exists, instead of
# running "cd" on an empty path below.
if [ -z "$DESTDIR" ]; then
    echo "Could not find a supported CA certificates directory on this system." 1>&2
    exit 1
fi

# Prefer update-ca-certificates (Debian-like); fall back to
# update-ca-trust (RedHat-like) when it is not available.
UPDATE_CERTS_CMD=update-ca-certificates
if [ -z "$(which $UPDATE_CERTS_CMD)" ]; then
    UPDATE_CERTS_CMD="update-ca-trust extract"
fi

# Certificates to install, one URL per line.
CERTS="$(cat <<EOF
https://letsencrypt.org/certs/isrgrootx1.pem
https://letsencrypt.org/certs/isrg-root-x2.pem
https://letsencrypt.org/certs/lets-encrypt-r3.pem
https://letsencrypt.org/certs/lets-encrypt-e1.pem
https://letsencrypt.org/certs/lets-encrypt-r4.pem
https://letsencrypt.org/certs/lets-encrypt-e2.pem
EOF
)"

cd "$DESTDIR"

for cert in $CERTS; do
    echo "Downloading '$cert'..."
    filename=$(basename "$cert")
    wget --tries=10 --timeout=30 -O "$filename" "$cert"
    # Normalize to a .crt file, as expected by the trust store tooling.
    openssl x509 -in "$filename" -inform PEM -out "$filename.crt"
done

$UPDATE_CERTS_CMD

View File

@ -0,0 +1,181 @@
#!/bin/bash
set -eo pipefail
GITEA_DOWNLOAD_PROJECT=${GITEA_DOWNLOAD_PROJECT}
GITEA_DOWNLOAD_ORG=${GITEA_DOWNLOAD_ORG}
GITEA_DOWNLOAD_BASE_URL=${GITEA_BASE_URL:-https://forge.cadoles.com}
GITEA_DOWNLOAD_ANONYMOUS=${GITEA_DOWNLOAD_ANONYMOUS:-no}
GITEA_DOWNLOAD_USERNAME=${GITEA_DOWNLOAD_USERNAME}
GITEA_DOWNLOAD_PASSWORD=${GITEA_DOWNLOAD_PASSWORD}
GITEA_DOWNLOAD_RELEASE_NAME=${GITEA_DOWNLOAD_RELEASE_NAME:-latest}
GITEA_DOWNLOAD_TARGET_DIRECTORY=${GITEA_DOWNLOAD_TARGET_DIRECTORY:-gitea-dl}
GITEA_DOWNLOAD_ATTACHMENTS_FILTER="${GITEA_DOWNLOAD_ATTACHMENTS_FILTER:-.*}"
function check_dependencies {
assert_command_available 'curl'
assert_command_available 'jq'
}
function assert_command_available {
local command=$1
local command_path=$(which $command)
if [ -z "$command_path" ]; then
echo "The '$command' command could not be found. Please install it before using this script." 1>&2
exit 1
fi
}
function check_environment {
assert_environment GITEA_DOWNLOAD_PROJECT
assert_environment GITEA_DOWNLOAD_ORG
assert_environment GITEA_DOWNLOAD_BASE_URL
}
function source_env_file {
if [ ! -f '.env' ]; then
return 0
fi
set -o allexport
source .env
set +o allexport
}
function assert_environment {
    # Ensure that the environment variable whose *name* is given as $1 is
    # non-empty; print an error and exit with status 1 otherwise.
    local name=$1
    # Indirect expansion: value of the variable named by $name.
    local value=${!name}
    if [ -z "$value" ]; then
        # Fix: the previous message used broken quoting ("The $"$name"...")
        # which printed a stray "$" instead of naming the variable cleanly.
        echo "The '$name' environment variable is empty." 1>&2
        exit 1
    fi
}
function ask_credentials {
if [ "${GITEA_DOWNLOAD_ANONYMOUS}" == "yes" ]; then
return
fi
if [ -z "$GITEA_DOWNLOAD_USERNAME" ]; then
echo -n "Username: "
read GITEA_DOWNLOAD_USERNAME
fi
if [ -z "$GITEA_DOWNLOAD_PASSWORD" ]; then
echo -n "Password: "
stty -echo
read GITEA_DOWNLOAD_PASSWORD
stty echo
echo
fi
}
function retrieve_release_name {
if [ ! -z "$GITEA_DOWNLOAD_RELEASE_NAME" ]; then
return
fi
echo -n "Release name: "
read GITEA_DOWNLOAD_RELEASE_NAME
}
function retrieve_target_directory {
if [ ! -z "$GITEA_DOWNLOAD_TARGET_DIRECTORY" ]; then
return
fi
echo -n "Target directory: "
read GITEA_DOWNLOAD_TARGET_DIRECTORY
}
function json_set {
    # Set key ($2) to value ($3) in the JSON document ($1) and print the
    # resulting document on stdout. When $4 is "true", the value is treated as
    # a raw string loaded through a temporary file (preserving newlines);
    # otherwise it must be a valid JSON value.
    local document=$1
    local key=$2
    local value=$3
    local as_raw_file=$4
    if [ "$as_raw_file" == "true" ]; then
        local valuefile=$(mktemp)
        echo "$value" > "$valuefile"
        echo $document | jq -cr --rawfile v "$valuefile" --arg k "$key" '.[$k] = $v'
        rm -f "$valuefile"
    else
        echo $document | jq -cr --argjson v "$value" --arg k "$key" '.[$k] = $v'
    fi
}
function gitea_api {
# Call the Gitea REST API (v1). $1 is the API path, appended to
# "$GITEA_DOWNLOAD_BASE_URL/api/v1"; all remaining arguments are forwarded
# to curl. Credentials are added unless anonymous mode is enabled.
local path=$1
local args=${@:2}
if [ "${GITEA_DOWNLOAD_ANONYMOUS}" != 'yes' ]; then
# NOTE(review): the inner double quotes close and reopen the string, so
# credentials containing spaces would be word-split when $args is expanded
# unquoted below — confirm before relying on such passwords.
args="-u "$GITEA_DOWNLOAD_USERNAME:$GITEA_DOWNLOAD_PASSWORD" ${args}"
fi
# -k: TLS verification is disabled (internal certificate authorities).
curl -L \
--fail \
--ipv4 \
-k \
${args} \
"$GITEA_DOWNLOAD_BASE_URL/api/v1$path"
}
function gitea_download {
    # Download a release attachment into the given output file.
    # $1: attachment uuid, $2: destination path.
    local attachment_id=$1
    local output=$2
    # Fix: build the curl arguments in a *local* variable instead of mutating
    # the global GITEA_DOWNLOAD_CURL_ARGS, which previously appended the
    # credentials again on every call (duplicated "-u" flags and credentials
    # leaking into the global state).
    local args="$GITEA_DOWNLOAD_CURL_ARGS"
    if [ "${GITEA_DOWNLOAD_ANONYMOUS}" != 'yes' ]; then
        args="-u $GITEA_DOWNLOAD_USERNAME:$GITEA_DOWNLOAD_PASSWORD ${args}"
    fi
    curl -L \
        --fail \
        --ipv4 \
        -k \
        --output "$output" \
        $args \
        "$GITEA_DOWNLOAD_BASE_URL/attachments/$attachment_id"
}
function download_release_files {
# Fetch the release matching GITEA_DOWNLOAD_RELEASE_NAME (or the most recent
# one when set to "latest") and download its attachments into
# GITEA_DOWNLOAD_TARGET_DIRECTORY, keeping only the attachments whose name
# matches the GITEA_DOWNLOAD_ATTACHMENTS_FILTER regular expression.
local releases=$(gitea_api "/repos/${GITEA_DOWNLOAD_ORG}/${GITEA_DOWNLOAD_PROJECT}/releases")
local assets
if [ "$GITEA_DOWNLOAD_RELEASE_NAME" == "latest" ]; then
# "latest" means the release with the highest identifier.
assets=$(echo $releases | jq -r '. | sort_by(.id) | reverse | .[0].assets')
else
assets=$(echo $releases | jq -r --arg name "$GITEA_DOWNLOAD_RELEASE_NAME" '. | map(select( .name == $name)) | .[0].assets')
fi
# jq prints "null" when no release matched the selection above.
if [ "$assets" == "null" ]; then
echo 1>&2 "No release found."
exit 1
fi
mkdir -p "$GITEA_DOWNLOAD_TARGET_DIRECTORY"
local attachment_uuids=$(echo $assets | jq -r '.[].uuid')
for uuid in $attachment_uuids; do
# Resolve the attachment file name from its uuid.
local filename=$(echo $assets | jq -r --arg uuid "$uuid" '. | map(select( .uuid == $uuid)) | .[0].name')
if [[ "$filename" =~ $GITEA_DOWNLOAD_ATTACHMENTS_FILTER ]]; then
echo "Downloading attachment '$filename'"
gitea_download "$uuid" "$GITEA_DOWNLOAD_TARGET_DIRECTORY/$filename"
else
echo "Ignoring attachment '$filename'"
fi
done
}
function main {
check_dependencies
source_env_file
check_environment
ask_credentials
retrieve_release_name
retrieve_target_directory
download_release_files
}
main

View File

@ -0,0 +1,161 @@
#!/bin/bash
set -eo pipefail
# Configuration, overridable through the environment or a local .env file.
GITEA_PACKAGE_ORG=${GITEA_PACKAGE_ORG}
GITEA_PACKAGE_BASE_URL=${GITEA_BASE_URL:-https://forge.cadoles.com}
GITEA_PACKAGE_USERNAME=${GITEA_PACKAGE_USERNAME}
GITEA_PACKAGE_PASSWORD=${GITEA_PACKAGE_PASSWORD}
GITEA_PACKAGE_FILE=${GITEA_PACKAGE_FILE}
GITEA_PACKAGE_CURL_MAX_RETRY=${GITEA_PACKAGE_CURL_MAX_RETRY:-3}
# Fix: default from GITEA_PACKAGE_FORCE_OVERWRITE itself (the variable users
# are expected to set); the previous code only read the mistyped
# GITEA_PACKAGE_FORCE_UPLOAD name, which is kept as a fallback for
# backward compatibility.
GITEA_PACKAGE_FORCE_OVERWRITE=${GITEA_PACKAGE_FORCE_OVERWRITE:-${GITEA_PACKAGE_FORCE_UPLOAD:-yes}}
GITEA_PACKAGE_DEBIAN_DISTRIBUTION=${GITEA_PACKAGE_DEBIAN_DISTRIBUTION:-latest}
GITEA_PACKAGE_DEBIAN_COMPONENT=${GITEA_PACKAGE_DEBIAN_COMPONENT:-main}
GITEA_PACKAGE_ALPINE_BRANCH=${GITEA_PACKAGE_ALPINE_BRANCH:-latest}
GITEA_PACKAGE_ALPINE_REPOSITORY=${GITEA_PACKAGE_ALPINE_REPOSITORY:-main}
function check_dependencies {
assert_command_available 'curl'
}
function assert_command_available {
local command=$1
local command_path=$(which $command)
if [ -z "$command_path" ]; then
echo "The '$command' command could not be found. Please install it before using this script." 1>&2
exit 1
fi
}
function check_environment {
assert_environment GITEA_PACKAGE_ORG
assert_environment GITEA_PACKAGE_BASE_URL
}
function source_env_file {
if [ ! -f '.env' ]; then
return 0
fi
set -o allexport
source .env
set +o allexport
}
function assert_environment {
    # Ensure that the environment variable whose *name* is given as $1 is
    # non-empty; print an error and exit with status 1 otherwise.
    local name=$1
    # Indirect expansion: value of the variable named by $name.
    local value=${!name}
    if [ -z "$value" ]; then
        # Fix: the previous message used broken quoting ("The $"$name"...")
        # which printed a stray "$" instead of naming the variable cleanly.
        echo "The '$name' environment variable is empty." 1>&2
        exit 1
    fi
}
function ask_credentials {
if [ -z "$GITEA_PACKAGE_USERNAME" ]; then
echo -n "Username: "
read GITEA_PACKAGE_USERNAME
fi
if [ -z "$GITEA_PACKAGE_PASSWORD" ]; then
echo -n "Password: "
stty -echo
read GITEA_PACKAGE_PASSWORD
stty echo
echo
fi
}
function ask_package_type {
    # Prompt for the package type until a supported one is provided; the
    # prompt is skipped entirely when GITEA_PACKAGE_TYPE is already valid.
    local available_types="debian alpine redhat"
    local match=$( ( echo "$available_types" | grep -qw "$GITEA_PACKAGE_TYPE" ) && echo yes || echo no )
    # Fix: quote $GITEA_PACKAGE_TYPE so the "-z" test stays well-formed
    # if the variable ever contains whitespace.
    while [ "$match" == "no" ] || [ -z "$GITEA_PACKAGE_TYPE" ]; do
        echo -n "Package type ($available_types): "
        read GITEA_PACKAGE_TYPE
        match=$( ( echo "$available_types" | grep -qw "$GITEA_PACKAGE_TYPE" ) && echo yes || echo no )
    done
}
function ask_package_file {
    # Prompt for a package file path until an existing file is provided, then
    # infer GITEA_PACKAGE_TYPE from the file extension when it is not set yet.
    while [ ! -f "$GITEA_PACKAGE_FILE" ]; do
        echo -n "Package file (must be a valid path to a supported package file): "
        read GITEA_PACKAGE_FILE
    done
    # Fix: quote $GITEA_PACKAGE_TYPE so the "-z" test stays well-formed
    # if the variable ever contains whitespace.
    if [ -z "$GITEA_PACKAGE_TYPE" ]; then
        local filename=$(basename -- "$GITEA_PACKAGE_FILE")
        local extension="${filename##*.}"
        case $extension in
        deb)
            GITEA_PACKAGE_TYPE=debian
            ;;
        apk)
            GITEA_PACKAGE_TYPE=alpine
            ;;
        rpm)
            GITEA_PACKAGE_TYPE=redhat
            ;;
        esac
    fi
}
function upload_debian_package {
gitea_api "/api/packages/$GITEA_PACKAGE_ORG/debian/pool/$GITEA_PACKAGE_DEBIAN_DISTRIBUTION/$GITEA_PACKAGE_DEBIAN_COMPONENT/upload" \
--upload-file "$GITEA_PACKAGE_FILE"
}
function upload_alpine_package {
gitea_api "/api/packages/$GITEA_PACKAGE_ORG/alpine/$GITEA_PACKAGE_ALPINE_BRANCH/$GITEA_PACKAGE_ALPINE_REPOSITORY" \
--upload-file "$GITEA_PACKAGE_FILE"
}
function upload_redhat_package {
gitea_api "/api/packages/$GITEA_PACKAGE_ORG/rpm/upload" \
--upload-file "$GITEA_PACKAGE_FILE"
}
function gitea_api {
local path=$1
local args=${@:2}
curl -L \
--fail \
--ipv4 \
--progress-bar \
--retry "$GITEA_PACKAGE_CURL_MAX_RETRY" \
-u "$GITEA_PACKAGE_USERNAME:$GITEA_PACKAGE_PASSWORD" \
$GITEA_PACKAGE_CURL_ARGS \
${args} \
"$GITEA_PACKAGE_BASE_URL$path"
}
function main {
check_dependencies
source_env_file
check_environment
ask_credentials
ask_package_file
ask_package_type
case $GITEA_PACKAGE_TYPE in
debian)
upload_debian_package
;;
alpine)
upload_alpine_package
;;
redhat)
upload_redhat_package
;;
*)
echo "Package type '$GITEA_PACKAGE_TYPE' is not yet supported" 1>&2
exit 1
;;
esac
}
main

View File

@ -0,0 +1,202 @@
#!/bin/bash
set -eo pipefail
GITEA_RELEASE_PROJECT=${GITEA_RELEASE_PROJECT}
GITEA_RELEASE_ORG=${GITEA_RELEASE_ORG}
GITEA_RELEASE_BASE_URL=${GITEA_BASE_URL:-https://forge.cadoles.com}
GITEA_RELEASE_USERNAME=${GITEA_RELEASE_USERNAME}
GITEA_RELEASE_PASSWORD=${GITEA_RELEASE_PASSWORD}
GITEA_RELEASE_NAME=${GITEA_RELEASE_NAME}
GITEA_RELEASE_VERSION=${GITEA_RELEASE_VERSION}
GITEA_RELEASE_COMMITISH_TARGET=${GITEA_RELEASE_COMMITISH_TARGET}
GITEA_RELEASE_IS_DRAFT=${GITEA_RELEASE_IS_DRAFT:-false}
GITEA_RELEASE_IS_PRERELEASE=${GITEA_RELEASE_IS_PRERELEASE:-true}
GITEA_RELEASE_BODY=${GITEA_RELEASE_BODY}
GITEA_RELEASE_ATTACHMENTS=${GITEA_RELEASE_ATTACHMENTS}
GITEA_RELEASE_CURL_MAX_RETRY=${GITEA_RELEASE_CURL_MAX_RETRY:-3}
GITEA_RELEASE_CLEANUP_PRERELEASES=${GITEA_RELEASE_CLEANUP_PRERELEASES:-true}
GITEA_RELEASE_CLEANUP_KEPT_PRERELEASES=${GITEA_RELEASE_CLEANUP_KEPT_PRERELEASES:-3}
function check_dependencies {
assert_command_available 'curl'
assert_command_available 'jq'
}
function assert_command_available {
local command=$1
local command_path=$(which $command)
if [ -z "$command_path" ]; then
echo "The '$command' command could not be found. Please install it before using this script." 1>&2
exit 1
fi
}
function check_environment {
assert_environment GITEA_RELEASE_PROJECT
assert_environment GITEA_RELEASE_ORG
assert_environment GITEA_RELEASE_BASE_URL
}
function source_env_file {
if [ ! -f '.env' ]; then
return 0
fi
set -o allexport
source .env
set +o allexport
}
function assert_environment {
    # Ensure that the environment variable whose *name* is given as $1 is
    # non-empty; print an error and exit with status 1 otherwise.
    local name=$1
    # Indirect expansion: value of the variable named by $name.
    local value=${!name}
    if [ -z "$value" ]; then
        # Fix: the previous message used broken quoting ("The $"$name"...")
        # which printed a stray "$" instead of naming the variable cleanly.
        echo "The '$name' environment variable is empty." 1>&2
        exit 1
    fi
}
function ask_credentials {
if [ -z "$GITEA_RELEASE_USERNAME" ]; then
echo -n "Username: "
read GITEA_RELEASE_USERNAME
fi
if [ -z "$GITEA_RELEASE_PASSWORD" ]; then
echo -n "Password: "
stty -echo
read GITEA_RELEASE_PASSWORD
stty echo
echo
fi
}
function retrieve_version {
    # Use the explicitly provided GITEA_RELEASE_VERSION when set; otherwise
    # fall back to the latest reachable git tag (left empty when no tag
    # exists, letting create_release fall back to the commitish target).
    if [ ! -z "$GITEA_RELEASE_VERSION" ]; then
        return
    fi
    # "git describe" fails when the repository has no tag: tolerate it.
    # (The previous redundant self-assignment of GITEA_RELEASE_VERSION
    # has been removed — it was a no-op leftover.)
    set +e
    GITEA_RELEASE_VERSION=$(git describe --abbrev=0 --tags 2>/dev/null)
    set -e
}
function retrieve_commitish_target {
if [ ! -z "$GITEA_RELEASE_COMMITISH_TARGET" ]; then
return
fi
GITEA_RELEASE_COMMITISH_TARGET=$(git log -n 1 --pretty="format:%h")
}
function create_release {
local payload={}
payload=$(json_set "$payload" body "$GITEA_RELEASE_BODY" true)
payload=$(json_set "$payload" draft $GITEA_RELEASE_IS_DRAFT)
payload=$(json_set "$payload" name "\"${GITEA_RELEASE_NAME:-$GITEA_RELEASE_VERSION}\"")
payload=$(json_set "$payload" prerelease $GITEA_RELEASE_IS_PRERELEASE)
payload=$(json_set "$payload" tag_name "\"${GITEA_RELEASE_VERSION:-$GITEA_RELEASE_COMMITISH_TARGET}\"")
payload=$(json_set "$payload" target_commitish "\"$GITEA_RELEASE_COMMITISH_TARGET\"")
local existing_release=$(gitea_api "/repos/$GITEA_RELEASE_ORG/$GITEA_RELEASE_PROJECT/releases" -XGET | jq -e ".[] | select(.tag_name == \"${GITEA_RELEASE_VERSION}\") | .id")
if [ ! -z "${existing_release}" ]; then
gitea_api "/repos/$GITEA_RELEASE_ORG/$GITEA_RELEASE_PROJECT/releases/${existing_release}" -XDELETE
fi
local tmpfile=$(mktemp)
echo "$payload" > "$tmpfile"
gitea_api "/repos/$GITEA_RELEASE_ORG/$GITEA_RELEASE_PROJECT/releases" \
-H "Content-Type:application/json" \
-d "@$tmpfile"
rm -f "$tmpfile"
}
function json_set {
local data=$1
local key=$2
local value=$3
local use_raw_file=$4
if [ "$use_raw_file" != "true" ]; then
echo $data | jq -cr --argjson v "$value" --arg k "$key" '.[$k] = $v'
else
local tmpfile=$(mktemp)
echo "$value" > "$tmpfile"
echo $data | jq -cr --rawfile v "$tmpfile" --arg k "$key" '.[$k] = $v'
rm -f "$tmpfile"
fi
}
function upload_release_attachments {
local release_id="$1"
if [ -z "$GITEA_RELEASE_ATTACHMENTS" ]; then
set +e
GITEA_RELEASE_ATTACHMENTS="$(ls release/*.{tar.gz,zip} 2>/dev/null)"
set -e
fi
for file in $GITEA_RELEASE_ATTACHMENTS; do
local filename=$(basename "$file")
gitea_api "/repos/$GITEA_RELEASE_ORG/$GITEA_RELEASE_PROJECT/releases/$release_id/assets?name=$filename" \
-H "Content-Type:multipart/form-data" \
-F "attachment=@$file"
done
}
function gitea_api {
local path=$1
local args=${@:2}
curl -L \
--fail \
--ipv4 \
--progress-bar \
--retry "$GITEA_RELEASE_CURL_MAX_RETRY" \
-u "$GITEA_RELEASE_USERNAME:$GITEA_RELEASE_PASSWORD" \
$GITEA_RELEASE_CURL_ARGS \
${args} \
"$GITEA_RELEASE_BASE_URL/api/v1$path"
}
function clean_prereleases {
    # Delete old prereleases, keeping only the most recent
    # GITEA_RELEASE_CLEANUP_KEPT_PRERELEASES entries.
    # Disabled unless GITEA_RELEASE_CLEANUP_PRERELEASES is "true".
    if [ "$GITEA_RELEASE_CLEANUP_PRERELEASES" != "true" ]; then
        return
    fi
    local releases=$(gitea_api "/repos/$GITEA_RELEASE_ORG/$GITEA_RELEASE_PROJECT/releases")
    # Sort prereleases from newest to oldest and select every entry past the
    # number we want to keep.
    local to_delete=$(echo "$releases" | jq -r --arg index "$GITEA_RELEASE_CLEANUP_KEPT_PRERELEASES" '[.[] | select(.prerelease == true)] | sort_by(.created_at, .id) | reverse | .[$index | tonumber:] | .[].id')
    for release_id in $to_delete; do
        # Fix: replaced the stray debug "echo $to_delete" with an explicit,
        # per-release log message.
        echo "Deleting prerelease '$release_id'..."
        gitea_api "/repos/$GITEA_RELEASE_ORG/$GITEA_RELEASE_PROJECT/releases/$release_id" \
            -X DELETE \
            -H "Content-Type:application/json"
    done
}
function main {
check_dependencies
source_env_file
check_environment
ask_credentials
retrieve_commitish_target
retrieve_version
local release=$(create_release)
local release_id=$(echo "$release" | jq -r .id)
sleep 1 # Wait for release creation
upload_release_attachments "$release_id"
clean_prereleases
}
main

View File

@ -0,0 +1,54 @@
FROM reg.cadoles.com/proxy_cache/library/golang:1.15 as envtpl
ARG HTTP_PROXY=
ARG HTTPS_PROXY=
ARG http_proxy=
ARG https_proxy=
RUN apt-get update -y && apt-get install -y git
RUN git clone https://github.com/subfuzion/envtpl /src \
&& cd /src \
&& git checkout v1.0.0 \
&& CGO_ENABLED=0 GOOS=linux go build \
-ldflags "-X main.AppVersionMetadata=$(date -u +%s)" \
-a -installsuffix cgo -o ./bin/envtpl ./cmd/envtpl/.
FROM reg.cadoles.com/proxy_cache/library/alpine:3.13
ARG HTTP_PROXY=
ARG HTTPS_PROXY=
ARG http_proxy=
ARG https_proxy=
COPY --from=envtpl /src/bin/envtpl /usr/local/bin/envtpl
RUN apk add --no-cache \
nss \
freetype \
freetype-dev \
harfbuzz \
ca-certificates \
ttf-freefont \
nodejs \
npm \
chromium
RUN npm install -g lighthouse
RUN adduser -D lighthouse
COPY run-audit.sh /usr/local/bin/run-audit
RUN chmod +x /usr/local/bin/run-audit
COPY config.js.tmpl /home/lighthouse/config.js.tmpl
WORKDIR /home/lighthouse
RUN mkdir /home/lighthouse/reports
RUN chown -R lighthouse: /home/lighthouse
USER lighthouse
CMD /usr/local/bin/run-audit

View File

@ -0,0 +1,4 @@
module.exports = {
extends: 'lighthouse:default',
settings: {},
};

View File

@ -0,0 +1,18 @@
#!/bin/sh
# Run a Lighthouse audit against $LIGHTHOUSE_URL and write the JSON and HTML
# reports into the ./reports directory.
set -x
# Render the Lighthouse configuration from its template using the environment.
envtpl -o config.js /home/lighthouse/config.js.tmpl
# Start from an empty reports directory.
mkdir -p reports
rm -f reports/*
cd reports
# The chrome flags are required to run headless inside a container.
lighthouse \
"$LIGHTHOUSE_URL" \
--no-enable-error-reporting \
--chrome-flags="--headless --disable-dev-shm-usage --no-sandbox --disable-gpu" \
--config=../config.js \
--output json --output html \
--output-path=lighthouse

View File

@ -0,0 +1,60 @@
FROM reg.cadoles.com/proxy_cache/library/golang:1.13 as envtpl
ARG HTTP_PROXY=
ARG HTTPS_PROXY=
ARG http_proxy=
ARG https_proxy=
RUN apt-get update -y && apt-get install -y git
RUN git clone https://github.com/subfuzion/envtpl /src \
&& cd /src \
&& git checkout v1.0.0 \
&& CGO_ENABLED=0 GOOS=linux go build \
-ldflags "-X main.AppVersionMetadata=$(date -u +%s)" \
-a -installsuffix cgo -o ./bin/envtpl ./cmd/envtpl/.
FROM alpine:3.16
ARG HTTP_PROXY=
ARG HTTPS_PROXY=
ARG http_proxy=
ARG https_proxy=
COPY --from=envtpl /src/bin/envtpl /usr/local/bin/envtpl
RUN apk add --no-cache \
nss \
freetype \
freetype-dev \
harfbuzz \
ca-certificates \
ttf-freefont \
nodejs \
npm \
chromium \
bash \
curl \
openssl \
git
RUN curl -k https://forge.cadoles.com/Cadoles/Jenkins/raw/branch/master/resources/com/cadoles/common/add-letsencrypt-ca.sh | bash
RUN PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true npm install -g pa11y git+https://forge.cadoles.com/rmasson/junit-reporter-fork.git
RUN adduser -D pa11y
COPY run-audit.sh /usr/local/bin/run-audit
RUN chmod +x /usr/local/bin/run-audit
COPY patty.json.tmpl /home/pa11y/patty.json.tmpl
WORKDIR /home/pa11y
RUN mkdir /home/pa11y/reports
RUN chown -R pa11y: /home/pa11y
USER pa11y
CMD /usr/local/bin/run-audit

View File

@ -0,0 +1,15 @@
{
"chromeLaunchConfig": {
"args": ["--disable-dev-shm-usage", "--no-sandbox", "--disable-gpu"],
"ignoreHTTPSErrors": true
},
"headers": {
{{if not (empty .PA11Y_USERNAME)}}
{{ $credentials := print .PA11Y_USERNAME ":" .PA11Y_PASSWORD }}
"Authorization": "Basic {{b64enc $credentials}}" {{if not (empty .PA11Y_COOKIE)}},{{end}}
{{end}}
{{if not (empty .PA11Y_COOKIE)}}
"Cookie": "{{print .PA11Y_COOKIE}}"
{{end}}
}
}

View File

@ -0,0 +1,31 @@
#!/bin/bash
# Run a pa11y accessibility audit against $PA11Y_URL and write the report
# into the ./reports directory. The pa11y configuration is rendered from a
# template with envtpl so that credentials/cookies come from the environment.
envtpl -o patty.json /home/pa11y/patty.json.tmpl
# Start from an empty reports directory.
mkdir -p reports
rm -f reports/*
cd reports
# pa11y drives the system chromium through puppeteer.
export PUPPETEER_EXECUTABLE_PATH=$(which chromium-browser)
export PA11Y_REPORTER="${PA11Y_REPORTER:-html}"
export PA11Y_STANDARD="${PA11Y_STANDARD:-WCAG2AA}"
export PA11Y_IGNORE="${PA11Y_IGNORE}"
PA11Y_ARGS=""
if [ "${PA11Y_INCLUDE_WARNINGS}" == 'true' ]; then
PA11Y_ARGS="${PA11Y_ARGS} --include-warnings"
fi
if [ "${PA11Y_INCLUDE_NOTICES}" == 'true' ]; then
PA11Y_ARGS="${PA11Y_ARGS} --include-notices"
fi
# "|| exit 0": accessibility findings must not fail the job — the generated
# report is the deliverable, not the exit code.
pa11y \
--config ../patty.json \
${PA11Y_ARGS} \
--reporter "${PA11Y_REPORTER}" \
--standard "${PA11Y_STANDARD}" \
--ignore "${PA11Y_IGNORE}" \
"$PA11Y_URL" || exit 0

View File

@ -0,0 +1,25 @@
{{ $serviceName := index ( .Env.IMAGE_NAME | strings.Split "/" | coll.Reverse ) 0 }}
name: "cadoles-pod-{{ $serviceName }}"
arch: amd64
platform: linux
version: "{{ strings.TrimPrefix "v" ( getenv "IMAGE_TAG" "latest" ) }}"
version_schema: none
version_metadata: git
section: "{{ getenv "PACKAGE_SECTION" "default" }}"
priority: "{{ getenv "PACKAGE_PRIORITY" "optional" }}"
maintainer: "{{ getenv "PACKAGE_MAINTAINER" "contact@cadoles.com" }}"
description: "{{ getenv "PACKAGE_DESCRIPTION" "" }}"
homepage: "{{ getenv "PACKAGE_HOMEPAGE" "https://forge.cadoles.com" }}"
license: "{{ getenv "PACKAGE_LICENCE" "GPL-3.0" }}"
depends:
- podman
scripts:
postinstall: post-install.sh
contents:
- packager: deb
src: pod.service
dst: "/usr/lib/systemd/system/cadoles-pod-{{ $serviceName }}.service"
- packager: deb
src: pod.conf
dst: /etc/cadoles-pod-{{ $serviceName }}.conf
type: config|noreplace

View File

@ -0,0 +1 @@
PODMAN_ARGS="{{ getenv "PODMAN_ARGS" "" }}"

View File

@ -0,0 +1,24 @@
[Unit]
Description={{ .Env.IMAGE_NAME }} pod service
Wants=network-online.target
After=network-online.target
RequiresMountsFor=/run/containers/storage
[Service]
Type=simple
Environment=PODMAN_SYSTEMD_UNIT=%n
EnvironmentFile=-/etc/cadoles-pod-{{ .Env.IMAGE_NAME }}.conf
Environment=IMAGE_NAME={{ .Env.IMAGE_NAME }} IMAGE_TAG={{ .Env.IMAGE_TAG }}
PassEnvironment=PODMAN_ARGS IMAGE_NAME IMAGE_TAG
Restart=on-failure
TimeoutStopSec=70
{{ if getenv "SYSTEMD_EXEC_STARTPRE" "" }}
ExecStartPre={{ .Env.SYSTEMD_EXEC_STARTPRE }}
{{ end }}
ExecStart=/bin/sh -c "podman run ${PODMAN_ARGS} '${IMAGE_NAME}:${IMAGE_TAG}'"
{{ if getenv "SYSTEMD_EXEC_STARTPOST" "" }}
ExecStartPost={{ .Env.SYSTEMD_EXEC_STARTPOST }}
{{ end }}
[Install]
WantedBy=default.target

View File

@ -0,0 +1,79 @@
#!/bin/sh
# Adapted from https://nfpm.goreleaser.com/tips/
use_systemctl="True"
systemd_version=0
if ! command -V systemctl >/dev/null 2>&1; then
use_systemctl="False"
else
systemd_version=$( systemctl --version | head -1 | sed 's/systemd //g' | cut -d' ' -f1 )
fi
SERVICE_NAME="cadoles-pod-{{ .Env.IMAGE_NAME }}"
cleanup() {
if [ "${use_systemctl}" = "False" ]; then
rm -f /usr/lib/systemd/system/$SERVICE_NAME.service
else
rm -f /etc/chkconfig/$SERVICE_NAME
rm -f /etc/init.d/$SERVICE_NAME
fi
}
cleanInstall() {
if [ "${use_systemctl}" = "False" ]; then
if command -V chkconfig >/dev/null 2>&1; then
chkconfig --add $SERVICE_NAME
fi
service $SERVICE_NAME restart ||:
else
if [ "${systemd_version}" -lt 231 ]; then
printf "\033[31m systemd version %s is less then 231, fixing the service file \033[0m\n" "${systemd_version}"
sed -i "s/=+/=/g" /usr/lib/systemd/system/$SERVICE_NAME.service
fi
systemctl daemon-reload ||:
systemctl unmask $SERVICE_NAME ||:
systemctl preset $SERVICE_NAME ||:
systemctl enable $SERVICE_NAME ||:
systemctl restart $SERVICE_NAME ||:
fi
}
upgrade() {
if [ "${use_systemctl}" = "False" ]; then
service $SERVICE_NAME restart ||:
else
if [ "${systemd_version}" -lt 231 ]; then
printf "\033[31m systemd version %s is less then 231, fixing the service file \033[0m\n" "${systemd_version}"
sed -i "s/=+/=/g" /usr/lib/systemd/system/$SERVICE_NAME.service
fi
systemctl daemon-reload ||:
systemctl restart $SERVICE_NAME ||:
fi
echo 'Cleaning up unused images...'
podman image prune -f --filter "reference={{ .Env.IMAGE_NAME }}"
}
action="$1"
if [ "$1" = "configure" ] && [ -z "$2" ]; then
action="install"
elif [ "$1" = "configure" ] && [ -n "$2" ]; then
action="upgrade"
fi
case "$action" in
"1" | "install")
cleanInstall
;;
"2" | "upgrade")
upgrade
;;
*)
cleanInstall
;;
esac
cleanup

View File

@ -0,0 +1,26 @@
ARG JQ_VERSION=1.6
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get install -y \
wget tar curl ca-certificates \
openssl bash git unzip build-essential gnupg
COPY add-letsencrypt-ca.sh /root/add-letsencrypt-ca.sh
RUN bash /root/add-letsencrypt-ca.sh \
&& rm -f /root/add-letsencrypt-ca.sh
# Install JQ
RUN wget -O /usr/local/bin/jq https://github.com/stedolan/jq/releases/download/jq-${JQ_VERSION}/jq-linux64 \
&& chmod +x /usr/local/bin/jq
# Install Docker client
RUN install -m 0755 -d /etc/apt/keyrings \
&& curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /etc/apt/keyrings/docker.gpg \
&& chmod a+r /etc/apt/keyrings/docker.gpg \
&& echo \
"deb [arch="$(dpkg --print-architecture)" signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \
"$(. /etc/os-release && echo "$VERSION_CODENAME")" stable" | \
tee /etc/apt/sources.list.d/docker.list > /dev/null \
&& apt-get update \
&& apt-get install -y docker-ce-cli

View File

@ -0,0 +1,41 @@
<?php
$finder = PhpCsFixer\Finder::create()
->in(__DIR__.'/src')
->name('*.php')
;
return (new PhpCsFixer\Config())
->setRules([
'@Symfony' => true,
'concat_space' => ['spacing' => 'none'],
'array_syntax' => ['syntax' => 'short'],
'combine_consecutive_issets' => true,
'explicit_indirect_variable' => true,
'no_useless_return' => true,
'ordered_imports' => true,
'no_unused_imports' => true,
'no_spaces_after_function_name' => true,
'no_spaces_inside_parenthesis' => true,
'ternary_operator_spaces' => true,
'class_definition' => ['single_line' => true],
'whitespace_after_comma_in_array' => true,
'phpdoc_add_missing_param_annotation' => ['only_untyped' => true],
'phpdoc_order' => true,
'phpdoc_types_order' => [
'null_adjustment' => 'always_last',
'sort_algorithm' => 'alpha',
],
'phpdoc_no_empty_return' => false,
'phpdoc_summary' => false,
'general_phpdoc_annotation_remove' => [
'annotations' => [
'expectedExceptionMessageRegExp',
'expectedException',
'expectedExceptionMessage',
'author',
],
],
])
->setFinder($finder)
;

View File

@ -0,0 +1,47 @@
ARG PHP_SECURITY_CHECKER_VERSION=1.0.0
ARG JQ_VERSION=1.6
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get install -y \
wget tar curl ca-certificates \
openssl bash git unzip \
php-cli php-dom php-mbstring php-ctype php-xml php-iconv
COPY add-letsencrypt-ca.sh /root/add-letsencrypt-ca.sh
RUN bash /root/add-letsencrypt-ca.sh \
&& rm -f /root/add-letsencrypt-ca.sh
RUN wget -O /usr/local/bin/jq https://github.com/stedolan/jq/releases/download/jq-${JQ_VERSION}/jq-linux64 \
&& chmod +x /usr/local/bin/jq
# Install local-php-security-checker
RUN wget -O /usr/local/bin/local-php-security-checker https://github.com/fabpot/local-php-security-checker/releases/download/v${PHP_SECURITY_CHECKER_VERSION}/local-php-security-checker_${PHP_SECURITY_CHECKER_VERSION}_linux_amd64 \
&& chmod +x /usr/local/bin/local-php-security-checker
# Install junit2md
RUN junit2md_download_url=$(curl "https://forge.cadoles.com/api/v1/repos/Cadoles/junit2md/releases" -H "accept:application/json" | jq -r 'sort_by(.published_at) | reverse | .[0] | .assets[] | select(.name == "junit2md-linux-amd64.tar.gz") | .browser_download_url') \
&& wget -O junit2md-linux-amd64.tar.gz "$junit2md_download_url" \
&& tar -xzf junit2md-linux-amd64.tar.gz \
&& cp junit2md-linux-amd64/junit2md /usr/local/bin/junit2md
# Install composer
RUN wget https://raw.githubusercontent.com/composer/getcomposer.org/76a7060ccb93902cd7576b67264ad91c8a2700e2/web/installer -O - -q | php -- --force --install-dir /usr/local/bin --filename composer \
&& chmod +x /usr/local/bin/composer
# Install php-cs-fixer
RUN mkdir --parents /tools/php-cs-fixer \
&& composer require --working-dir=/tools/php-cs-fixer friendsofphp/php-cs-fixer \
&& ln -s /tools/php-cs-fixer/vendor/bin/php-cs-fixer /usr/local/bin/php-cs-fixer
# Install php-stan
RUN mkdir --parents /tools/phpstan \
&& composer require --working-dir=/tools/phpstan phpstan/phpstan \
&& ln -s /tools/phpstan/vendor/bin/phpstan /usr/local/bin/phpstan \
&& composer require --working-dir=/tools/phpstan phpstan/phpstan-symfony \
&& composer require --working-dir=/tools/phpstan phpstan/phpstan-doctrine
# Install Symfony
RUN curl -1sLf 'https://dl.cloudsmith.io/public/symfony/stable/setup.deb.sh' | bash \
&& apt update \
&& DEBIAN_FRONTEND=noninteractive apt-get install -y symfony-cli

View File

@ -0,0 +1,4 @@
includes:
- /tools/phpstan/vendor/phpstan/phpstan-symfony/extension.neon
- /tools/phpstan/vendor/phpstan/phpstan-doctrine/extension.neon
- /tools/phpstan/vendor/phpstan/phpstan-doctrine/rules.neon

View File

@ -1,4 +1,4 @@
FROM alpine:3.8
FROM reg.cadoles.com/proxy_cache/library/alpine:3.12
ARG HTTP_PROXY=
ARG HTTPS_PROXY=
@ -7,9 +7,11 @@ ARG https_proxy=
ARG TAMARIN_VERSION=develop
RUN apk add --no-cache git docker python3 bash
RUN apk add --no-cache git docker python3 bash openssl curl
RUN git clone https://forge.cadoles.com/Cadoles/Tamarin /tamarin\
RUN curl -k https://forge.cadoles.com/Cadoles/Jenkins/raw/branch/master/resources/com/cadoles/common/add-letsencrypt-ca.sh | bash
RUN git clone http://forge.cadoles.com/Cadoles/Tamarin /tamarin\
&& cd /tamarin\
&& git checkout ${TAMARIN_VERSION}
@ -23,4 +25,4 @@ VOLUME /dist
ADD run-tamarin.sh /usr/local/bin/run-tamarin
RUN chmod +x /usr/local/bin/run-tamarin
CMD /usr/local/bin/run-tamarin
CMD /usr/local/bin/run-tamarin

View File

@ -1,6 +1,6 @@
#!/usr/bin/env bash
set -e
set -xe
cp -r . /src
@ -17,6 +17,7 @@ https_proxy=${https_proxy}
EOF
[ "${TAMARIN_FORCE_REBUILD}" == "true" ] && PACKAGE_ARGS="${PACKAGE_ARGS} --rebuild"
[ "${TAMARIN_PREPARE_ONLY}" == "true" ] && PACKAGE_ARGS="${PACKAGE_ARGS} --prepare-only"
[ ! -z "${TAMARIN_PACKAGE_ARCH}" ] && PACKAGE_ARGS="${PACKAGE_ARGS} -a ${TAMARIN_PACKAGE_ARCH}"
[ ! -z "${TAMARIN_BASE_IMAGE}" ] && PACKAGE_ARGS="${PACKAGE_ARGS} -b ${TAMARIN_BASE_IMAGE}"
[ ! -z "${TAMARIN_PROFILE}" ] && PACKAGE_ARGS="${PACKAGE_ARGS} -p ${TAMARIN_PROFILE}"
@ -25,4 +26,8 @@ EOF
DEST_DIR=${TAMARIN_DEST_DIR:-dist}
mkdir -p ${DEST_DIR}
cp -r /dist/* ./${DEST_DIR}
for f in /dist/*; do
if [ -e "$f" ]; then
cp "$f" ./${DEST_DIR}
fi
done

View File

@ -0,0 +1,56 @@
{{- if . }}
{{- range . }}
<h3>Target <code>{{ escapeXML .Target }}</code></h3>
{{- if (eq (len .Vulnerabilities) 0) }}
<h4>No Vulnerabilities found</h4>
{{- else }}
<h4>Vulnerabilities ({{ len .Vulnerabilities }})</h4>
<table>
<tr>
<th>Package</th>
<th>ID</th>
<th>Severity</th>
<th>Installed Version</th>
<th>Fixed Version</th>
</tr>
{{- range .Vulnerabilities }}
<tr>
<td><code>{{ escapeXML .PkgName }}</code></td>
<td>{{ escapeXML .VulnerabilityID }}</td>
<td>{{ escapeXML .Severity }}</td>
<td>{{ escapeXML .InstalledVersion }}</td>
<td>{{ escapeXML .FixedVersion }}</td>
</tr>
{{- end }}
</table>
{{- end }}
{{- if (eq (len .Misconfigurations ) 0) }}
<h4>No Misconfigurations found</h4>
{{- else }}
<h4>Misconfigurations</h4>
<table>
<tr>
<th>Type</th>
<th>ID</th>
<th>Check</th>
<th>Severity</th>
<th>Message</th>
</tr>
{{- range .Misconfigurations }}
<tr>
<td>{{ escapeXML .Type }}</td>
<td>{{ escapeXML .ID }}</td>
<td>{{ escapeXML .Title }}</td>
<td>{{ escapeXML .Severity }}</td>
<td>
{{ escapeXML .Message }}
<br><a href={{ escapeXML .PrimaryURL | printf "%q" }}>{{ escapeXML .PrimaryURL }}</a></br>
</td>
</tr>
{{- end }}
</table>
{{- end }}
{{- end }}
{{- else }}
<h3>Trivy Returned Empty Report</h3>
{{- end }}

View File

@ -0,0 +1,68 @@
# --- Stage 1: build the `envtpl` template renderer from source --------------
FROM reg.cadoles.com/proxy_cache/library/golang:1.11.4 as envtpl
# Proxy build arguments (passed through by the CI environment when needed)
ARG HTTP_PROXY=
ARG HTTPS_PROXY=
ARG http_proxy=
ARG https_proxy=
RUN apt-get update -y && apt-get install -y git
# Build a static (CGO-disabled) envtpl binary pinned to tag v1.0.0
RUN git clone https://github.com/subfuzion/envtpl /src \
    && cd /src \
    && git checkout v1.0.0 \
    && CGO_ENABLED=0 GOOS=linux go build \
    -ldflags "-X main.AppVersionMetadata=$(date -u +%s)" \
    -a -installsuffix cgo -o ./bin/envtpl ./cmd/envtpl/.
# --- Stage 2: w3af runtime image --------------------------------------------
FROM reg.cadoles.com/proxy_cache/library/alpine:3.9
ARG HTTP_PROXY=
ARG HTTPS_PROXY=
ARG http_proxy=
ARG https_proxy=
# Bring the statically-built template renderer into the runtime image
COPY --from=envtpl /src/bin/envtpl /usr/local/bin/envtpl
# w3af build and runtime dependencies (w3af is Python 2 based)
RUN apk --no-cache add \
    build-base \
    git \
    libffi-dev \
    libxslt-dev \
    linux-headers \
    openssl-dev \
    py-pillow \
    py-pip \
    py-setuptools \
    python \
    python-dev \
    sqlite-dev \
    yaml-dev \
    sudo \
    nodejs \
    npm
# Run the audit as an unprivileged user
RUN adduser -D w3af
RUN git clone --depth=1 \
    --branch=master \
    https://github.com/andresriancho/w3af.git /home/w3af/w3af \
    && rm -rf /home/w3af/w3af/.git \
    && chown -R w3af /home/w3af/w3af
# First console run generates /tmp/w3af_dependency_install.sh when Python
# dependencies are missing; sourcing it installs them.
RUN cd /home/w3af/w3af \
    && ( ./w3af_console || . /tmp/w3af_dependency_install.sh )
COPY run-audit.sh /usr/local/bin/run-audit
RUN chmod +x /usr/local/bin/run-audit
USER w3af
WORKDIR /home/w3af/w3af
COPY audit.w3af.tmpl /home/w3af/w3af/audit.w3af.tmpl
# Reset proxy-related variables so they do not leak into the runtime env
ENV HTTP_PROXY=
ENV HTTPS_PROXY=
ENV http_proxy=
ENV https_proxy=
CMD ["./w3af_console"]

View File

@ -0,0 +1,89 @@
# -----------------------------------------------------------------------------------------------------------
# W3AF AUDIT SCRIPT FOR WEB APPLICATION
# -----------------------------------------------------------------------------------------------------------
# Configure HTTP settings
http-settings
set timeout {{ default 30 .W3AF_TIMEOUT }}
{{ if .W3AF_BASIC_AUTH_USERNAME }}
set basic_auth_user {{ .W3AF_BASIC_AUTH_USERNAME }}
set basic_auth_passwd {{ .W3AF_BASIC_AUTH_PASSWORD }}
{{if .W3AF_BASIC_AUTH_DOMAIN }}
set basic_auth_domain {{ .W3AF_BASIC_AUTH_DOMAIN }}
{{end}}
{{ end }}
back
# Configure scanner global behaviors
misc-settings
set max_discovery_time {{ default 10 .W3AF_MAX_DISCOVERY_TIME }}
set fuzz_cookies True
set fuzz_form_files True
set fuzz_url_parts True
set fuzz_url_filenames True
back
profiles
use {{ default "audit_high_risk" .W3AF_PROFILE }}
back
plugins
# Configure rfi plugin
audit rfi
audit config rfi
set listen_address "0.0.0.0"
set listen_port 44449
set use_w3af_site True
back
{{ if .W3AF_AUTH_FORM_URL }}
# Configure target authentication
auth detailed
auth config detailed
set username '{{ .W3AF_AUTH_FORM_USERNAME }}'
set password '{{ .W3AF_AUTH_FORM_PASSWORD }}'
set method POST
set auth_url {{ .W3AF_AUTH_FORM_URL }}
set username_field '{{ default "username" .W3AF_AUTH_FORM_USERNAME_FIELD }}'
set password_field '{{ default "password" .W3AF_AUTH_FORM_PASSWORD_FIELD }}'
set data_format '{{ default "%u=%U&%p=%P" .W3AF_AUTH_FORM_DATA_FORMAT }}'
set check_url {{ .W3AF_AUTH_FORM_CHECK_URL }}
set check_string '{{ default "connected" .W3AF_AUTH_FORM_CHECK_STRING }}'
set follow_redirects True
back
{{end}}
{{ if .W3AF_AUTH_LOGOUT_URL_REGEX }}
crawl web_spider
crawl config web_spider
set ignore_regex {{ .W3AF_AUTH_LOGOUT_URL_REGEX }}
set only_forward {{ default "True" .W3AF_SPIDER_ONLY_FORWARD }}
back
{{end}}
# Configure reporting in order to generate an HTML report
output console, html_file
output config html_file
set output_file reports/report{{- if .W3AF_REPORT_SUFFIX -}}_{{- .W3AF_REPORT_SUFFIX -}}{{- end -}}.html
set verbose {{ default "False" .W3AF_VERBOSE }}
back
output config console
set verbose {{ default "False" .W3AF_VERBOSE }}
back
back
# Set target information, do a cleanup and run the scan
target
set target {{ .W3AF_TARGET_URL }}
set target_os {{ default "unix" .W3AF_TARGET_OS }}
set target_framework {{ default "unknown" .W3AF_TARGET_FRAMEWORK }}
back
back
cleanup
start
exit

View File

@ -0,0 +1,6 @@
#!/bin/sh
# Render the W3AF audit script from its template, then run the audit.
# Fail fast: without this, a failed `envtpl` rendering would still launch
# w3af with a stale or missing audit.w3af configuration.
set -e
# Start from an empty reports directory so successive runs do not mix results
mkdir -p reports
rm -f reports/*
# Render audit.w3af from the template using the W3AF_* environment variables
envtpl -o audit.w3af /home/w3af/w3af/audit.w3af.tmpl
# Run w3af non-interactively on the generated script
# (-s: script file; -y/-n: presumably auto-confirm prompts and skip the
# update check — TODO confirm against `w3af_console --help`)
/home/w3af/w3af/w3af_console -y -n -s audit.w3af

213
vars/audit.groovy Normal file
View File

@ -0,0 +1,213 @@
// Parameterized pipeline running a W3AF security audit against a target URL
/**
 * Shared-library entry point: declares a parameterized pipeline that builds
 * a W3AF Docker image, audits the target URL inside the container and
 * notifies Rocket.Chat when MEDIUM/HIGH vulnerabilities are found.
 */
def call() {
    pipeline {
        agent {
            label 'common'
        }
        parameters {
            string(
                name: 'targetUrl',
                description: 'URL cible pour l\'audit'
            )
            string(
                name: 'basicAuthUsername',
                description: "Nom d'utilisateur pour l'authentication 'Basic Auth' (si besoin)",
                defaultValue: ''
            )
            password(
                name: 'basicAuthPassword',
                description: "Mot de passe pour l'authentication 'Basic Auth' (si besoin)",
                defaultValue: ''
            )
            string(
                name: 'basicAuthDomain',
                description: "Nom de domaine pour l'authentication 'Basic Auth' (si besoin)",
                defaultValue: ''
            )
            string(
                name: 'authFormUrl',
                description: "URL du formulaire d'authentication (si besoin)",
                defaultValue: ''
            )
            string(
                name: 'authFormUsername',
                description: "Nom d'utilisateur du formulaire d'authentication (si besoin)",
                defaultValue: ''
            )
            password(
                name: 'authFormPassword',
                description: "Mot de passe du formulaire d'authentication (si besoin)",
                defaultValue: ''
            )
            string(
                name: 'authFormCheckUrl',
                description: "URL de vérification de la réussite de l'authentication (si besoin)",
                defaultValue: ''
            )
            string(
                name: 'authFormCheckString',
                description: "Chaine de caractères à rechercher pour vérifier la réussite de l'authentication (si besoin)",
                defaultValue: ''
            )
            string(
                name: 'authFormUsernameField',
                description: "Nom du champ 'nom utilisateur' du formulaire d'authentication (si besoin)",
                defaultValue: ''
            )
            string(
                name: 'authFormPasswordField',
                description: "Nom du champ 'mot de passe' du formulaire d'authentication (si besoin)",
                defaultValue: ''
            )
            string(
                name: 'authFormDataFormat',
                description: "Patron de formatage des données POST du formulaire d'authentification (si besoin). Exemple: username=%U&password=%P",
                defaultValue: ''
            )
            string(
                name: 'verbose',
                description: "Mode verbeux",
                defaultValue: 'False'
            )
            string(
                name: 'auditTimeout',
                description: "Délai maximum pour la réalisation de l'audit (en minutes)",
                defaultValue: '60'
            )
        }
        stages {
            stage("Check parameters") {
                steps {
                    script {
                        // targetUrl is the only mandatory parameter
                        if (!params.targetUrl?.trim()) {
                            error("L'URL cible n'est pas définie !")
                        }
                    }
                }
            }
            stage("Run W3AF audit") {
                steps {
                    script {
                        def w3afImage = buildDockerImage()
                        // Pass the audit configuration to the container through
                        // W3AF_* environment variables consumed by the template
                        def dockerArgs = """
                        -e W3AF_TARGET_URL='${params.targetUrl}'
                        -e W3AF_BASIC_AUTH_USERNAME='${params.basicAuthUsername}'
                        -e W3AF_BASIC_AUTH_PASSWORD='${params.basicAuthPassword}'
                        -e W3AF_BASIC_AUTH_DOMAIN='${params.basicAuthDomain}'
                        -e W3AF_AUTH_FORM_URL='${params.authFormUrl}'
                        -e W3AF_AUTH_FORM_USERNAME='${params.authFormUsername}'
                        -e W3AF_AUTH_FORM_PASSWORD='${params.authFormPassword}'
                        -e W3AF_AUTH_FORM_DATA_FORMAT='${params.authFormDataFormat}'
                        -e W3AF_AUTH_FORM_CHECK_URL='${params.authFormCheckUrl}'
                        -e W3AF_AUTH_FORM_CHECK_STRING='${params.authFormCheckString}'
                        -e W3AF_AUTH_FORM_USERNAME_FIELD='${params.authFormUsernameField}'
                        -e W3AF_AUTH_FORM_PASSWORD_FIELD='${params.authFormPasswordField}'
                        -e W3AF_VERBOSE='${params.verbose}'
                        """
                        // Abort the audit when it exceeds auditTimeout minutes
                        timeout(params.auditTimeout.toInteger()) {
                            w3afImage.inside(dockerArgs) {
                                sh 'mkdir -p reports'
                                sh 'rm -f reports/*'
                                sh 'envtpl -o audit.w3af /home/w3af/w3af/audit.w3af.tmpl'
                                sh '/home/w3af/w3af/w3af_console -y -n -s audit.w3af'
                            }
                        }
                    }
                }
            }
            stage("Check discovered vulnerabilities") {
                steps {
                    script {
                        // Crude severity count: occurrences of the severity
                        // keywords in the generated HTML report
                        def totalMediumVulnerabilities = sh(
                            script: "grep 'MEDIUM' reports/report.html | wc -l",
                            returnStdout: true
                        ).toInteger()
                        def totalHighVulnerabilities = sh(
                            script: "grep 'HIGH' reports/report.html | wc -l",
                            returnStdout: true
                        ).toInteger()
                        if (totalHighVulnerabilities == 0 && totalMediumVulnerabilities == 0) {
                            return
                        }
                        // Notify Rocket.Chat, mentioning the user who started
                        // the build (or @here when unknown)
                        wrap([$class: 'BuildUser']) {
                            rocketSend (
                                avatar: 'https://jenkins.cadol.es/static/b5f67753/images/headshot.png',
                                message: """
                                Des vulnérabilités ont été trouvées lors de l'audit de l'URL `${params.targetUrl}`:

                                - Criticité HAUTE: ${totalHighVulnerabilities} vulnérabilité(s) trouvée(s)
                                - Criticité MOYENNE: ${totalMediumVulnerabilities} vulnérabilité(s) trouvée(s)

                                [Voir le job](${env.RUN_DISPLAY_URL})

                                @${env.BUILD_USER_ID ? env.BUILD_USER_ID : 'here'}
                                """.stripIndent(),
                                rawMessage: true
                            )
                        }
                    }
                }
            }
        }
        post {
            always {
                // Always expose the HTML report, even when the audit failed
                publishHTML target: [
                    allowMissing: true,
                    alwaysLinkToLastBuild: false,
                    keepAll: true,
                    reportDir: 'reports',
                    reportFiles: 'report.html',
                    reportName: "Rapport d'audit"
                ]
            }
            failure {
                wrap([$class: 'BuildUser']) {
                    rocketSend (
                        avatar: 'https://jenkins.cadol.es/static/b5f67753/images/headshot.png',
                        message: """
                        Le test de sécurité pour `${params.targetUrl}` a échoué:

                        [Voir le job](${env.RUN_DISPLAY_URL})

                        @${env.BUILD_USER_ID ? env.BUILD_USER_ID : 'here'}
                        """.stripIndent(),
                        rawMessage: true
                    )
                }
            }
        }
    }
}
/**
 * Builds the w3af audit Docker image from the library's bundled resources
 * (Dockerfile, audit template and runner script) inside a `.w3af` work
 * directory, and returns the built image handle.
 */
def buildDockerImage() {
    dir ('.w3af') {
        def resourceFiles = [
            'com/cadoles/w3af/audit.w3af.tmpl',
            'com/cadoles/w3af/Dockerfile',
            'com/cadoles/w3af/run-audit.sh'
        ];
        // Materialize each library resource next to the Dockerfile
        for (res in resourceFiles) {
            def fileContent = libraryResource res
            def fileName = res.substring(res.lastIndexOf("/")+1)
            writeFile file:fileName, text:fileContent
        }
        // Derive a registry-safe tag from the job name. Fix: use the
        // charset-aware decode(String, String) overload — the one-arg form is
        // deprecated and depends on the platform default charset.
        def safeJobName = URLDecoder.decode(env.JOB_NAME, 'UTF-8').toLowerCase().replace('/', '-').replace(' ', '-')
        def imageTag = "${safeJobName}-${env.BUILD_ID}"
        return docker.build("w3af:${imageTag}", ".")
    }
}

245
vars/container.groovy Normal file
View File

@ -0,0 +1,245 @@
/**
 * Builds, validates and (optionally) publishes a Docker image to the Cadoles
 * registry (by default).
 *
 * Available options:
 *
 * - dockerfile - String - Path to the Dockerfile used to build the image, default "./Dockerfile"
 * - contextDir - String - Directory used as the build "context", default "./"
 * - imageName - String - Name of the image to build, default ""
 * - imageTags - String - Tag(s) applied to the image after build, default: tags generated by utils.getProjectVersionTags()
 * - gitCredentialsId - String - Jenkins credentials id used to clone the Git repository, default "forge-jenkins"
 * - dockerRepository - String - Hostname of the Docker registry the image is published to, default "reg.cadoles.com"
 * - dockerRepositoryCredentialsId - String - Jenkins credentials id used to push the image, default "reg.cadoles.com-jenkins"
 * - dryRun - Boolean - Disable/enable actual publication of the image, default "true"
 * - skipVerifications - Boolean - Disable/enable the image quality/security verification steps, default "false"
 */
String buildAndPublishImage(Map options = [:]) {
    String dockerfile = options.get('dockerfile', './Dockerfile')
    String contextDir = options.get('contextDir', '.')
    String imageName = options.get('imageName', '')
    String gitRef = sh(returnStdout: true, script: 'git describe --always').trim()
    // Default tags: project version tags plus a floating "<channel>-latest"
    List<String> defaultImageTags = utils.getProjectVersionTags() + [ "${utils.getProjectVersionDefaultChannel()}-latest" ]
    List<String> imageTags = options.get('imageTags', defaultImageTags)
    // Handle legacy imageTag parameter
    if (options.containsKey('imageTag')) {
        imageTags = [ options.get("imageTag", gitRef) ]
    }
    String gitCredentialsId = options.get('gitCredentialsId', 'forge-jenkins')
    String dockerRepository = options.get('dockerRepository', 'reg.cadoles.com')
    String dockerRepositoryCredentialsId = options.get('dockerRepositoryCredentialsId', 'reg.cadoles.com-jenkins')
    Boolean dryRun = options.get('dryRun', true)
    // Fix: the documented option name is 'skipVerifications', but only the
    // misspelled 'skipVerification' key was ever read. Accept both, with the
    // documented name taking precedence, to stay backward compatible.
    Boolean skipVerifications = options.get('skipVerifications', options.get('skipVerification', false))
    // For pull-request builds, strip the branch component from the job name
    // to recover the "<org>/<repo>" path expected by the Gitea API
    String projectRepository = env.JOB_NAME
    if (env.BRANCH_NAME ==~ /^PR-.*$/) {
        projectRepository = env.JOB_NAME - "/${env.JOB_BASE_NAME}"
    }
    projectRepository = options.get('projectRepository', projectRepository)
    withCredentials([
        usernamePassword([
            credentialsId: dockerRepositoryCredentialsId,
            usernameVariable: 'HUB_USERNAME',
            passwordVariable: 'HUB_PASSWORD'
        ]),
    ]) {
        stage('Validate Dockerfile with Hadolint') {
            utils.when(!skipVerifications) {
                runHadolintCheck(dockerfile, projectRepository)
            }
        }
        // The image is built once under the first tag; the other tags are
        // aliases added at publish time
        String primaryImageTag = imageTags[0]
        stage("Build image '${imageName}:${primaryImageTag}'") {
            git.withHTTPCredentials(gitCredentialsId) {
                sh """
                docker build \
                    --build-arg="GIT_USERNAME=${env.GIT_USERNAME}" \
                    --build-arg="GIT_PASSWORD=${env.GIT_PASSWORD}" \
                    -t '${imageName}:${primaryImageTag}' \
                    -f '${dockerfile}' \
                    '${contextDir}'
                """
            }
        }
        stage('Validate image with Trivy') {
            utils.when(!skipVerifications) {
                runTrivyCheck("${imageName}:${primaryImageTag}", projectRepository)
            }
        }
        stage("Login with image repository") {
            utils.when(!dryRun) {
                sh """
                echo ${env.HUB_PASSWORD} | docker login -u '${env.HUB_USERNAME}' --password-stdin '${dockerRepository}'
                """
            }
        }
        imageTags.each { imageTag ->
            stage("Publish image '${imageName}:${imageTag}'") {
                utils.when(!dryRun) {
                    sh """
                    docker tag "${imageName}:${primaryImageTag}" "${imageName}:${imageTag}"
                    """
                    // Pushes occasionally fail transiently; retry once
                    retry(2) {
                        sh """
                        docker push '${imageName}:${imageTag}'
                        """
                    }
                }
            }
        }
    }
}
/**
 * Lints the given Dockerfile with Hadolint and publishes the resulting
 * report: printed in the build log and, when the build is a pull request
 * (env.CHANGE_ID set), posted as a PR comment on Gitea.
 *
 * A lint failure marks the build UNSTABLE instead of failing it.
 */
void runHadolintCheck(String dockerfile, String projectRepository) {
    // Report path made unique per build via the build start timestamp
    String reportFile = ".hadolint-report-${currentBuild.startTimeInMillis}.txt"
    try {
        validateDockerfileWithHadolint(dockerfile, ['reportFile': reportFile])
    } catch (err) {
        // Downgrade lint errors to UNSTABLE so the pipeline continues
        unstable("Dockerfile '${dockerfile}' failed linting !")
    } finally {
        String lintReport = ''
        if (fileExists(reportFile)) {
            String report = readFile(reportFile)
            lintReport = """${lintReport}
|
|```
|${report.trim() ? report : "Rien à signaler."}
|```"""
        } else {
            lintReport = """${lintReport}
|
|_Vérification échouée mais aucun rapport trouvé !?_ :thinking:"""
        }
        String defaultReport = '_Rien à signaler !_ :thumbsup:'
        // Markdown report; '|' margins are stripped by stripMargin()
        String report = """## Rapport d'analyse du fichier `${dockerfile}` avec [Hadolint](https://github.com/hadolint/hadolint)
|
|${lintReport ?: defaultReport}
""".stripMargin()
        print report
        if (env.CHANGE_ID) {
            gitea.commentPullRequest(projectRepository, env.CHANGE_ID, report)
        }
    }
}
// Lints a Dockerfile with Hadolint, teeing the linter output into a report
// file whose path is returned to the caller. The sh step fails (pipefail)
// when hadolint reports violations.
String validateDockerfileWithHadolint(String dockerfile, Map options = [:]) {
    String outputFile = options.get('reportFile', ".hadolint-report-${currentBuild.startTimeInMillis}.txt")
    String extraArgs = options.get('hadolintArgs', '--no-color')
    String binPath = getOrInstallHadolint(options)
    sh("""#!/bin/bash
    set -eo pipefail
    '${binPath}' '${dockerfile}' ${extraArgs} | tee '${outputFile}'
    """)
    return outputFile
}
/**
 * Scans the given image with Trivy and publishes the resulting report:
 * printed in the build log and, when the build is a pull request
 * (env.CHANGE_ID set), posted as a PR comment on Gitea.
 *
 * A failed scan marks the build UNSTABLE instead of failing it.
 */
void runTrivyCheck(String imageName, String projectRepository, Map options = [:]) {
    // Report path made unique per build via the build start timestamp
    String reportFile = ".trivy-report-${currentBuild.startTimeInMillis}.txt"
    try {
        validateImageWithTrivy(imageName, ['reportFile': reportFile])
    } catch (err) {
        // Downgrade scan failures to UNSTABLE so the pipeline continues
        unstable("Image '${imageName}' failed validation !")
    } finally {
        String lintReport = ''
        if (fileExists(reportFile)) {
            lintReport = """${lintReport}
|
|${readFile(reportFile)}
|"""
        } else {
            lintReport = """${lintReport}
|
|_Vérification échouée mais aucun rapport trouvé !?_ :thinking:"""
        }
        String defaultReport = '_Rien à signaler !_ :thumbsup:'
        // Markdown report; '|' margins are stripped by stripMargin()
        String report = """## Rapport d'analyse de l'image avec [Trivy](https://github.com/aquasecurity/trivy)
|
|${lintReport ?: defaultReport}
""".stripMargin()
        print report
        if (env.CHANGE_ID) {
            gitea.commentPullRequest(projectRepository, env.CHANGE_ID, report)
        }
    }
}
/**
 * Scans a Docker image with Trivy, rendering the findings with the library's
 * Markdown template into a report file whose path is returned.
 *
 * The Trivy cache directory is persisted between builds through the Jenkins
 * ArbitraryFileCache plugin to avoid re-downloading the vulnerability DB.
 */
String validateImageWithTrivy(String imageName, Map options = [:]) {
    String trivyBin = getOrInstallTrivy(options)
    // --exit-code 1 makes the sh step fail when findings are reported
    String trivyArgs = options.get('trivyArgs', '--exit-code 1')
    String cacheDirectory = options.get('cacheDirectory', '.trivy/.cache')
    String cacheDefaultBranch = options.get('cacheDefaultBranch', 'develop')
    // Maximum cache size accepted by the cache step (presumably MB — TODO confirm)
    Integer cacheMaxSize = options.get('cacheMaxSize', 250)
    String reportFile = options.get('reportFile', ".trivy-report-${currentBuild.startTimeInMillis}.txt")
    String markdownTemplate = libraryResource 'com/cadoles/trivy/templates/markdown.tpl'
    writeFile file:'.trivy-markdown.tpl', text: markdownTemplate
    cache(maxCacheSize: cacheMaxSize, defaultBranch: cacheDefaultBranch, caches: [
        [$class: 'ArbitraryFileCache', path: cacheDirectory, compressionMethod: 'TARGZ']
    ]) {
        // .trivyignore.yaml lets the project whitelist known findings
        sh("'${trivyBin}' --cache-dir '${cacheDirectory}' image --ignorefile .trivyignore.yaml --format template --template '@.trivy-markdown.tpl' -o '${reportFile}' ${trivyArgs} '${imageName}'")
    }
    return reportFile
}
/**
 * Returns the path of a hadolint binary, downloading the pinned release when
 * hadolint is not already available in PATH.
 *
 * Options:
 * - installDir: installation directory (default '/usr/local/bin')
 * - version: hadolint release to download (default '2.12.0')
 * - forceDownload: re-download even when hadolint is already present (default false)
 * - downloadUrl: alternate download URL
 */
String getOrInstallHadolint(Map options = [:]) {
    String installDir = options.get('installDir', '/usr/local/bin')
    String version = options.get('version', '2.12.0')
    // Fix: this was declared as String, coercing the Boolean default to the
    // truthy string "false", which forced a download on every call even when
    // hadolint was already installed. Declare it Boolean, like in gomplate.groovy.
    Boolean forceDownload = options.get('forceDownload', false)
    String downloadUrl = options.get('downloadUrl', "https://github.com/hadolint/hadolint/releases/download/v${version}/hadolint-Linux-x86_64")
    String hadolintBin = sh(returnStdout: true, script: 'which hadolint || exit 0').trim()
    if (hadolintBin == '' || forceDownload) {
        sh("""
        mkdir -p '${installDir}'
        curl -o '${installDir}/hadolint' -sSL '${downloadUrl}'
        chmod +x '${installDir}/hadolint'
        """)
        hadolintBin = "${installDir}/hadolint"
    }
    return hadolintBin
}
/**
 * Returns the path of a trivy binary, installing the pinned release via the
 * official install script when trivy is not already available in PATH.
 *
 * Options:
 * - installDir: installation directory (default '/usr/local/bin')
 * - version: trivy release to install (default '0.47.0')
 * - forceDownload: re-install even when trivy is already present (default false)
 * - downloadUrl: alternate install-script URL
 */
String getOrInstallTrivy(Map options = [:]) {
    String installDir = options.get('installDir', '/usr/local/bin')
    String version = options.get('version', '0.47.0')
    // Fix: this was declared as String, coercing the Boolean default to the
    // truthy string "false", which forced a re-install on every call even
    // when trivy was already installed. Declare it Boolean, like in gomplate.groovy.
    Boolean forceDownload = options.get('forceDownload', false)
    String installScriptDownloadUrl = options.get('downloadUrl', 'https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh')
    String trivyBin = sh(returnStdout: true, script: 'which trivy || exit 0').trim()
    if (trivyBin == '' || forceDownload) {
        sh("""
        mkdir -p '${installDir}'
        curl -sfL '${installScriptDownloadUrl}' | sh -s -- -b '${installDir}' v${version}
        chmod +x '${installDir}/trivy'
        """)
        trivyBin = "${installDir}/trivy"
    }
    return trivyBin
}

138
vars/cpkg.groovy Normal file
View File

@ -0,0 +1,138 @@
import java.util.regex.Matcher
// Basic port of https://forge.cadoles.com/Cadoles/cpkg
/**
 * Merges the current commit into the distribution branch
 * "dist/<dist>/<distVersion>/<branch>" and tags it as
 * "pkg/<repo>/<dist>-<distVersion>/<version>-<buildNumber>" so the package
 * build is triggered. Returns a map describing the branches/tags involved.
 */
def call(Map params = [:]) {
    def currentRef = sh(script: 'git rev-parse HEAD', returnStdout: true).trim()
    def distRepo = params.distRepo ? params.distRepo : 'dev'
    def dist = params.dist ? params.dist : 'eole'
    def distVersion = params.distVersion ? params.distVersion : '2.7.0'
    def distBranchName = params.distBranchName ? params.distBranchName : env.GIT_BRANCH
    def gitCredentials = params.gitCredentials ? params.gitCredentials : null
    def gitCredentialsType = params.gitCredentialsType ? params.gitCredentialsType : 'http'
    def gitEmail = params.gitEmail ? params.gitEmail : 'jenkins@cadoles.com'
    def gitUsername = params.gitUsername ? params.gitUsername : 'Jenkins'
    def skipCi = params.containsKey('skipCi') ? params.skipCi : false
    def skipPush = params.containsKey('skipPush') ? params.skipPush : true
    // Define dist branch based on provided informations and base branch name
    def distBranch = "dist/${dist}/${distVersion}/${distBranchName}"
    def result = [:]
    result['distBranch'] = distBranch
    result['distVersion'] = distVersion
    result['distRepo'] = distRepo
    result['distBranchName'] = distBranchName
    def proc = {
        // Add distBranch to fetched refs
        sh("git config --add remote.origin.fetch +refs/heads/${distBranch}:refs/remotes/origin/${distBranch}")
        // Update branches
        sh('git fetch --all')
        // Merge currentRef into distBranch and push
        sh("git checkout -b '${distBranch}' 'origin/${distBranch}'")
        // Set committer identity. Fix: git has no 'user.username' key — the
        // intended configuration key is 'user.name'.
        sh("git config user.email '${gitEmail}'")
        sh("git config user.name '${gitUsername}'")
        sh("git merge ${currentRef}")
        if (!skipPush) {
            sh('git push')
        } else {
            // Fix: skipPush must be set to 'false' (not 'true') to enable pushing
            println("Skipping push. Set skipPush param to 'false' to enable remote repository update.")
        }
        // Retrieve last tag matching pattern pkg/${distRepo}/${dist}-${distVersion}/*
        def lastTag = sh(
            script: "git tag -l 'pkg/${distRepo}/${dist}-${distVersion}/*' --sort=v:refname | tail -n 1",
            returnStdout: true,
        )
        result['previousTag'] = lastTag.trim()
        println("Last tag is '${result['previousTag']}'")
        // Extract version number from last tag
        def lastVersionNumber = lastTag.split('/').last().trim()
        result['previousVersionNumber'] = lastVersionNumber
        println("Last version number is '${lastVersionNumber}'")
        String versionRoot = extractVersionRoot(lastVersionNumber)
        String versionNumber = ''
        if (versionRoot) {
            versionNumber = versionRoot
        } else {
            // No usable previous version: derive one from `git describe`,
            // falling back to a 0.0.0-<ref> pseudo-version for bare commits
            versionNumber = sh(
                script: "git describe --always ${currentRef}",
                returnStdout: true,
            ).split('/').last().trim()
            Boolean isCommitRef = !versionNumber.matches(/^[0-9]+\.[0-9]+\.[0-9]+.*$/)
            if (isCommitRef) {
                versionNumber = "0.0.0-${versionNumber}"
            }
        }
        versionNumber = "${versionNumber}-${env.BUILD_NUMBER}"
        println("New version number will be '${versionNumber}'")
        result['newVersionNumber'] = versionNumber
        // Generate tag with incremented version number
        def tag = "pkg/${distRepo}/${dist}-${distVersion}/${versionNumber}"
        result['newTag'] = tag
        def tagComment = "Build ${versionNumber} ${distRepo} package for ${dist}-${distVersion}."
        if (skipCi) {
            tagComment += ' [ci skip]'
        }
        sh("git tag -f -a '${tag}' -m '${tagComment}'")
        // Push tag
        if (!skipPush) {
            sh('git push --tags -f')
        } else {
            // Fix: skipPush must be set to 'false' (not 'true') to enable pushing
            println("Skipping push. Set skipPush param to 'false' to enable remote repository update.")
        }
        // Switch back to previous ref
        sh("git checkout ${currentRef}")
    }
    if (gitCredentials != null) {
        if (gitCredentialsType == 'http') {
            git.withHTTPCredentials(gitCredentials) {
                proc.call()
            }
        } else if (gitCredentialsType == 'ssh') {
            git.withSSHCredentials(gitCredentials) {
                proc.call()
            }
        } else {
            throw new Exception("Unknown git credentials type '${gitCredentialsType}' ! Expected 'ssh' or 'http' (default).")
        }
    } else {
        proc.call()
    }
    return result
}
// Extracts the leading 'X.Y.Z' semantic-version prefix of a version string,
// or returns an empty string when the string does not start with one.
// @NonCPS: plain Groovy regex matching, kept out of the CPS transform.
@NonCPS
String extractVersionRoot(String fullVersion) {
    def matcher = fullVersion =~ /^([0-9]+\.[0-9]+\.[0-9]+).*$/
    return matcher.matches() ? matcher.group(1) : ""
}

88
vars/debian.groovy Normal file
View File

@ -0,0 +1,88 @@
/**
 * Polls the Debian repository until the given package (optionally with a
 * specific version) appears, or fails when the timeout is reached.
 *
 * Params map (also forwarded to listRepoPackages for repo selection):
 * - expectedVersion: version string or regex pattern to wait for (default: any)
 * - delay: seconds between two polls (default 30)
 * - timeout: overall wait budget in seconds (default 2400)
 * - asPattern: when true, expectedVersion is matched as a regex (default true)
 */
def waitForRepoPackage(String packageName, Map params = [:]) {
    def expectedVersion = params.expectedVersion ? params.expectedVersion : null
    def delay = params.delay ? params.delay : 30
    def waitTimeout = params.timeout ? params.timeout : 2400
    def asPattern = params.containsKey("asPattern") ? params.asPattern : true
    def message = "Waiting for package '${packageName}'"
    if (expectedVersion != null) {
        message += " with version '${expectedVersion}'"
    }
    message += '...'
    println(message)
    // Abort the whole wait loop when the global timeout expires
    timeout(time: waitTimeout, unit: 'SECONDS') {
        while(true) {
            println("Retrieving packages list...")
            def repo = listRepoPackages(params)
            def packages = repo[packageName]
            if (!packages) {
                println("Package not found.")
                sleep(time: delay, unit: 'SECONDS')
                continue
            }
            // Any version is acceptable when no expected version was given
            if (expectedVersion == null) {
                println("Package found !")
                break
            }
            // Look for a published version matching the expectation, either
            // as a regex pattern or as an exact string
            def versionFound = packages.find {
                def matches = asPattern ? it['version'] =~ expectedVersion : it['version'] == expectedVersion
                println("Comparing expected version '${expectedVersion}' to '${it['version']}': ${matches}")
                return matches
            }
            if (versionFound) {
                println("Expected package version found !")
                break
            }
            println("Package version not found.")
            sleep(time: delay, unit: 'SECONDS')
        }
    }
}
/**
 * Downloads and parses a Debian repository "Packages" index.
 *
 * Params:
 * - baseURL: repository root (default 'https://vulcain.cadoles.com')
 * - distrib: distribution name (default '2.7.0-dev')
 * - component: repository component (default 'main')
 * - type: index type, e.g. 'binary' (default 'binary')
 * - arch: architecture (default 'amd64')
 *
 * Returns a map: package name -> list of entry maps ('name', 'version').
 */
def listRepoPackages(Map params = [:]) {
    def baseURL = params.baseURL ? params.baseURL : 'https://vulcain.cadoles.com'
    def distrib = params.distrib ? params.distrib : '2.7.0-dev'
    def component = params.component ? params.component : 'main'
    def type = params.type ? params.type : 'binary'
    def arch = params.arch ? params.arch : 'amd64'
    def response = httpRequest(url: "${baseURL}/dists/${distrib}/${component}/${type}-${arch}/Packages")
    def packages = [:]
    def lines = response.content.split('\n')
    def currentPackage
    // Line-oriented parse: a 'Package:' line opens a new entry, a following
    // 'Version:' line completes it (assumes Package always precedes Version,
    // as in standard Packages indices)
    lines.each {
        def packageMatch = (it =~ /^Package: (.*)$/)
        if (packageMatch.find()) {
            def packageName = packageMatch.group(1)
            if (!packages[packageName]) {
                packages[packageName] = []
            }
            currentPackage = [:]
            currentPackage['name'] = packageName
            packages[packageName] += currentPackage
        }
        def versionMatch = (it =~ /^Version: (.*)$/)
        if (versionMatch.find()) {
            def version = versionMatch.group(1)
            currentPackage['version'] = version
        }
    }
    println "Found packages:"
    // NOTE(review): it.value is a List of entry maps, so ['version'] relies
    // on Groovy's GPath spread and prints a list of versions — confirm this
    // output is intended before changing it.
    packages.each{
        println " - Package: ${it.key}, Version: ${it.value['version']}"
    }
    return packages
}

44
vars/git.groovy Normal file
View File

@ -0,0 +1,44 @@
/**
 * Runs the given closure with Git HTTP credentials made available through a
 * temporary GIT_ASKPASS helper script, which is removed afterwards.
 */
def withHTTPCredentials(String credentialsId, Closure fn) {
    withCredentials([
        usernamePassword(
            credentialsId: credentialsId,
            usernameVariable: 'GIT_USERNAME',
            passwordVariable: 'GIT_PASSWORD'
        )
    ]) {
        // Unique path so concurrent builds on the same node do not collide
        def randomUUID = UUID.randomUUID().toString()
        def tmpAskPassScript = pwd(tmp:true) + "/${randomUUID}"
        try {
            // Fix: the helper previously started with a blank indented line,
            // leaving the '#!/bin/sh' shebang off the first line of the file.
            // The credentials are read from the environment, never written to disk.
            writeFile(
                file: tmpAskPassScript,
                text: '''#!/bin/sh
case "$1" in
    Username*) echo $GIT_USERNAME ;;
    Password*) echo $GIT_PASSWORD ;;
esac
'''
            )
            sh(script: "chmod +x '${tmpAskPassScript}'")
            withEnv(["GIT_ASKPASS=${tmpAskPassScript}"]) {
                fn.call()
            }
        } finally {
            // Always clean up the helper, even when the closure fails
            sh(script: "rm -f '${tmpAskPassScript}'")
        }
    }
}
/**
 * Runs the given closure with a Git SSH identity configured through
 * GIT_SSH_COMMAND (host key checking disabled, identity file from the
 * Jenkins credentials store).
 */
def withSSHCredentials(String credentialsId, Closure fn) {
    // Fix: removed unused 'randomUUID' local (leftover from the HTTP variant)
    withCredentials([
        sshUserPrivateKey(
            credentialsId: credentialsId,
            keyFileVariable: 'GIT_SSH_IDENTITY_FILE',
        )
    ]) {
        // Single quotes are intentional: $GIT_SSH_IDENTITY_FILE is expanded
        // by the shell at git invocation time, not by Groovy
        withEnv(['GIT_SSH_VARIANT=ssh', 'GIT_SSH_COMMAND=ssh -i $GIT_SSH_IDENTITY_FILE -o IdentitiesOnly=yes -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null']) {
            fn.call()
        }
    }
}

156
vars/gitea.groovy Normal file
View File

@ -0,0 +1,156 @@
/**
 * Creates or updates a Jenkins-authored comment on a Gitea pull request.
 *
 * @param repo         "<org>/<repo>" path of the repository
 * @param issueId      pull-request (issue) number
 * @param comment      Markdown body of the comment
 * @param commentIndex index of the existing jenkins comment to update
 *                     (-1 = always create a new comment)
 */
def commentPullRequest(String repo, String issueId, String comment, Integer commentIndex = -1) {
    // NOTE(review): in a Java regex replacement string, '\\"' collapses to a
    // plain '"', so this replaceAll is effectively a no-op; the body is passed
    // via `jq --rawfile` below, which handles quoting — confirm before removing.
    comment = comment.replaceAll('"', '\\"')
    withCredentials([
        string(credentialsId: 'GITEA_JENKINS_PERSONAL_TOKEN', variable: 'GITEA_TOKEN'),
    ]) {
        // Written to a file so jq can embed it without shell-escaping issues
        writeFile(file: '.prComment', text: comment)
        sh """#!/bin/bash
        set -xeo pipefail

        previous_comment_id=null
        if [ "${commentIndex}" != "-1" ]; then
        # Récupération si il existe du commentaire existant
        previous_comment_id=\$(curl -v --fail \
            -H "Authorization: token ${GITEA_TOKEN}" \
            -H "Content-Type: application/json" \
            https://forge.cadoles.com/api/v1/repos/${repo}/issues/${issueId}/comments \
            | jq -c '[ .[] | select(.user.login=="jenkins") ] | .[${commentIndex}] | .id' \
        )
        fi

        # Génération du payload pour l'API Gitea
        echo '{}' | jq -c --rawfile body .prComment '.body = \$body' > payload.json

        if [[ "\$previous_comment_id" == "null" ]]; then
            # Création du commentaire via l'API Gitea
            curl -v --fail \
                -XPOST \
                -H "Authorization: token ${GITEA_TOKEN}" \
                -H "Content-Type: application/json" \
                -d @payload.json \
                https://forge.cadoles.com/api/v1/repos/${repo}/issues/${issueId}/comments
        else
            # Modification du commentaire existant
            curl -v --fail \
                -XPATCH \
                -H "Authorization: token ${GITEA_TOKEN}" \
                -H "Content-Type: application/json" \
                -d @payload.json \
                https://forge.cadoles.com/api/v1/repos/${repo}/issues/comments/\$previous_comment_id
        fi
        """
    }
}
// Effectue une "release" sur Gitea pour le <ORG>/<PROJET> donné.
// Performs a "release" on Gitea for the given <ORG>/<PROJECT>, then notifies
// Rocket.Chat. Options: isDraft, baseUrl, releaseVersion, releaseName,
// commitishTarget, isPrerelease, body, attachments (paths uploaded as assets).
void release(String credentialsId, String org, String project, Map options = [:]) {
    Boolean isDraft = options.get('isDraft', false)
    String baseUrl = options.get('baseUrl', 'https://forge.cadoles.com')
    String defaultVersion = sh(returnStdout: true, script: 'git describe --always').trim()
    String releaseVersion = options.get('releaseVersion', defaultVersion)
    String releaseName = options.get('releaseName', releaseVersion)
    String commitishTarget = options.get('commitishTarget', env.GIT_COMMIT)
    // Default to "prerelease" unless the current commit carries an exact tag
    Boolean defaultIsPrerelease = true
    try {
        sh(script: "git describe --exact-match ${GIT_COMMIT}")
        defaultIsPrerelease = false
    } catch (err) {
        println "Could not find tag associated with commit '${GIT_COMMIT}' ! Using 'prerelease' as default."
    }
    Boolean isPrerelease = options.get('isPrerelease', defaultIsPrerelease)
    String body = options.get('body', '')
    List<String> attachments = options.get('attachments', [])
    // Materialize the bundled gitea-release.sh helper in a temp directory
    String scriptTempDir = ".gitea-release-script-${System.currentTimeMillis()}"
    sh("mkdir -p '${scriptTempDir}'")
    String giteaReleaseScript = "${scriptTempDir}/gitea-release.sh"
    String giteaReleaseScriptContent = libraryResource 'com/cadoles/gitea/gitea-release.sh'
    writeFile file: giteaReleaseScript, text:giteaReleaseScriptContent
    sh("chmod +x '${giteaReleaseScript}'")
    try {
        withCredentials([
            usernamePassword(
                credentialsId: credentialsId,
                usernameVariable: 'GITEA_RELEASE_USERNAME',
                passwordVariable: 'GITEA_RELEASE_PASSWORD'
            )
        ]) {
            // The helper script is driven entirely by environment variables
            sh """
            export GITEA_RELEASE_PROJECT="${project}"
            export GITEA_RELEASE_ORG="${org}"
            export GITEA_RELEASE_BASE_URL="${baseUrl}"
            export GITEA_RELEASE_VERSION="${releaseVersion}"
            export GITEA_RELEASE_NAME="${releaseName}"
            export GITEA_RELEASE_COMMITISH_TARGET="${commitishTarget}"
            export GITEA_RELEASE_IS_DRAFT="${isDraft}"
            export GITEA_RELEASE_IS_PRERELEASE="${isPrerelease}"
            export GITEA_RELEASE_BODY="${body}"
            export GITEA_RELEASE_ATTACHMENTS="${attachments.join(' ')}"
            ${giteaReleaseScript}
            """
        }
    } finally {
        // Always remove the temp helper directory
        dir(scriptTempDir) {
            deleteDir()
        }
    }
    // Announce the release on Rocket.Chat, mentioning the build user
    rocketSend(
        avatar: 'https://jenkins.cadol.es/static/b5f67753/images/headshot.png',
        message: """
        Nouvelle version publiée pour le projet `${org}/${project}`: [${releaseName}](${baseUrl}/${org}/${project}/releases/tag/${releaseVersion})

        [Visualiser le job](${env.RUN_DISPLAY_URL})

        @${utils.getBuildUser()}
        """.stripIndent(),
        rawMessage: true
    )
}
// Télécharge les fichiers associés à une "version" publiée sur un projet Gitea
// Downloads the files attached to a published "release" of a Gitea project
// into `outputDir`. Options: baseUrl, releaseName ('latest' by default), outputDir.
void download(String credentialsId, String org, String project, Map options = [:]) {
    String baseUrl = options.get('baseUrl', 'https://forge.cadoles.com')
    String releaseName = options.get('releaseName', 'latest')
    String outputDir = options.get('outputDir', 'gitea-dl')
    // Materialize the bundled gitea-download.sh helper in a temp directory
    String scriptTempDir = ".gitea-download-script-${System.currentTimeMillis()}"
    sh("mkdir -p '${scriptTempDir}'")
    String giteaDownloadScript = "${scriptTempDir}/gitea-download.sh"
    String giteaDownloadScriptContent = libraryResource 'com/cadoles/gitea/gitea-download.sh'
    writeFile file: giteaDownloadScript, text:giteaDownloadScriptContent
    sh("chmod +x '${giteaDownloadScript}'")
    try {
        withCredentials([
            usernamePassword(
                credentialsId: credentialsId,
                usernameVariable: 'GITEA_DOWNLOAD_USERNAME',
                passwordVariable: 'GITEA_DOWNLOAD_PASSWORD'
            )
        ]) {
            // The helper script is driven entirely by environment variables
            sh """
            export GITEA_DOWNLOAD_PROJECT="${project}"
            export GITEA_DOWNLOAD_ORG="${org}"
            export GITEA_DOWNLOAD_BASE_URL="${baseUrl}"
            export GITEA_DOWNLOAD_RELEASE_NAME="${releaseName}"
            export GITEA_DOWNLOAD_TARGET_DIRECTORY="${outputDir}"
            ${giteaDownloadScript}
            """
        }
    } finally {
        // Always remove the temp helper directory
        dir(scriptTempDir) {
            deleteDir()
        }
    }
}

46
vars/gomplate.groovy Normal file
View File

@ -0,0 +1,46 @@
// Renders `sourceTemplate` with gomplate and writes the result to `destFile`.
// Entries of `env` are exported as shell environment variables before the
// template is rendered, so they are available to gomplate's `env` functions.
void call(String sourceTemplate, String destFile, Map env = [:], Map options = [:]) {
    String binary = getOrInstallGomplate(options)
    String envExports = exportEnvMap(env)
    sh """
    ${envExports}
    ${binary} -f '${sourceTemplate}' > '${destFile}'
    """
}
// Builds a newline-separated sequence of `export KEY="VALUE"` shell
// statements from the given map. Returns an empty string for an empty map.
//
// Replaces the previous O(n^2) repeated GString re-concatenation (which also
// embedded stray indentation into the generated script) with collect/join.
//
// NOTE: values are interpolated verbatim; callers must not pass values
// containing double quotes or shell metacharacters.
String exportEnvMap(Map env) {
    return env.collect { key, value -> "export ${key}=\"${value}\"" }.join('\n')
}
// Returns the path to a gomplate binary, downloading it into `installDir`
// when it is not already on the PATH (or when `forceDownload` is set).
// The check-and-install sequence is guarded by a per-node lock to avoid
// concurrent downloads on the same agent.
//
// Options: installDir (default /usr/local/bin), version (default 3.10.0),
// forceDownload (default false), downloadUrl.
String getOrInstallGomplate(Map options = [:]) {
    String installDir = options.get('installDir', '/usr/local/bin')
    String version = options.get('version', '3.10.0')
    Boolean forceDownload = options.get('forceDownload', false)
    String downloadUrl = options.get('downloadUrl', "https://github.com/hairyhenderson/gomplate/releases/download/v${version}/gomplate_linux-amd64")
    String gomplateBin = ''
    lock("${env.NODE_NAME}:gomplate-install") {
        // Empty string when gomplate is absent (`|| exit 0` keeps sh from failing)
        gomplateBin = sh(returnStdout: true, script: 'which gomplate || exit 0').trim()
        if (gomplateBin == '' || forceDownload) {
            sh("""
            mkdir -p '${installDir}'
            curl -o '${installDir}/gomplate' -sSL '${downloadUrl}'
            chmod +x '${installDir}/gomplate'
            """)
            gomplateBin = "${installDir}/gomplate"
        }
    }
    return gomplateBin
}

19
vars/hook.groovy Normal file
View File

@ -0,0 +1,19 @@
// Loads and executes an optional per-project hook script located at
// .jenkins/<name>.groovy in the workspace. The script must end with
// `return this` and expose an exec() method; a missing script is skipped.
def call(String name) {
    def filepath = "${env.WORKSPACE}/.jenkins/${name}.groovy"
    if (!fileExists(filepath)) {
        println("No hook '${filepath}' script. Skipping.")
        return
    }
    def hook = load(filepath)
    if (hook == null) {
        error("Hook '${filepath}' seems to be null. Did you forget to add 'return this' at the end of the script ?")
    }
    if (!hook.metaClass.respondsTo(hook, 'exec')) {
        error("Hook script '${filepath}' exists but does not expose an exec() function.")
    }
    hook.exec()
}

195
vars/kubecontainer.groovy Normal file
View File

@ -0,0 +1,195 @@
/**
 * Builds, verifies and (optionally) publishes a Docker image on the Cadoles registry (by default).
 *
 * Available options:
 *
 * - dockerfile - String - Path to the Dockerfile used to build the image, default "./Dockerfile"
 * - contextDir - String - Directory used as build context, default "./"
 * - imageName - String - Name of the image to build, default ""
 * - imageTag - String - Tag applied to the image after build, default: output of `git describe --always`
 * - gitCredentialsId - String - Jenkins credentials used to clone the Git repository, default "forge-jenkins"
 * - dockerRepository - String - Hostname of the Docker registry to publish to, default "reg.cadoles.com"
 * - dockerRepositoryCredentialsId - String - Jenkins credentials used to push to the registry, default "reg.cadoles.com-jenkins"
 * - dryRun - Boolean - Disable/enable publication of the image, default "true"
 * - skipVerifications - Boolean - Disable/enable quality/security verification steps, default "false"
 */
String buildAndPublishImage(Map options = [:]) {
    String dockerfile = options.get('dockerfile', './Dockerfile')
    String contextDir = options.get('contextDir', '.')
    String imageName = options.get('imageName', '')
    String gitRef = sh(returnStdout: true, script: 'git describe --always').trim()
    String imageTag = options.get('imageTag', gitRef)
    String gitCredentialsId = options.get('gitCredentialsId', 'forge-jenkins')
    String dockerRepository = options.get('dockerRepository', 'reg.cadoles.com')
    String dockerRepositoryCredentialsId = options.get('dockerRepositoryCredentialsId', 'reg.cadoles.com-jenkins')
    Boolean dryRun = options.get('dryRun', true)
    // Accept the documented 'skipVerifications' key while keeping backward
    // compatibility with the legacy (misspelled) 'skipVerification' key that
    // was the only one read before.
    Boolean skipVerifications = options.get('skipVerifications', options.get('skipVerification', false))
    // Branch name sanitized for safe use in shell commands / image tags
    String currentBranch = env.BRANCH_NAME.replaceAll("[^a-zA-Z]+","_")
    String projectRepository = env.JOB_NAME
    if (env.BRANCH_NAME ==~ /^PR-.*$/) {
        projectRepository = env.JOB_NAME - "/${env.JOB_BASE_NAME}"
    }
    projectRepository = options.get('projectRepository', projectRepository)
    withCredentials([
        usernamePassword([
            credentialsId: dockerRepositoryCredentialsId,
            usernameVariable: 'HUB_USERNAME',
            passwordVariable: 'HUB_PASSWORD'
        ]),
    ]) {
        stage('Validate Dockerfile with Hadolint') {
            utils.when(!skipVerifications) {
                runHadolintCheck(dockerfile, projectRepository)
            }
        }
        // The actual build/scan/release logic is delegated to the project Makefile
        stage("Build image '${imageName}:${imageTag}'") {
            git.withHTTPCredentials(gitCredentialsId) {
                sh """
                CURRENT_BRANCH=${currentBranch} make
                """
            }
        }
        stage('Validate image with Trivy') {
            utils.when(!skipVerifications) {
                sh """
                CURRENT_BRANCH=${currentBranch} make scan
                """
            }
        }
        stage("Publish image '${imageName}:${imageTag}'") {
            utils.when(!dryRun) {
                retry(2) {
                    sh """
                    CURRENT_BRANCH=${currentBranch} make release
                    """
                }
            }
        }
    }
}
// Lints the given Dockerfile with hadolint, marks the build UNSTABLE on
// violations, and posts the lint report (always, pass or fail) as a
// pull-request comment when the build is a PR.
void runHadolintCheck(String dockerfile, String projectRepository) {
    String reportFile = ".hadolint-report-${currentBuild.startTimeInMillis}.txt"
    try {
        validateDockerfileWithHadolint(dockerfile, ['reportFile': reportFile])
    } catch (err) {
        // hadolint exits non-zero on findings; degrade instead of failing the build
        unstable("Dockerfile '${dockerfile}' failed linting !")
    } finally {
        String lintReport = ''
        if (fileExists(reportFile)) {
            // Embed the raw hadolint output as a fenced code block
            lintReport = """${lintReport}
            |
            |```
            |${readFile(reportFile)}
            |```"""
        } else {
            lintReport = """${lintReport}
            |
            |_Vérification échouée mais aucun rapport trouvé !?_ :thinking:"""
        }
        // NOTE(review): both branches above produce a non-empty lintReport, so
        // defaultReport appears unreachable through the elvis below — confirm.
        String defaultReport = '_Rien à signaler !_ :thumbsup:'
        String report = """## Validation du Dockerfile `${dockerfile}`
        |
        |${lintReport ?: defaultReport}
        """.stripMargin()
        print report
        if (env.CHANGE_ID) {
            gitea.commentPullRequest(projectRepository, env.CHANGE_ID, report)
        }
    }
}
// Runs hadolint on the given Dockerfile, teeing its output to a report file.
// Returns the report file path. Because of `pipefail`, the step fails when
// hadolint reports violations.
String validateDockerfileWithHadolint(String dockerfile, Map options = [:]) {
    String binary = getOrInstallHadolint(options)
    String extraArgs = options.get('hadolintArgs', '--no-color')
    String report = options.get('reportFile', ".hadolint-report-${currentBuild.startTimeInMillis}.txt")
    sh("""#!/bin/bash
    set -eo pipefail
    '${binary}' '${dockerfile}' ${extraArgs} | tee '${report}'
    """)
    return report
}
// Runs the Makefile's `scan` target (Trivy) for the given image.
//
// Fixes two runtime errors in the previous version: it referenced `imageTag`
// and `dryRun`, neither of which existed in this scope (parameters, locals or
// fields), which raised a MissingPropertyException as soon as the function ran.
// `dryRun` is now read from options (default false, i.e. the scan runs).
void runTrivyCheck(String imageName, String projectRepository, Map options = [:]) {
    Boolean dryRun = options.get('dryRun', false)
    // Branch name sanitized for safe use in shell commands
    String currentBranch = env.BRANCH_NAME.replaceAll("[^a-zA-Z]+","_")
    stage("Scan with trivy '${imageName}'") {
        utils.when(!dryRun) {
            retry(2) {
                sh """
                CURRENT_BRANCH=${currentBranch} make scan
                """
            }
        }
    }
}
// Scans a Docker image with Trivy and returns the path of the report file.
// With the default '--exit-code 1', the step fails when vulnerabilities are
// found. The Trivy vulnerability DB is cached between builds.
String validateImageWithTrivy(String imageName, Map options = [:]) {
    String trivyBin = getOrInstallTrivy(options)
    String trivyArgs = options.get('trivyArgs', '--exit-code 1')
    String cacheDirectory = options.get('cacheDirectory', '.trivy/.cache')
    String cacheDefaultBranch = options.get('cacheDefaultBranch', 'develop')
    // Cache size limit, in MB (per the jobcacher plugin convention) — TODO confirm
    Integer cacheMaxSize = options.get('cacheMaxSize', 250)
    String reportFile = options.get('reportFile', ".trivy-report-${currentBuild.startTimeInMillis}.txt")
    // Persist the Trivy DB/cache across builds (jobcacher arbitrary file cache)
    cache(maxCacheSize: cacheMaxSize, defaultBranch: cacheDefaultBranch, caches: [
        [$class: 'ArbitraryFileCache', path: cacheDirectory, compressionMethod: 'TARGZ']
    ]) {
        sh("'${trivyBin}' --cache-dir '${cacheDirectory}' image -o '${reportFile}' ${trivyArgs} '${imageName}'")
    }
    return reportFile
}
// Returns the path to a hadolint binary, downloading it into `installDir`
// when it is not already on the PATH (or when `forceDownload` is set).
//
// Options: installDir (default /usr/local/bin), version (default 2.10.0),
// forceDownload (default false), downloadUrl.
String getOrInstallHadolint(Map options = [:]) {
    String installDir = options.get('installDir', '/usr/local/bin')
    String version = options.get('version', '2.10.0')
    // Must be Boolean: the previous `String forceDownload = ...false` made
    // Groovy coerce `false` into the non-empty — therefore truthy — string
    // "false", forcing a re-download on every single call.
    Boolean forceDownload = options.get('forceDownload', false)
    String downloadUrl = options.get('downloadUrl', "https://github.com/hadolint/hadolint/releases/download/v${version}/hadolint-Linux-x86_64")
    String hadolintBin = sh(returnStdout: true, script: 'which hadolint || exit 0').trim()
    if (hadolintBin == '' || forceDownload) {
        sh("""
        mkdir -p '${installDir}'
        curl -o '${installDir}/hadolint' -sSL '${downloadUrl}'
        chmod +x '${installDir}/hadolint'
        """)
        hadolintBin = "${installDir}/hadolint"
    }
    return hadolintBin
}
// Returns the path to a trivy binary, installing it via the official install
// script into `installDir` when it is not already on the PATH (or when
// `forceDownload` is set).
//
// Options: installDir (default /usr/local/bin), version (default 0.27.1),
// forceDownload (default false), downloadUrl (install script URL).
String getOrInstallTrivy(Map options = [:]) {
    String installDir = options.get('installDir', '/usr/local/bin')
    String version = options.get('version', '0.27.1')
    // Must be Boolean: the previous `String forceDownload = ...false` made
    // Groovy coerce `false` into the non-empty — therefore truthy — string
    // "false", forcing a re-download on every single call.
    Boolean forceDownload = options.get('forceDownload', false)
    String installScriptDownloadUrl = options.get('downloadUrl', 'https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh')
    String trivyBin = sh(returnStdout: true, script: 'which trivy || exit 0').trim()
    if (trivyBin == '' || forceDownload) {
        sh("""
        mkdir -p '${installDir}'
        curl -sfL '${installScriptDownloadUrl}' | sh -s -- -b '${installDir}' v${version}
        chmod +x '${installDir}/trivy'
        """)
        trivyBin = "${installDir}/trivy"
    }
    return trivyBin
}

119
vars/lighthouse.groovy Normal file
View File

@ -0,0 +1,119 @@
// Lighthouse audit pipeline: runs a Lighthouse audit against a user-supplied
// URL inside a Docker image built from library resources, publishes the HTML
// reports and notifies Rocket.Chat of the outcome.
def call() {
    pipeline {
        agent {
            label 'docker'
        }
        parameters {
            string(
                name: 'url',
                description: 'URL cible pour l\'audit'
            )
            string(
                name: 'auditTimeout',
                description: "Délai maximum pour la réalisation de l'audit (en minutes)",
                defaultValue: '60'
            )
        }
        stages {
            stage("Check parameters") {
                steps {
                    script {
                        // Fail fast when the target URL is missing or blank
                        if (!params.url?.trim()) {
                            error("L'URL cible n'est pas définie !")
                        }
                    }
                }
            }
            stage("Run Lighthouse audit") {
                steps {
                    script {
                        def lighthouseImage = buildDockerImage()
                        def dockerArgs = """
                        -e LIGHTHOUSE_URL='${params.url}'
                        """
                        // Abort when the audit exceeds the configured timeout (minutes)
                        timeout(params.auditTimeout.toInteger()) {
                            lighthouseImage.inside(dockerArgs) {
                                // run-audit runs as the 'lighthouse' user; give it the workspace
                                sh 'chown -R lighthouse: ./'
                                sh 'su lighthouse - /usr/local/bin/run-audit'
                            }
                        }
                    }
                }
            }
        }
        post {
            always {
                // Publish the generated HTML reports, then clean the workspace
                publishHTML target: [
                    allowMissing: true,
                    alwaysLinkToLastBuild: false,
                    keepAll: true,
                    reportDir: 'reports',
                    reportFiles: '*.report.html',
                    reportName: "Rapports d'audit"
                ]
                cleanWs()
            }
            success {
                // BuildUser wrapper exposes env.BUILD_USER_ID for the mention
                wrap([$class: 'BuildUser']) {
                    rocketSend (
                        avatar: 'https://jenkins.cadol.es/static/b5f67753/images/headshot.png',
                        message: """
                        L'audit Lighthouse pour `${params.url}` est terminé:
                        - [Voir le rapport](${env.BUILD_URL}Rapports_20d_27audit/lighthouse.report.html)
                        [Lancer un nouvel audit](${env.BUILD_URL}../build)
                        @${env.BUILD_USER_ID ? env.BUILD_USER_ID : 'here'}
                        """.stripIndent(),
                        rawMessage: true
                    )
                }
            }
            failure {
                wrap([$class: 'BuildUser']) {
                    rocketSend (
                        avatar: 'https://jenkins.cadol.es/static/b5f67753/images/headshot.png',
                        message: """
                        L'audit Lighthouse pour `${params.url}` a échoué:
                        [Voir le job](${env.RUN_DISPLAY_URL})
                        @${env.BUILD_USER_ID ? env.BUILD_USER_ID : 'here'}
                        """.stripIndent(),
                        rawMessage: true
                    )
                }
            }
        }
    }
}
// Builds the Lighthouse audit Docker image from resources embedded in this
// shared library. Returns the built image, tagged with the (sanitized) job
// name and build id.
def buildDockerImage() {
    dir ('.lighthouse') {
        def resourceFiles = [
            'com/cadoles/lighthouse/Dockerfile',
            'com/cadoles/lighthouse/config.js.tmpl',
            'com/cadoles/lighthouse/run-audit.sh'
        ]
        // Materialize each embedded resource under its base name
        for (res in resourceFiles) {
            def baseName = res.substring(res.lastIndexOf('/') + 1)
            writeFile file: baseName, text: libraryResource(res)
        }
        def safeJobName = URLDecoder.decode(env.JOB_NAME).toLowerCase().replace('/', '-').replace(' ', '-')
        return docker.build("lighthouse:${safeJobName}-${env.BUILD_ID}", '.')
    }
}

32
vars/lolops.groovy Normal file
View File

@ -0,0 +1,32 @@
// With roughly the given probability (in percent), returns a single random
// "delivery" GIF as a Rocket.Chat attachment list; otherwise returns [].
def getRandomDeliveryAttachment(Integer probability = 25) {
    def gifs = [
        'https://media.giphy.com/media/UtM8DmnahknE4/giphy.gif',
        'https://media.giphy.com/media/11VKF3OwuGHzNe/giphy.gif',
        'https://media.giphy.com/media/d5pGYhWb3T1Hyyl8OB/giphy.gif',
        'https://media.giphy.com/media/oTSy3rHigPd9S/giphy.gif',
        'https://media.giphy.com/media/9DgxhWOxHDHtF8bvwl/giphy.gif',
        'https://media.giphy.com/media/9DgxhWOxHDHtF8bvwl/giphy.gif',
        'https://media.giphy.com/media/1g2JyW7p6mtZc6bOEY/giphy.gif',
        'https://media.giphy.com/media/7AqsdBK15kiOc/giphy.gif',
        'https://media.giphy.com/media/r2MkQEOe7niGk/giphy.gif',
        'https://media.giphy.com/media/ekXyB3lQ0IYL3luIFn/giphy.gif',
        'https://media.giphy.com/media/QBRlXHKV5mpbLJ4prc/giphy.gif',
        'https://media.giphy.com/media/NOsfNQGivMFry/giphy.gif',
        'https://media.giphy.com/media/M1vu1FJnW6gms/giphy.gif',
        'https://media.giphy.com/media/555x0gFF89OhVWPkvb/giphy.gif',
        'https://media.giphy.com/media/9RZu6ahd8LIYHQlGUD/giphy.gif',
        'https://media.giphy.com/media/9RZu6ahd8LIYHQlGUD/giphy.gif',
        'https://media.giphy.com/media/W1fFHj6LvyTgfBNdiz/giphy.gif',
        'https://media.giphy.com/media/1g2JyW7p6mtZc6bOEY/giphy.gif',
        'https://media.giphy.com/media/ORiFE3ijpNaIWDoOqP/giphy.gif',
        'https://media.giphy.com/media/r16Zmuvt1hSTK/giphy.gif',
        'https://media.giphy.com/media/bF8Tvy2Ta0mqxXgaPV/giphy.gif',
        'https://media.giphy.com/media/C0XT6BmLC3nGg/giphy.gif'
    ]
    Random rnd = new Random()
    // Guard clause: draw in [0,100); above the threshold means "no GIF today"
    if (rnd.nextInt(100) > probability) {
        return []
    }
    def pick = gifs[rnd.nextInt(gifs.size())]
    return [[title: '', imageUrl: pick]]
}

View File

@ -133,7 +133,15 @@ def initWithCredentials(String urlCredentialsId, String userCredentialsId, Closu
}
def runInNewVM(Map args) {
// Convenience wrapper around runInNewVM: provisions a fresh VM and executes
// the single shell script provided in args.script on it.
def runScriptInNewVM(Map args) {
    def script = args.get("script", "")
    runInNewVM(args) { shell ->
        shell(script)
    }
}
def runInNewVM(Map args, Closure body) {
def urlCredentialsId = args.get('urlCredentialsId', 'opennebula-dev-url')
def userCredentialsId = args.get('userCredentialsId', 'kipp-credentials')
@ -141,7 +149,7 @@ def runInNewVM(Map args) {
def vmTemplate = args.get('vmTemplate', '')
def terminateOnExit = args.get('terminateOnExit', true)
def shell = args.get("shell", "/bin/sh")
def script = args.get('script', '')
def connectionTimeout = args.get('connectionTimeout', 10)
// On récupère les identifiants de connexion SSH pour la VM
withCredentials([
@ -153,29 +161,35 @@ def runInNewVM(Map args) {
def sshArgs = "-i '${VM_SSH_KEY}' -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
// On attend que la connexion SSH soit disponible
println "En attente de l'accès SSH sur la machine ${host}..."
while(true) {
def status = sh script: "nc -zv ${host} 22", returnStatus: true
if (status == 0) {
break;
println "En attente de l'accès SSH sur la machine ${host}..."
timeout(connectionTimeout) {
while(true) {
def status = sh script: "nc -zv ${host} 22", returnStatus: true
if (status == 0) {
break;
}
sleep(5)
}
sleep(5)
}
// On créait un script temporaire à exécuter sur la machine distante
def now = System.currentTimeMillis()
def tempScriptFile = "script_${env.BUILD_ID}_${now}.sh"
writeFile(file: tempScriptFile, text: """
#!${shell}
${script.stripIndent()}
""")
def remoteShell = { script ->
// On créait un script temporaire à exécuter sur la machine distante
def now = System.currentTimeMillis()
def tempScriptFile = "script_${env.BUILD_ID}_${now}.sh"
writeFile(file: tempScriptFile, text: """
#!${shell}
${script.stripIndent()}
""")
// On transfère le script sur la machine distante et on l'exécute
sh """
scp ${sshArgs} '${tempScriptFile}' 'root@${host}:/tmp/${tempScriptFile}'
ssh ${sshArgs} root@${host} 'chmod +x /tmp/${tempScriptFile}; /tmp/${tempScriptFile}'
"""
}
// On transfère le script sur la machine distante et on l'exécute
sh """
scp ${sshArgs} '${tempScriptFile}' 'root@${host}:/tmp/${tempScriptFile}'
ssh ${sshArgs} root@${host} 'chmod +x /tmp/${tempScriptFile}; /tmp/${tempScriptFile}'
"""
body(remoteShell)
}
}
}
}
}

37
vars/nfpm.groovy Normal file
View File

@ -0,0 +1,37 @@
/**
 * Builds Debian, RPM or Alpine packages with nfpm.
 * See https://nfpm.goreleaser.com/
 *
 * Options:
 * - installDir - Directory where the nfpm binary is installed, default /usr/local/bin
 * - version - nfpm version to install, default 2.20.0
 * - forceDownload - Force (re)installation of nfpm, default false
 * - config - nfpm configuration file to use, default nfpm.yaml
 * - target - Target directory for the generated packages, default ${WORKSPACE}/dist
 * - packager - Restrict nfpm to a single packager, default "deb" (pass '' for no restriction)
 */
void call(Map options = [:]) {
    String installDir = options.get('installDir', '/usr/local/bin')
    String version = options.get('version', '2.20.0')
    Boolean forceDownload = options.get('forceDownload', false)
    String downloadUrl = options.get('downloadUrl', "https://github.com/goreleaser/nfpm/releases/download/v${version}/nfpm_${version}_Linux_x86_64.tar.gz")
    String config = options.get('config', 'nfpm.yaml')
    String target = options.get('target', env.WORKSPACE + '/dist')
    String packager = options.get('packager', 'deb')
    String nfpmBin = sh(returnStdout: true, script: 'which nfpm || exit 0').trim()
    if (nfpmBin == '' || forceDownload) {
        // Extract into installDir — previously hardcoded to /usr/local/bin,
        // which silently broke any non-default installDir (nfpmBin then
        // pointed to a file that was never installed).
        sh("""
        mkdir -p '${installDir}'
        curl -L '${downloadUrl}' > /tmp/nfpm.tar.gz
        tar -C '${installDir}' -xzf /tmp/nfpm.tar.gz
        """)
        nfpmBin = "${installDir}/nfpm"
    }
    sh("""
    mkdir -p '${target}'
    ${nfpmBin} package --config '${config}' ${packager ? '--packager ' + packager : ''} --target '${target}'
    """)
}

54
vars/pa11y.groovy Normal file
View File

@ -0,0 +1,54 @@
// Runs a pa11y accessibility audit against `url` inside a Docker container
// built from library resources and returns the raw report text produced by
// the audit script.
def audit(String url, Map params = [:]) {
    // Elvis defaults: `a ?: b` is exactly `a ? a : b`
    def reporter = params.reporter ?: 'html'
    def username = params.username ?: ''
    def password = params.password ?: ''
    def standard = params.standard ?: 'WCAG2AA'
    def includeWarnings = params.includeWarnings ?: false
    def includeNotices = params.includeNotices ?: false
    def cookie = params.cookie ?: ''
    def ignoredRules = params.ignoredRules ?: ''
    def pa11yImage = buildDockerImage()
    // Audit configuration is passed to the container as PA11Y_* variables
    def dockerArgs = """
    -e PA11Y_REPORTER='${reporter}'
    -e PA11Y_URL='${url}'
    -e PA11Y_USERNAME='${username}'
    -e PA11Y_PASSWORD='${password}'
    -e PA11Y_STANDARD='${standard}'
    -e PA11Y_INCLUDE_WARNINGS='${includeWarnings}'
    -e PA11Y_INCLUDE_NOTICES='${includeNotices}'
    -e PA11Y_COOKIE='${cookie}'
    -e PA11Y_IGNORE='${ignoredRules}'
    """
    pa11yImage.inside(dockerArgs) {
        // run-audit runs as the 'pa11y' user; give it the workspace
        sh 'chown -R pa11y: ./'
        def report = sh(
            script: 'su pa11y - /usr/local/bin/run-audit',
            returnStdout: true
        )
        return report
    }
}
// Builds the pa11y audit Docker image from resources embedded in this shared
// library. Returns the built image, tagged with the (sanitized) job name and
// build id.
def buildDockerImage() {
    dir('.pa11y') {
        def resourceFiles = [
            'com/cadoles/pa11y/Dockerfile',
            'com/cadoles/pa11y/patty.json.tmpl',
            'com/cadoles/pa11y/run-audit.sh'
        ]
        // Materialize each embedded resource under its base name
        for (res in resourceFiles) {
            def fileContent = libraryResource res
            def fileName = res.substring(res.lastIndexOf('/') + 1)
            writeFile file:fileName, text:fileContent
        }
        def safeJobName = URLDecoder.decode(env.JOB_NAME).toLowerCase().replace('/', '-').replace(' ', '-')
        def imageTag = "${safeJobName}-${env.BUILD_ID}"
        return docker.build("pa11y:${imageTag}", '.')
    }
}

40
vars/podman.groovy Normal file
View File

@ -0,0 +1,40 @@
// Builds a distributable package (via nfpm) wrapping a Podman pod that runs
// the given image. The systemd unit, pod config, post-install script and
// nfpm config are rendered with gomplate from IMAGE_NAME/IMAGE_TAG plus any
// extra entries in options.env; packages land in destDir (default
// ${WORKSPACE}/dist).
// NOTE(review): withPodmanPackagingTempDir is declared `void`, so this
// `return` value is likely always null — confirm.
String buildCadolesPodPackage(String imageName, String imageTag, Map options = [:]) {
    String destDir = options.get('destDir', env.WORKSPACE + '/dist')
    Map nfpmOptions = options.get('nfpmOptions', [:])
    nfpmOptions['target'] = destDir
    // `env` here is a plain Map passed to gomplate, shadowing the pipeline `env`
    Map env = options.get('env', [:])
    env['IMAGE_NAME'] = imageName
    env['IMAGE_TAG'] = imageTag
    return withPodmanPackagingTempDir {
        gomplate('post-install.sh.gotmpl', 'post-install.sh', env)
        gomplate('pod.service.gotmpl', 'pod.service', env)
        gomplate('pod.conf.gotmpl', 'pod.conf', env)
        gomplate('nfpm.yaml.gotmpl', 'nfpm.yaml', env)
        nfpm(nfpmOptions)
    }
}
// Runs the given closure inside a temporary directory pre-populated with the
// Podman packaging templates embedded in this library, and returns the
// closure's result.
//
// Previously declared `void`, which discarded the result even though the only
// caller (buildCadolesPodPackage) does `return withPodmanPackagingTempDir {…}`
// — it therefore always returned null.
// NOTE(review): relies on the `dir` block step returning its body's value in
// scripted pipeline — confirm with the Jenkins version in use.
def withPodmanPackagingTempDir(Closure fn) {
    File tempDir = File.createTempDir()
    tempDir.deleteOnExit()
    tempDir.mkdirs()
    return dir(tempDir.getAbsolutePath()) {
        List<String> resources = [
            'com/cadoles/podman/nfpm.yaml.gotmpl',
            'com/cadoles/podman/pod.conf.gotmpl',
            'com/cadoles/podman/pod.service.gotmpl',
            'com/cadoles/podman/post-install.sh.gotmpl',
        ]
        // Materialize each embedded template under its base name
        for (res in resources) {
            String fileContent = libraryResource res
            String fileName = res.substring(res.lastIndexOf('/') + 1)
            writeFile file:fileName, text:fileContent
        }
        fn()
    }
}

117
vars/pulp.groovy Normal file
View File

@ -0,0 +1,117 @@
import groovy.json.JsonOutput
// Uploads the given .deb package files to the Pulp API and returns the list
// of task HREFs tracking each upload.
def exportPackages(
    String credentials,
    List packages = [],
    String pulpHost = 'pulp.bbohard.lan'
) {
    def exportTasks = []
    packages.each {
        def response = httpRequest authentication: credentials, url: "https://${pulpHost}/pulp/api/v3/content/deb/packages/", httpMode: 'POST', ignoreSslErrors: true, multipartName: "file", timeout: 900, responseHandle: 'NONE', uploadFile: "${it}"
        // `def` keeps the variable local: the previous code omitted it, leaking
        // jsonResponse into the script binding.
        def jsonResponse = readJSON text: response.content
        println(jsonResponse)
        exportTasks << jsonResponse['task']
    }
    return exportTasks
}
// Resolves the Pulp HREF of the APT repository mapped to the given
// "repository level" (currently only 'dev' -> 'Cadoles4MSE').
// Fails with an explicit error when no matching repository exists, instead of
// the NullPointerException the previous code raised on `.pulp_href`.
def getRepositoryHREF(
    String credentials,
    String repositoryLevel = 'dev',
    String pulpHost = 'pulp.bbohard.lan'
) {
    def repositoriesMapping = ['dev': 'Cadoles4MSE']
    def response = httpRequest authentication: credentials, url: "https://${pulpHost}/pulp/api/v3/repositories/deb/apt/", httpMode: 'GET', ignoreSslErrors: true
    def jsonResponse = readJSON text: response.content
    println(jsonResponse)
    def repositories = jsonResponse.results
    def repositoryHREF = repositories.find { it -> it['name'] == repositoriesMapping[repositoryLevel] }
    if (!repositoryHREF) {
        error("No Pulp repository found for level '${repositoryLevel}' (expected name '${repositoriesMapping[repositoryLevel]}')")
    }
    return repositoryHREF.pulp_href
}
// Associates already-uploaded package HREFs with a Pulp repository (via its
// modify/ endpoint) and waits for the resulting task to complete.
// Returns the task's created resources.
def addToRepository(
    String credentials,
    List packagesHREF,
    String repositoryHREF,
    String pulpHost = 'pulp.bbohard.lan'
) {
    // Pulp expects absolute URLs for content units
    def packagesHREFURL = ["add_content_units": packagesHREF.collect { "https://$pulpHost$it" }]
    def postBody = JsonOutput.toJson(packagesHREFURL)
    // validResponseCodes 100:599 — HTTP errors are not raised here; the task
    // state is what is actually awaited below
    def response = httpRequest authentication: credentials, url: "https://${pulpHost}${repositoryHREF}modify/", httpMode: 'POST', requestBody: postBody, contentType: 'APPLICATION_JSON', ignoreSslErrors: true, validResponseCodes: "100:599"
    def jsonResponse = readJSON text: response.content
    return waitForTaskCompletion(credentials, jsonResponse.task)
}
// Creates a "simple" APT publication for the given repository and waits for
// the publication task to finish. Returns the task's created resources
// (the publication HREFs).
def publishRepository(
    String credentials,
    String repositoryHREF,
    String pulpHost = 'pulp.bbohard.lan'
) {
    def postBody = JsonOutput.toJson(["repository": repositoryHREF, "simple": true])
    def response = httpRequest authentication: credentials, url: "https://${pulpHost}/pulp/api/v3/publications/deb/apt/", httpMode: 'POST', requestBody: postBody, contentType: 'APPLICATION_JSON', ignoreSslErrors: true
    def jsonResponse = readJSON text: response.content
    println(jsonResponse)
    return waitForTaskCompletion(credentials, jsonResponse.task)
}
// Creates or updates a Pulp APT distribution exposing the given publication.
// When a distribution named `distributionName` already exists it is updated
// in place (PUT on its HREF); otherwise a new one is created (POST).
// Returns [existing HREF] on update, or the creation task's created
// resources on create.
def distributePublication(
    String credentials,
    String publicationHREF,
    String distributionName,
    String basePath,
    String pulpHost = 'pulp.bbohard.lan',
    String contentGuard = null
) {
    def response = httpRequest authentication: credentials, url: "https://${pulpHost}/pulp/api/v3/distributions/deb/apt/", httpMode: 'GET', ignoreSslErrors: true
    def jsonResponse = readJSON text: response.content
    def httpMode = ''
    def url = ''
    // Look for an existing distribution with the same name
    def distribution = jsonResponse.results.find { it -> it.name == distributionName}
    if (distribution) {
        httpMode = 'PUT'
        url = distribution.pulp_href
    } else {
        httpMode = 'POST'
        url = '/pulp/api/v3/distributions/deb/apt/'
    }
    def postBody = JsonOutput.toJson(["publication": publicationHREF, "name": distributionName, "base_path": basePath, "content_guard": contentGuard])
    response = httpRequest authentication: credentials, url: "https://${pulpHost}${url}", httpMode: httpMode, requestBody: postBody, contentType: 'APPLICATION_JSON', ignoreSslErrors: true, validResponseCodes: "100:599"
    jsonResponse = readJSON text: response.content
    if (distribution) {
        waitForTaskCompletion(credentials, jsonResponse.task)
        return [url]
    } else {
        return waitForTaskCompletion(credentials, jsonResponse.task)
    }
}
// Polls a Pulp task every 10 seconds until it completes, then returns its
// created resources.
//
// Fixes two issues in the previous version: a task ending in a terminal error
// state ('failed'/'canceled') made the loop spin forever, and a pointless
// extra 10 s sleep happened after the task had already completed.
def waitForTaskCompletion(
    String credentials,
    String taskHREF,
    String pulpHost = 'pulp.bbohard.lan'
) {
    while (true) {
        def response = httpRequest authentication: credentials, url: "https://${pulpHost}${taskHREF}", httpMode: 'GET', ignoreSslErrors: true
        def jsonResponse = readJSON text: response.content
        def status = jsonResponse.state
        if (status == 'completed') {
            return jsonResponse.created_resources
        }
        // Terminal error states: fail the build instead of polling forever
        if (status == 'failed' || status == 'canceled') {
            error("Pulp task '${taskHREF}' ended in state '${status}'")
        }
        sleep(10)
    }
}
// Fetches a Pulp distribution resource and returns its public base_url.
def getDistributionURL(
    String credentials,
    String resourceHREF,
    String pulpHost = 'pulp.bbohard.lan'
) {
    def response = httpRequest authentication: credentials, url: "https://${pulpHost}${resourceHREF}", httpMode: 'GET', ignoreSslErrors: true
    def jsonResponse = readJSON text: response.content
    println(jsonResponse)
    return jsonResponse.base_url
}

80
vars/sonarqube.groovy Normal file
View File

@ -0,0 +1,80 @@
// SonarQube project-scan pipeline. The project to scan is described by job
// environment variables: project_name, repository_url, ref and
// sonarqubeProjectKey (presumably injected by the triggering webhook —
// TODO confirm).
def call() {
    pipeline {
        agent {
            label 'docker'
        }
        environment {
            // Per-build checkout directory, removed in post/always
            projectDir = "${env.project_name}_${env.BUILD_ID}"
        }
        stages {
            stage("Package project") {
                // Skip scans for timer-triggered builds
                when {
                    not {
                        triggeredBy 'TimerTrigger'
                    }
                }
                steps {
                    script {
                        stage("Clone repository") {
                            // Full clone (no shallow, with tags) into projectDir,
                            // wiping any previous workspace content
                            checkout scm:
                                [
                                    $class: 'GitSCM',
                                    userRemoteConfigs: [[url: env.repository_url, credentialsId: 'jenkins-ssh-mse']],
                                    branches: [[name: env.ref]],
                                    extensions: [
                                        [$class: 'RelativeTargetDirectory', relativeTargetDir: env.projectDir ],
                                        [$class: 'CloneOption', noTags: false, shallow: false, depth: 0, reference: ''],
                                        [$class: 'WipeWorkspace' ]
                                    ]
                                ],
                                changelog: false,
                                poll: false
                        }
                        stage("Scan project") {
                            dir(env.projectDir) {
                                withCredentials([
                                    string(credentialsId: 'SONARQUBE_URL', variable: 'SONARQUBE_URL'),
                                    string(credentialsId: 'SONARQUBE_TOKEN', variable: 'SONARQUBE_TOKEN'),
                                ]) {
                                    // Run the sonar-scanner CLI container against the checked-out sources
                                    sh """
                                    docker run \
                                        --rm \
                                        -e SONAR_HOST_URL="${env.SONARQUBE_URL}" \
                                        -e SONAR_LOGIN="${env.SONARQUBE_TOKEN}" \
                                        -v "${env.WORKSPACE}/${env.projectDir}/:/usr/src" \
                                        sonarsource/sonar-scanner-cli \
                                        -Dsonar.projectKey=${env.sonarqubeProjectKey} \
                                        -Dsonar.projectVersion=${env.ref}
                                    """
                                }
                                // Notify the Rocket.Chat channel about the scan (currently disabled)
                                // rocketSend (
                                //     avatar: 'https://jenkins.cadol.es/static/b5f67753/images/headshot.png',
                                //     message: """
                                //     Le projet ${env.project_name} a été scanné par SonarQube.
                                //     - [Voir les résultats](${env.SONARQUBE_URL}/dashboard?id=${env.sonarqubeProjectKey})
                                //     - [Visualiser le job](${env.RUN_DISPLAY_URL})
                                //     @${env.sender_login}
                                //     """.stripIndent(),
                                //     rawMessage: true,
                                // )
                            }
                        }
                    }
                }
            }
        }
        post {
            always {
                // Remove the per-build checkout directory
                sh "rm -rf '${env.projectDir}'"
            }
        }
    }
}

View File

@ -0,0 +1,258 @@
import org.jenkinsci.plugins.pipeline.modeldefinition.Utils
// Standard make-based pipeline: checks out the project, builds a CI container
// image, then runs the configured `build`, `test` and `release` make targets
// inside it, with user-provided hooks around every phase and Rocket.Chat
// notifications for release success/failure.
//
// Options:
//   hooks               - Map of phase name -> Closure (see default keys below)
//   testTask/buildTask/releaseTask - make targets to run (defaults 'test'/'build'/'release')
//   jobHistory          - builds/days to keep (default '5')
//   baseDockerfile      - full Dockerfile content overriding the library default
//   baseImage           - base image used with the library default Dockerfile
//   dockerfileExtension - extra Dockerfile instructions appended to the image
//   credentials         - Jenkins credentials bindings exposed to all tasks
//   releaseBranches     - branches on which the release phase runs
void call(Map options = [:]) {
    Map hooks = options.get('hooks', [
        'pre': null,
        'pre-test': null,
        'post-test': null,
        'pre-build': null,
        'post-build': null,
        'pre-release': null,
        'post-release': null,
        'post-success': null,
        'post-always': null,
        'post-failure': null,
    ])
    String testTask = options.get('testTask', 'test')
    String buildTask = options.get('buildTask', 'build')
    String releaseTask = options.get('releaseTask', 'release')
    String jobHistory = options.get('jobHistory', '5')
    String baseDockerfile = options.get('baseDockerfile', '')
    String baseImage = options.get('baseImage', 'reg.cadoles.com/proxy_cache/library/ubuntu:22.04')
    String dockerfileExtension = options.get('dockerfileExtension', '')
    List credentials = options.get('credentials', [])
    List<String> releaseBranches = options.get('releaseBranches', ['develop', 'testing', 'stable', 'staging', 'master'])
    node {
        properties([
            buildDiscarder(logRotator(daysToKeepStr: jobHistory, numToKeepStr: jobHistory)),
        ])
        environment {
            // Set MKT_PROJECT_VERSION_BRANCH_NAME to Jenkins current branch name by default
            // See https://forge.cadoles.com/Cadoles/mktools project
            // NOTE(review): `environment { ... }` is Declarative Pipeline syntax;
            // its effect inside this scripted `node` block is questionable — confirm.
            MKT_PROJECT_VERSION_BRANCH_NAME = env.BRANCH_NAME
        }
        // Abort superseded in-flight builds of the same job (milestone pattern)
        stage('Cancel older jobs') {
            int buildNumber = env.BUILD_NUMBER as int
            if (buildNumber > 1) {
                milestone(buildNumber - 1)
            }
            milestone(buildNumber)
        }
        stage('Checkout project') {
            checkout(scm)
        }
        try {
            def containerImage = buildContainerImage(baseImage, baseDockerfile, dockerfileExtension)
            // Docker socket mounted so make targets can build/publish images;
            // host network gives test environments direct network access
            containerImage.inside('-v /var/run/docker.sock:/var/run/docker.sock --network host') {
                // Gitea repository slug ("org/project"), derived from the job name
                String repo = env.JOB_NAME
                if (env.BRANCH_NAME ==~ /^PR-.*$/) {
                    repo = env.JOB_NAME - "/${env.JOB_BASE_NAME}"
                }
                List<String> environment = prepareEnvironment()
                withEnv(environment) {
                    withCredentials(credentials) {
                        runHook(hooks, 'pre')
                        stage('Build project') {
                            runHook(hooks, 'pre-build')
                            runTask('buildTask', buildTask)
                            runHook(hooks, 'post-build')
                        }
                        stage('Run tests') {
                            runHook(hooks, 'pre-test')
                            // Test failure marks the build UNSTABLE rather than aborting
                            catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
                                def ( status, output ) = runAndCaptureTask('testTask', testTask)
                                // On PRs, post the captured test output as a comment;
                                // collapsed when longer than 10 lines
                                if (!!output.trim() && env.CHANGE_ID) {
                                    String gitCommit = sh(script: 'git rev-parse --short HEAD', returnStdout: true)
                                    String report = """
                                    |# Test report for ${gitCommit}
                                    |
                                    |<details ${output.count('\n') <= 10 ? 'open' : ''}>
                                    |
                                    |<summary>Output</summary>
                                    |
                                    |```
                                    |${output}
                                    |```
                                    |
                                    |</details>
                                    |""".trim().stripMargin()
                                    gitea.commentPullRequest(repo, env.CHANGE_ID, report)
                                }
                                if (status != 0) {
                                    throw new Exception("Task `${testTask}` failed !")
                                }
                            }
                            runHook(hooks, 'post-test')
                        }
                        stage('Release project') {
                            // Release only runs on configured release branches
                            if (releaseBranches.contains(env.BRANCH_NAME)) {
                                try {
                                    runHook(hooks, 'pre-release')
                                    runTask('releaseTask', releaseTask)
                                    runHook(hooks, 'post-release')
                                } catch (Exception ex) {
                                    rocketSend(
                                        message: """
                                        |:warning: Une erreur est survenue lors de la publication de [${repo}](https://forge.cadoles.com/${repo - env.JOB_BASE_NAME}):
                                        |
                                        | - **Commit:** [${env.GIT_COMMIT}](https://forge.cadoles.com/${repo - env.JOB_BASE_NAME}/commit/${env.GIT_COMMIT})
                                        | - **Tags:** `${env.PROJECT_VERSION_TAG}` / `${env.PROJECT_VERSION_SHORT_TAG}`
                                        |
                                        | **Erreur**
                                        |```
                                        |${ex}
                                        |```
                                        |
                                        |[Visualiser le job](${env.RUN_DISPLAY_URL})
                                        |
                                        |@${utils.getBuildUser()}
                                        """.stripMargin(),
                                        rawMessage: true
                                    )
                                    throw ex
                                }
                                rocketSend(
                                    message: """
                                    |:white_check_mark: Nouvelle publication terminée pour [${repo}](https://forge.cadoles.com/${repo - env.JOB_BASE_NAME}):
                                    |
                                    | - **Commit:** [${env.GIT_COMMIT}](https://forge.cadoles.com/${repo - env.JOB_BASE_NAME}/commit/${env.GIT_COMMIT})
                                    | - **Tags:** `${env.PROJECT_VERSION_TAG}` / `${env.PROJECT_VERSION_SHORT_TAG}`
                                    |
                                    |[Visualiser le job](${env.RUN_DISPLAY_URL})
                                    |
                                    |@${utils.getBuildUser()}
                                    """.stripMargin(),
                                    rawMessage: true
                                )
                            } else {
                                println("Current branch '${env.BRANCH_NAME}' not in releases branches (${releaseBranches}). Skipping.")
                                Utils.markStageSkippedForConditional('Release project')
                            }
                        }
                    }
                }
            }
        } catch (Exception ex) {
            runHook(hooks, 'post-failure', [ex])
            throw ex
        } finally {
            runHook(hooks, 'post-always')
            cleanWs()
        }
        // Only reached when no exception escaped the try block above
        runHook(hooks, 'post-success')
    }
}
// Builds the CI container image used by the standard make pipeline, derived
// either from `baseDockerfile` (when provided) or from the library's default
// Dockerfile stacked on top of `baseImage`; `dockerfileExtension` is appended
// in both cases.
//
// Returns the built image object. Previously declared `void`, which discarded
// the `docker.build(...)` value even though the caller assigns it
// (`def containerImage = buildContainerImage(...)`).
def buildContainerImage(String baseImage, String baseDockerfile, String dockerfileExtension) {
    String imageName = 'cadoles-standard-make-ci'
    dir(".${imageName}") {
        String dockerfile = ''
        if (baseDockerfile) {
            dockerfile = baseDockerfile
        } else {
            dockerfile = libraryResource 'com/cadoles/standard-make/Dockerfile'
            dockerfile = "FROM ${baseImage}\n" + dockerfile
        }
        dockerfile = """
        ${dockerfile}
        ${dockerfileExtension}
        """
        writeFile file:'Dockerfile', text: dockerfile
        String addLetsEncryptCA = libraryResource 'com/cadoles/common/add-letsencrypt-ca.sh'
        writeFile file:'add-letsencrypt-ca.sh', text:addLetsEncryptCA
        String safeJobName = URLDecoder.decode(env.JOB_NAME).toLowerCase().replace('/', '-').replace(' ', '-')
        String imageTag = "${safeJobName}-${env.BUILD_ID}"
        return docker.build("${imageName}:${imageTag}", '.')
    }
}
// Invokes the named hook closure from `hooks` with the given arguments.
// A missing or null hook is skipped; a non-closure value is an error.
void runHook(Map hooks, String name, List args = []) {
    def hook = hooks[name]
    if (!hook) {
        println("No hook '${name}' defined. Skipping.")
        return
    }
    if (!(hook instanceof Closure)) {
        error("Hook '${name}' seems to be defined but is not a closure !")
    }
    hook(*args)
}
// Runs `make <task>` and fails the build when the command fails.
// When no task is configured, the step is skipped.
void runTask(String name, task) {
    if (!task) {
        println("No task '${name}' defined. Skipping.")
        // The previous `return [ -1, '' ]` was copied from runAndCaptureTask
        // and is meaningless in a void method; a bare return suffices.
        return
    }
    sh(script: """#!/bin/bash
    make ${task}
    """)
}
// Runs `make <task>`, teeing its combined stdout/stderr to a temporary file.
// Returns [exit status, captured output]; returns [-1, ''] when no task is
// configured.
List runAndCaptureTask(String name, task) {
    if (!task) {
        println("No task '${name}' defined. Skipping.")
        return [ -1, '' ]
    }
    String outputFile = ".${name}-output"
    // pipefail so make's status (not tee's) is what returnStatus reports
    int status = sh(script: """#!/bin/bash
    set -eo pipefail
    make ${task} 2>&1 | tee '${outputFile}'
    """, returnStatus: true)
    String output = readFile(outputFile)
    sh(script: "rm -f '${outputFile}'")
    return [status, output]
}
// Computes the environment variables shared by all pipeline tasks:
// PROJECT_VERSION_TAG / PROJECT_VERSION_SHORT_TAG (from utils),
// GIT_COMMIT (short hash) and IS_PR. Returned as a list of "KEY=value"
// strings suitable for withEnv().
List<String> prepareEnvironment() {
    List<String> env = []
    def ( longTag, shortTag ) = utils.getProjectVersionTags()
    env += ["PROJECT_VERSION_TAG=${longTag}"]
    env += ["PROJECT_VERSION_SHORT_TAG=${shortTag}"]
    String gitCommit = sh(script:'git rev-parse --short HEAD', returnStdout: true).trim()
    env += ["GIT_COMMIT=${gitCommit}"]
    Boolean isPR = utils.isPR()
    env += ["IS_PR=${isPR ? 'true' : 'false'}"]
    return env
}

View File

@ -0,0 +1,134 @@
import org.jenkinsci.plugins.pipeline.modeldefinition.Utils
// Standard CI pipeline for Symfony applications.
//
// Builds a throwaway CI image from `baseImage`, installs composer
// dependencies inside it, then runs three checks in parallel
// (local-php-security-checker, php-cs-fixer, phpstan). Each report is posted
// as a Gitea pull-request comment when the build is a PR, otherwise printed.
//
// baseImage: docker base image for the CI container (default ubuntu:22.04).
// options.hooks: map of optional closures — 'preSymfonyAppPipeline',
//   'buildSymfonyImage', 'postSymfonyAppPipeline' — to customize the run.
// options.jobHistory: retention, used for both daysToKeepStr and numToKeepStr.
def call(String baseImage = 'ubuntu:22.04', Map options = [:]) {
    Map hooks = options.get('hooks', [:])
    String jobHistory = options.get('jobHistory', '10')
    node {
        properties([
            // NOTE(review): jobHistory is applied to both days and count
            buildDiscarder(logRotator(daysToKeepStr: jobHistory, numToKeepStr: jobHistory)),
        ])
        stage('Cancel older jobs') {
            // Milestones abort older still-running builds of the same branch
            def buildNumber = env.BUILD_NUMBER as int
            if (buildNumber > 1) milestone(buildNumber - 1)
            milestone(buildNumber)
        }
        stage('Checkout project') {
            checkout(scm)
        }
        stage('Run pre hooks') {
            runHook(hooks, 'preSymfonyAppPipeline')
        }
        stage('Run in Symfony image') {
            def symfonyImage = buildDockerImage(baseImage, hooks)
            symfonyImage.inside() {
                // For PR builds the job name ends with "/PR-<id>"; strip that
                // suffix to recover the "<owner>/<repo>" path used by the
                // gitea comment API.
                def repo = env.JOB_NAME
                if (env.BRANCH_NAME ==~ /^PR-.*$/) {
                    repo = env.JOB_NAME - "/${env.JOB_BASE_NAME}"
                }
                stage('Install composer dependencies') {
                    sh '''
                    symfony composer install
                    '''
                }
                parallel([
                    'php-security-check': {
                        stage('Check PHP security issues') {
                            // Mark the build unstable (not failed) on findings
                            catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
                                def auditReport = sh(script: 'local-php-security-checker --format=markdown || true', returnStdout: true)
                                if (auditReport.trim() != '') {
                                    if (env.CHANGE_ID) {
                                        gitea.commentPullRequest(repo, env.CHANGE_ID, auditReport)
                                    } else {
                                        print auditReport
                                    }
                                }
                                if (!auditReport.contains('No packages have known vulnerabilities.')) {
                                    throw new Exception('Dependencies check failed !')
                                }
                            }
                        }
                    },
                    'php-cs-fixer': {
                        stage('Run PHP-CS-Fixer on modified code') {
                            catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
                                // Fall back to the shared library config when the
                                // project does not ship its own
                                if ( !fileExists('.php-cs-fixer.dist.php') ) {
                                    def phpCsFixerConfig = libraryResource 'com/cadoles/symfony/.php-cs-fixer.dist.php'
                                    writeFile file:'.php-cs-fixer.dist.php', text:phpCsFixerConfig
                                }
                                // Dry-run php-cs-fixer on the files changed by the
                                // last commit only, producing a junit report
                                sh '''
                                CHANGED_FILES=$(git diff --name-only --diff-filter=ACMRTUXB "HEAD~..HEAD" | fgrep ".php" | tr "\n" " ")
                                if ! echo "${CHANGED_FILES}" | grep -qE "^(\\.php-cs-fixer(\\.dist)\\.php?|composer\\.lock)$"; then EXTRA_ARGS=$(printf -- '--path-mode=intersection -- %s' "${CHANGED_FILES}"); else EXTRA_ARGS=''; fi
                                symfony php $(which php-cs-fixer) fix --config=.php-cs-fixer.dist.php -v --dry-run --using-cache=no --format junit ${EXTRA_ARGS} > php-cs-fixer.xml || true
                                '''
                                def report = sh(script: 'junit2md php-cs-fixer.xml', returnStdout: true)
                                if (env.CHANGE_ID) {
                                    gitea.commentPullRequest(repo, env.CHANGE_ID, report)
                                } else {
                                    print report
                                }
                            }
                        }
                    },
                    'phpstan': {
                        stage('Run phpstan') {
                            catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
                                // Fall back to the shared library config when the
                                // project does not ship its own
                                if ( !fileExists('phpstan.neon') ) {
                                    def phpStanConfig = libraryResource 'com/cadoles/symfony/phpstan.neon'
                                    writeFile file:'phpstan.neon', text:phpStanConfig
                                }
                                sh '''
                                symfony php $(which phpstan) analyze -l 1 --error-format=table src > phpstan.txt || true
                                '''
                                // Wrap the raw phpstan table in a fenced markdown block
                                def report = sh(script: 'cat phpstan.txt', returnStdout: true)
                                report = '## Rapport PHPStan\n\n```\n' + report
                                report = report + '\n```\n'
                                if (env.CHANGE_ID) {
                                    gitea.commentPullRequest(repo, env.CHANGE_ID, report)
                                } else {
                                    print report
                                }
                            }
                        }
                    }
                ])
            }
        }
        stage('Run post hooks') {
            runHook(hooks, 'postSymfonyAppPipeline')
        }
    }
}
// Build the ephemeral "cadoles-symfony-ci" image used to run the Symfony
// checks, and return the built image object so callers can use .inside().
//
// Fixed: the method was declared `void` while ending with
// `return docker.build(...)`; a plain Groovy void method returns null, yet
// the caller assigns and dereferences the result. Declared `def` instead,
// consistent with the sibling tamarin `buildDockerImage()`.
//
// baseImage: base image prepended as `FROM` to the library Dockerfile.
// hooks: may contain a 'buildSymfonyImage' closure to add files to the
//   build context before the image is built.
def buildDockerImage(String baseImage, Map hooks) {
    def imageName = 'cadoles-symfony-ci'
    // Work in a dedicated hidden directory to keep the workspace root clean
    dir(".${imageName}") {
        def dockerfile = libraryResource 'com/cadoles/symfony/Dockerfile'
        writeFile file:'Dockerfile', text: "FROM ${baseImage}\n\n" + dockerfile
        def addLetsEncryptCA = libraryResource 'com/cadoles/common/add-letsencrypt-ca.sh'
        writeFile file:'add-letsencrypt-ca.sh', text:addLetsEncryptCA
        // Let projects customize the build context before building
        runHook(hooks, 'buildSymfonyImage')
        // Derive a docker-safe tag: '/' and spaces are invalid in image tags
        def safeJobName = URLDecoder.decode(env.JOB_NAME).toLowerCase().replace('/', '-').replace(' ', '-')
        def imageTag = "${safeJobName}-${env.BUILD_ID}"
        return docker.build("${imageName}:${imageTag}", '.')
    }
}
// Execute the hook closure registered under `name`, if any. A defined value
// that is not a Closure is a configuration error and fails the build.
void runHook(Map hooks, String name) {
    def hook = hooks[name]
    if (!hook) {
        println("No hook '${name}' defined. Skipping.")
        return
    }
    if (hook instanceof Closure) {
        hook()
    } else {
        error("Hook '${name}' seems to be defined but is not a closure !")
    }
}

View File

@ -80,7 +80,7 @@ def buildPackage(
}
packages = sh(script: "find '${destDir}' -name '*.deb' -type f", returnStdout: true)
.split(' ')
.split('\n')
.collect { return it.trim() }
.findAll { it != '' }
}
@ -89,6 +89,31 @@ def buildPackage(
}
// Build the Tamarin image and run it once in "prepare only" mode so the
// package build environment (base image, profile) is created ahead of time.
//
// packageProfile: tamarin packaging profile (default "debian").
// baseImage: optional base image override; empty string means tamarin default.
def prepareEnvironment(
    String packageProfile = "debian",
    String baseImage = ""
) {
    def tamarinImage
    stage("Create Tamarin environment") {
        tamarinImage = buildDockerImage()
    }
    stage("Prepare Tamarin") {
        // Mount the host docker socket so tamarin can drive docker itself;
        // env vars are only injected when the corresponding argument is set
        def dockerArgs = """
        -v /var/run/docker.sock:/var/run/docker.sock
        ${baseImage ? '-e TAMARIN_BASE_IMAGE='+baseImage : ''}
        ${packageProfile ? '-e TAMARIN_PROFILE='+packageProfile : ''}
        -e TAMARIN_PREPARE_ONLY=true
        -e TAMARIN_FORCE_REBUILD=true
        """.stripIndent()
        tamarinImage.inside(dockerArgs) {
            sh 'run-tamarin'
        }
    }
}
def buildDockerImage() {
dir ('.tamarin') {
def dockerfile = libraryResource 'com/cadoles/tamarin/Dockerfile'
@ -96,6 +121,9 @@ def buildDockerImage() {
def runTamarinScript = libraryResource 'com/cadoles/tamarin/run-tamarin.sh'
writeFile file:'run-tamarin.sh', text:runTamarinScript
def addLetsEncryptCA = libraryResource 'com/cadoles/common/add-letsencrypt-ca.sh'
writeFile file:'add-letsencrypt-ca.sh', text:addLetsEncryptCA
def safeJobName = URLDecoder.decode(env.JOB_NAME).toLowerCase().replace('/', '-').replace(' ', '-')
def imageTag = "${safeJobName}-${env.BUILD_ID}"

117
vars/utils.groovy Normal file
View File

@ -0,0 +1,117 @@
import org.jenkinsci.plugins.pipeline.modeldefinition.Utils
import org.jenkinsci.plugins.pipeline.modeldefinition.when.impl.ChangeSetConditional
// Run `body` only when `condition` is true; otherwise mark the current stage
// as skipped in the pipeline view (like declarative `when`).
void when(Boolean condition, body) {
    body.resolveStrategy = Closure.OWNER_FIRST
    body.delegate = [:]
    if (!condition) {
        Utils.markStageSkippedForConditional(STAGE_NAME)
        return
    }
    body()
}
// Best-effort lookup of the user who triggered this build: first the manual
// trigger cause, then the committer of the current commit, else 'jenkins'.
//
// NOTE(review): this method is @NonCPS but calls the `sh` pipeline step in
// the fallback branch; pipeline steps cannot run inside @NonCPS methods, so
// that branch most likely always throws and is swallowed by the catch,
// falling through to 'jenkins' — confirm and consider splitting the method.
@NonCPS
String getBuildUser() {
    def build = currentBuild.rawBuild
    String buildUser = ''
    // Try to fetch the user at the origin of the job execution
    try {
        def cause = build.getCause(hudson.model.Cause.UserIdCause.class)
        buildUser = cause.getUserName()
    } catch (Exception ex) {
        // Ignore the error
    }
    if (buildUser == '') {
        // If the user could not be resolved, fall back to the committer
        // username (local part of the current commit's author email)
        try {
            def committerUsername = sh(script: 'git --no-pager show -s --format=\'%ae\' | cut -d\'@\' -f1', returnStdout: true).trim()
            buildUser = committerUsername
        } catch (Exception ex) {
            // Ignore the error
        }
    }
    if (buildUser == '') {
        // By default, consider jenkins to be at the origin of the job
        buildUser = 'jenkins'
    }
    return buildUser
}
// Map the current git branch to a release channel name. Unknown branches are
// normalized (lowercased, '_'/'-'/' ' runs removed) and used as the channel.
String getProjectVersionDefaultChannel() {
    String branch = env.BRANCH_NAME
    Map wellKnown = [
        'develop': 'develop',
        'testing': 'testing',
        'staging': 'testing',
        'stable' : 'stable',
        'master' : 'stable',
    ]
    if (wellKnown.containsKey(branch)) {
        return wellKnown[branch]
    }
    return branch.toLowerCase().replaceAll('(_|-| )+', '')
}
// Return the short (max 3 chars) form of a release channel, used in the
// short version tag. Well-known channels have fixed abbreviations; other
// channels are compacted by stripping vowels and separators.
//
// channel: channel name as returned by getProjectVersionDefaultChannel().
String getProjectVersionShortChannel(String channel) {
    switch (channel) {
        case 'develop':
            return 'dev'
        case 'testing':
        case 'staging':
            return 'tst'
        case 'stable':
        case 'master':
            return 'stb'
        default:
            // Strip vowels (incl. 'y') and separators, keep first 3 chars.
            // Fixed: the previous pattern '(a|e|i|o|u|y_|-| )+' only matched
            // the literal sequence "y_", so 'y' and '_' alone were never
            // stripped, contrary to the evident intent.
            return channel.toLowerCase().replaceAll('(a|e|i|o|u|y|_|-| )+', '').take(3)
    }
}
// Compute the [longTag, shortTag] version pair for the current commit.
// Date and time come from the commit timestamp (Europe/Paris) so rebuilding
// the same commit yields the same tags.
List<String> getProjectVersionTags(String overrideChannel = '') {
    String channel = overrideChannel ?: getProjectVersionDefaultChannel()
    String shortChannel = getProjectVersionShortChannel(channel)
    String commitTimestamp = sh(script: 'git show -s --format=%ct', returnStdout: true).trim()
    String dateVersion = sh(script: "TZ=Europe/Paris date -d '@${commitTimestamp}' +%Y.%-m.%-d", returnStdout: true).trim()
    String timePart = sh(script: "TZ=Europe/Paris date -d '@${commitTimestamp}' +%-H%M", returnStdout: true).trim()
    String shortCommit = sh(script: 'git rev-parse --short HEAD', returnStdout: true).trim()
    String longTag = "${dateVersion}-${channel}.${timePart}.${shortCommit}"
    String shortTag = "${dateVersion}-${shortChannel}.${timePart}"
    return [ longTag, shortTag ]
}
// True when the job was triggered for a pull request
// (multibranch pipelines name PR jobs "PR-<id>").
Boolean isPR() {
    return (env.BRANCH_NAME =~ /^PR-/).asBoolean()
}
// True when any commit in the current build's changesets touches a file
// matching `pattern` (as understood by ChangeSetConditional).
def hasChanges(String pattern) {
    def conditional = new ChangeSetConditional(pattern)
    for (changeSet in currentBuild.changeSets) {
        for (entry in changeSet.items) {
            if (conditional.changeSetMatches(entry, pattern, true)) {
                return true
            }
        }
    }
    return false
}