Compare commits: podman...build_pipe
2 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | b458d075fe |  |
|  | fba9a12cb4 |  |
@@ -1,10 +1,13 @@
 import hudson.tasks.test.AbstractTestResultAction

-@Library('cadoles') _
+@Library("cadoles") _

 pipeline {

   parameters {
     text(name: 'URLS', defaultValue: 'https://msedev.crous-toulouse.fr\nhttps://msedev.crous-toulouse.fr/envole/enregistrement\nhttps://msedev.crous-toulouse.fr/envole/page/faq\nhttps://msedev.crous-toulouse.fr/envole/page/?t=liens_utiles\nhttps://msedev.crous-toulouse.fr/envole/page/?t=mentions_legales\nhttps://msedev.crous-toulouse.fr/envole/message/new\nhttps://msedev.crous-toulouse.fr/envole/recuperation/email\nhttps://msedev.crous-toulouse.fr/envole/courriel/raz', description: 'Liste des URLs à tester, une par ligne')
+    string(name: 'USERNAME', defaultValue: '', description: "Nom d'utilisateur pour l'authentification Basic Auth, si nécessaire")
+    password(name: 'PASSWORD', defaultValue: '', description: "Mot de passe pour l'authentification Basic Auth, si nécessaire")
     booleanParam(name: 'INCLUDE_WARNINGS', defaultValue: false, description: 'Inclure les avertissements')
     booleanParam(name: 'INCLUDE_NOTICES', defaultValue: false, description: 'Inclure les notifications')
   }
@@ -15,12 +18,12 @@ pipeline {

   agent {
     node {
-      label 'docker'
+      label "mse"
     }
   }

   stages {
-    stage('Run RGAA audit') {
+    stage("Run RGAA audit") {
       steps {
         script {
           def urls = params.URLS.split('\n')
@@ -28,32 +31,24 @@ pipeline {
           def count = 0
           urls.each { u ->
             stage("Audit page '${u}'") {
-              withCredentials([
-                usernamePassword(
-                  credentialsId: 'msedev-basic-auth',
-                  usernameVariable: 'MSEDEV_USERNAME',
-                  passwordVariable: 'MSEDEV_PASSWORD'
-                )
-              ]) {
               def report = pa11y.audit(u.trim(), [
                 reporter: 'junit',
-                username: env.MSEDEV_USERNAME,
-                password: env.MSEDEV_PASSWORD,
+                username: params.USERNAME,
+                password: params.PASSWORD,
                 standard: 'WCAG2AA',
                 includeNotices: params.INCLUDE_NOTICES,
                 includeWarnings: params.INCLUDE_WARNINGS,
-              ])
+              ]);

               writeFile file:"./report_${count}.xml", text:report
               count++
             }
           }
-          }

-          junit testResults: '*.xml', skipPublishingChecks: true
+          junit "*.xml"

           rocketSend (
-            channel: '#cnous-mse',
+            channel: "#cnous-mse-dev",
             avatar: 'https://jenkins.cadol.es/static/b5f67753/images/headshot.png',
             message: """
             Audit RGAA | ${testStatuses()}
@@ -64,21 +59,17 @@ pipeline {
             """.stripIndent(),
             rawMessage: true,
           )

         }
       }
     }
   }

-  post {
-    always {
-      cleanWs()
-    }
-  }
 }

 @NonCPS
 def testStatuses() {
-  def testStatus = ''
+  def testStatus = ""
   AbstractTestResultAction testResultAction = currentBuild.rawBuild.getAction(AbstractTestResultAction.class)
   if (testResultAction != null) {
     def total = testResultAction.totalCount
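A minimal sketch of the audit call as it reads after this change, with Basic Auth taken from the job's `USERNAME`/`PASSWORD` parameters rather than a `withCredentials` binding. It only restates calls shown in the hunks above; `url` is a placeholder variable and `pa11y` is the shared-library step.

```groovy
// After this change, Basic Auth comes straight from the job parameters.
def report = pa11y.audit(url, [
  reporter: 'junit',
  username: params.USERNAME,
  password: params.PASSWORD,
  standard: 'WCAG2AA',
  includeNotices: params.INCLUDE_NOTICES,
  includeWarnings: params.INCLUDE_WARNINGS,
])
writeFile file: './report_0.xml', text: report
junit '*.xml'
```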
@@ -1,76 +0,0 @@
-pipeline {
-  agent {
-    docker {
-      image "getsentry/sentry-cli"
-      args "--entrypoint="
-    }
-  }
-
-  environment {
-    projectDir = "${env.project_name}_${env.BUILD_ID}"
-  }
-
-  stages {
-
-    stage("Clone repository") {
-      steps {
-        checkout scm:
-          [
-            $class: 'GitSCM',
-            userRemoteConfigs: [[url: env.repository_url, credentialsId: 'jenkins-forge-ssh']],
-            branches: [[name: env.ref]],
-            extensions: [
-              [$class: 'RelativeTargetDirectory', relativeTargetDir: env.projectDir ],
-              [$class: 'CloneOption', noTags: false, shallow: false, depth: 0, reference: ''],
-              [$class: 'WipeWorkspace' ]
-            ]
-          ],
-          changelog: false,
-          poll: false
-      }
-    }
-
-
-    stage('Create sentry release') {
-      steps {
-        dir(env.projectDir) {
-          withCredentials([
-            string(credentialsId: 'sentry-url', variable: 'SENTRY_URL'),
-            string(credentialsId: 'sentry-release-auth-token', variable: 'SENTRY_AUTH_TOKEN')
-          ]) {
-            sh '''
-              SENTRY_CMD="sentry-cli --auth-token \"${SENTRY_AUTH_TOKEN}\" --url \"${SENTRY_URL}\""
-              PROJECT_VERSION=$(sentry-cli releases propose-version)
-
-              $SENTRY_CMD \
-                releases \
-                --org "${sentry_org}" \
-                new \
-                -p "${sentry_project}" ${PROJECT_VERSION}
-
-              (
-                $SENTRY_CMD \
-                  releases \
-                  --org "${sentry_org}" \
-                  set-commits --local \
-                  ${PROJECT_VERSION} || exit 0
-              )
-
-              $SENTRY_CMD \
-                releases \
-                --org "${sentry_org}" \
-                finalize \
-                ${PROJECT_VERSION}
-            '''
-          }
-        }
-      }
-    }
-  }
-
-  post {
-    always {
-      cleanWs()
-    }
-  }
-}
@@ -1,26 +0,0 @@
-#!/bin/sh
-
-set -eo pipefail
-
-DESTDIR=/usr/local/share/ca-certificates
-UPDATE_CERTS_CMD=update-ca-certificates
-CERTS="$(cat <<EOF
-https://letsencrypt.org/certs/isrgrootx1.pem
-https://letsencrypt.org/certs/isrg-root-x2.pem
-https://letsencrypt.org/certs/lets-encrypt-r3.pem
-https://letsencrypt.org/certs/lets-encrypt-e1.pem
-https://letsencrypt.org/certs/lets-encrypt-r4.pem
-https://letsencrypt.org/certs/lets-encrypt-e2.pem
-EOF
-)"
-
-cd "$DESTDIR"
-
-for cert in $CERTS; do
-  echo "Downloading '$cert'..."
-  filename=$(basename "$cert")
-  wget --tries=10 --timeout=30 -O "$filename" "$cert"
-  openssl x509 -in "$filename" -inform PEM -out "$filename.crt"
-done
-
-$UPDATE_CERTS_CMD
@@ -1,175 +0,0 @@
-#!/bin/bash
-
-set -eo pipefail
-
-GITEA_RELEASE_PROJECT=${GITEA_RELEASE_PROJECT}
-GITEA_RELEASE_ORG=${GITEA_RELEASE_ORG}
-GITEA_RELEASE_BASE_URL=${GITEA_BASE_URL:-https://forge.cadoles.com}
-GITEA_RELEASE_USERNAME=${GITEA_RELEASE_USERNAME}
-GITEA_RELEASE_PASSWORD=${GITEA_RELEASE_PASSWORD}
-GITEA_RELEASE_NAME=${GITEA_RELEASE_NAME}
-GITEA_RELEASE_VERSION=${GITEA_RELEASE_VERSION}
-GITEA_RELEASE_COMMITISH_TARGET=${GITEA_RELEASE_COMMITISH_TARGET}
-GITEA_RELEASE_IS_DRAFT=${GITEA_RELEASE_IS_DRAFT:-false}
-GITEA_RELEASE_IS_PRERELEASE=${GITEA_RELEASE_IS_PRERELEASE:-true}
-GITEA_RELEASE_BODY=${GITEA_RELEASE_BODY}
-GITEA_RELEASE_ATTACHMENTS=${GITEA_RELEASE_ATTACHMENTS}
-
-function check_dependencies {
-  assert_command_available 'curl'
-  assert_command_available 'jq'
-}
-
-function assert_command_available {
-  local command=$1
-  local command_path=$(which $command)
-
-  if [ -z "$command_path" ]; then
-    echo "The '$command' command could not be found. Please install it before using this script." 1>&2
-    exit 1
-  fi
-}
-
-function check_environment {
-  assert_environment GITEA_RELEASE_PROJECT
-  assert_environment GITEA_RELEASE_ORG
-  assert_environment GITEA_RELEASE_BASE_URL
-}
-
-function source_env_file {
-  if [ ! -f '.env' ]; then
-    return 0
-  fi
-  set -o allexport
-  source .env
-  set +o allexport
-}
-
-function assert_environment {
-  local name=$1
-  local value=${!name}
-
-  if [ -z "$value" ]; then
-    echo "The $"$name" environment variable is empty." 1>&2
-    exit 1
-  fi
-}
-
-function ask_credentials {
-  if [ -z "$GITEA_RELEASE_USERNAME" ]; then
-    echo -n "Username: "
-    read GITEA_RELEASE_USERNAME
-
-  fi
-  if [ -z "$GITEA_RELEASE_PASSWORD" ]; then
-    echo -n "Password: "
-    stty -echo
-    read GITEA_RELEASE_PASSWORD
-    stty echo
-    echo
-  fi
-}
-
-function retrieve_version {
-  if [ ! -z "$GITEA_RELEASE_VERSION" ]; then
-    return
-  fi
-
-  set +e
-  GITEA_RELEASE_VERSION=$(git describe --abbrev=0 --tags 2>/dev/null)
-  GITEA_RELEASE_VERSION=${GITEA_RELEASE_VERSION}
-  set -e
-}
-
-function retrieve_commitish_target {
-  if [ ! -z "$GITEA_RELEASE_COMMITISH_TARGET" ]; then
-    return
-  fi
-
-  GITEA_RELEASE_COMMITISH_TARGET=$(git log -n 1 --pretty="format:%h")
-}
-
-function create_release {
-  local payload={}
-
-  payload=$(json_set "$payload" body "$GITEA_RELEASE_BODY" true)
-  payload=$(json_set "$payload" draft $GITEA_RELEASE_IS_DRAFT)
-  payload=$(json_set "$payload" name "\"${GITEA_RELEASE_NAME:-$GITEA_RELEASE_VERSION}\"")
-  payload=$(json_set "$payload" prerelease $GITEA_RELEASE_IS_PRERELEASE)
-  payload=$(json_set "$payload" tag_name "\"${GITEA_RELEASE_VERSION:-$GITEA_RELEASE_COMMITISH_TARGET}\"")
-  payload=$(json_set "$payload" target_commitish "\"$GITEA_RELEASE_COMMITISH_TARGET\"")
-
-  local existing_release=$(gitea_api "/repos/$GITEA_RELEASE_ORG/$GITEA_RELEASE_PROJECT/releases" -XGET | jq -e ".[] | select(.tag_name == \"${GITEA_RELEASE_VERSION}\") | .id")
-
-  if [ ! -z "${existing_release}" ]; then
-    gitea_api "/repos/$GITEA_RELEASE_ORG/$GITEA_RELEASE_PROJECT/releases/${existing_release}" -XDELETE
-  fi
-
-  local tmpfile=$(mktemp)
-
-  echo "$payload" > "$tmpfile"
-
-  gitea_api "/repos/$GITEA_RELEASE_ORG/$GITEA_RELEASE_PROJECT/releases" \
-    -H "Content-Type:application/json" \
-    -d "@$tmpfile"
-
-  rm -f "$tmpfile"
-}
-
-function json_set {
-  local data=$1
-  local key=$2
-  local value=$3
-  local use_raw_file=$4
-
-  if [ "$use_raw_file" != "true" ]; then
-    echo $data | jq -cr --argjson v "$value" --arg k "$key" '.[$k] = $v'
-  else
-    local tmpfile=$(mktemp)
-    echo "$value" > "$tmpfile"
-    echo $data | jq -cr --rawfile v "$tmpfile" --arg k "$key" '.[$k] = $v'
-    rm -f "$tmpfile"
-  fi
-}
-
-function upload_release_attachments {
-  local release="$1"
-  local release_id=$(echo "$release" | jq -r .id)
-
-  if [ -z "$GITEA_RELEASE_ATTACHMENTS" ]; then
-    set +e
-    GITEA_RELEASE_ATTACHMENTS="$(ls release/*.{tar.gz,zip} 2>/dev/null)"
-    set -e
-  fi
-
-  for file in $GITEA_RELEASE_ATTACHMENTS; do
-    local filename=$(basename "$file")
-    gitea_api "/repos/$GITEA_RELEASE_ORG/$GITEA_RELEASE_PROJECT/releases/$release_id/assets?name=$filename" \
-      -H "Content-Type:multipart/form-data" \
-      -F "attachment=@$file"
-  done
-}
-
-function gitea_api {
-  local path=$1
-  local args=${@:2}
-
-  curl -L \
-    --fail \
-    -u "$GITEA_RELEASE_USERNAME:$GITEA_RELEASE_PASSWORD" \
-    ${args} \
-    "$GITEA_RELEASE_BASE_URL/api/v1$path"
-}
-
-function main {
-  check_dependencies
-  source_env_file
-  check_environment
-  ask_credentials
-  retrieve_commitish_target
-  retrieve_version
-  local release=$(create_release)
-  upload_release_attachments "$release"
-}
-
-main
@@ -1,4 +1,4 @@
-FROM golang:1.15 as envtpl
+FROM golang:1.13 as envtpl

 ARG HTTP_PROXY=
 ARG HTTPS_PROXY=
@@ -14,7 +14,7 @@ RUN git clone https://github.com/subfuzion/envtpl /src \
     -ldflags "-X main.AppVersionMetadata=$(date -u +%s)" \
     -a -installsuffix cgo -o ./bin/envtpl ./cmd/envtpl/.

-FROM alpine:3.13
+FROM alpine:3.10

 ARG HTTP_PROXY=
 ARG HTTPS_PROXY=
@@ -10,9 +10,10 @@ rm -f reports/*
 cd reports

 lighthouse \
-  "$LIGHTHOUSE_URL" \
   --no-enable-error-reporting \
   --chrome-flags="--headless --disable-dev-shm-usage --no-sandbox --disable-gpu" \
   --config=../config.js \
   --output json --output html \
-  --output-path=lighthouse
+  --output-path=lighthouse \
+  -- \
+  "$LIGHTHOUSE_URL"
@@ -35,7 +35,7 @@ RUN apk add --no-cache \
     chromium \
     bash

-RUN PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 npm install -g pa11y@^5.0.0 pa11y-reporter-html@^1.0.0 pa11y-reporter-junit
+RUN PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 npm install -g pa11y pa11y-reporter-html@^1.0.0 pa11y-reporter-junit

 RUN adduser -D pa11y

@@ -9,7 +9,6 @@ cd reports

 export PUPPETEER_EXECUTABLE_PATH=$(which chromium-browser)
 export PA11Y_REPORTER="${PA11Y_REPORTER:-html}"
-export PA11Y_STANDARD=${PA11Y_STANDARD:-WCAG2AA}

 PA11Y_ARGS=""

@@ -1,25 +0,0 @@
-{{ $serviceName := index ( .Env.IMAGE_NAME | strings.Split "/" | coll.Reverse ) 0 }}
-name: "cadoles-pod-{{ $serviceName }}"
-arch: amd64
-platform: linux
-version: "{{ strings.TrimPrefix "v" ( getenv "IMAGE_TAG" "latest" ) }}"
-version_schema: none
-version_metadata: git
-section: "{{ getenv "PACKAGE_SECTION" "default" }}"
-priority: "{{ getenv "PACKAGE_PRIORITY" "optional" }}"
-maintainer: "{{ getenv "PACKAGE_MAINTAINER" "contact@cadoles.com" }}"
-description: "{{ getenv "PACKAGE_DESCRIPTION" "" }}"
-homepage: "{{ getenv "PACKAGE_HOMEPAGE" "https://forge.cadoles.com" }}"
-license: "{{ getenv "PACKAGE_LICENCE" "GPL-3.0" }}"
-depends:
-  - podman
-scripts:
-  postinstall: post-install.sh
-contents:
-  - packager: deb
-    src: pod.service
-    dst: "/usr/lib/systemd/system/cadoles-pod-{{ $serviceName }}.service"
-  - packager: deb
-    src: pod.conf
-    dst: /etc/cadoles-pod-{{ $serviceName }}.conf
-    type: config|noreplace
@@ -1 +0,0 @@
-PODMAN_ARGS="{{ getenv "PODMAN_ARGS" "" }}"
@@ -1,24 +0,0 @@
-[Unit]
-Description={{ .Env.IMAGE_NAME }} pod service
-Wants=network-online.target
-After=network-online.target
-RequiresMountsFor=/run/containers/storage
-
-[Service]
-Type=simple
-Environment=PODMAN_SYSTEMD_UNIT=%n
-EnvironmentFile=-/etc/cadoles-pod-{{ .Env.IMAGE_NAME }}.conf
-Environment=IMAGE_NAME={{ .Env.IMAGE_NAME }} IMAGE_TAG={{ .Env.IMAGE_TAG }}
-PassEnvironment=PODMAN_ARGS IMAGE_NAME IMAGE_TAG
-Restart=on-failure
-TimeoutStopSec=70
-{{ if getenv "SYSTEMD_EXEC_STARTPRE" "" }}
-ExecStartPre={{ .Env.SYSTEMD_EXEC_STARTPRE }}
-{{ end }}
-ExecStart=/bin/sh -c "podman run ${PODMAN_ARGS} '${IMAGE_NAME}:${IMAGE_TAG}'"
-{{ if getenv "SYSTEMD_EXEC_STARTPOST" "" }}
-ExecStartPost={{ .Env.SYSTEMD_EXEC_STARTPOST }}
-{{ end }}
-
-[Install]
-WantedBy=default.target
@@ -1,79 +0,0 @@
-#!/bin/sh
-
-# Adapted from https://nfpm.goreleaser.com/tips/
-
-use_systemctl="True"
-systemd_version=0
-if ! command -V systemctl >/dev/null 2>&1; then
-  use_systemctl="False"
-else
-  systemd_version=$( systemctl --version | head -1 | sed 's/systemd //g' | cut -d' ' -f1 )
-fi
-
-SERVICE_NAME="cadoles-pod-{{ .Env.IMAGE_NAME }}"
-
-cleanup() {
-  if [ "${use_systemctl}" = "False" ]; then
-    rm -f /usr/lib/systemd/system/$SERVICE_NAME.service
-  else
-    rm -f /etc/chkconfig/$SERVICE_NAME
-    rm -f /etc/init.d/$SERVICE_NAME
-  fi
-}
-
-cleanInstall() {
-  if [ "${use_systemctl}" = "False" ]; then
-    if command -V chkconfig >/dev/null 2>&1; then
-      chkconfig --add $SERVICE_NAME
-    fi
-
-    service $SERVICE_NAME restart ||:
-  else
-    if [ "${systemd_version}" -lt 231 ]; then
-      printf "\033[31m systemd version %s is less then 231, fixing the service file \033[0m\n" "${systemd_version}"
-      sed -i "s/=+/=/g" /usr/lib/systemd/system/$SERVICE_NAME.service
-    fi
-    systemctl daemon-reload ||:
-    systemctl unmask $SERVICE_NAME ||:
-    systemctl preset $SERVICE_NAME ||:
-    systemctl enable $SERVICE_NAME ||:
-    systemctl restart $SERVICE_NAME ||:
-  fi
-}
-
-upgrade() {
-  if [ "${use_systemctl}" = "False" ]; then
-    service $SERVICE_NAME restart ||:
-  else
-    if [ "${systemd_version}" -lt 231 ]; then
-      printf "\033[31m systemd version %s is less then 231, fixing the service file \033[0m\n" "${systemd_version}"
-      sed -i "s/=+/=/g" /usr/lib/systemd/system/$SERVICE_NAME.service
-    fi
-    systemctl daemon-reload ||:
-    systemctl restart $SERVICE_NAME ||:
-  fi
-
-  echo 'Cleaning up unused images...'
-  podman image prune -f --filter "reference={{ .Env.IMAGE_NAME }}"
-}
-
-action="$1"
-if [ "$1" = "configure" ] && [ -z "$2" ]; then
-  action="install"
-elif [ "$1" = "configure" ] && [ -n "$2" ]; then
-  action="upgrade"
-fi
-
-case "$action" in
-  "1" | "install")
-    cleanInstall
-    ;;
-  "2" | "upgrade")
-    upgrade
-    ;;
-  *)
-    cleanInstall
-    ;;
-esac
-
-cleanup
@@ -1,41 +0,0 @@
-<?php
-
-$finder = PhpCsFixer\Finder::create()
-    ->in(__DIR__.'/src')
-    ->name('*.php')
-;
-
-return (new PhpCsFixer\Config())
-    ->setRules([
-        '@Symfony' => true,
-        'concat_space' => ['spacing' => 'none'],
-        'array_syntax' => ['syntax' => 'short'],
-        'combine_consecutive_issets' => true,
-        'explicit_indirect_variable' => true,
-        'no_useless_return' => true,
-        'ordered_imports' => true,
-        'no_unused_imports' => true,
-        'no_spaces_after_function_name' => true,
-        'no_spaces_inside_parenthesis' => true,
-        'ternary_operator_spaces' => true,
-        'class_definition' => ['single_line' => true],
-        'whitespace_after_comma_in_array' => true,
-        'phpdoc_add_missing_param_annotation' => ['only_untyped' => true],
-        'phpdoc_order' => true,
-        'phpdoc_types_order' => [
-            'null_adjustment' => 'always_last',
-            'sort_algorithm' => 'alpha',
-        ],
-        'phpdoc_no_empty_return' => false,
-        'phpdoc_summary' => false,
-        'general_phpdoc_annotation_remove' => [
-            'annotations' => [
-                'expectedExceptionMessageRegExp',
-                'expectedException',
-                'expectedExceptionMessage',
-                'author',
-            ],
-        ],
-    ])
-    ->setFinder($finder)
-;
@@ -1,47 +0,0 @@
-ARG PHP_SECURITY_CHECKER_VERSION=1.0.0
-ARG JQ_VERSION=1.6
-
-RUN apt-get update && \
-    DEBIAN_FRONTEND=noninteractive apt-get install -y \
-    wget tar curl ca-certificates \
-    openssl bash git unzip \
-    php-cli php-dom php-mbstring php-ctype php-xml php-iconv
-
-COPY add-letsencrypt-ca.sh /root/add-letsencrypt-ca.sh
-
-RUN bash /root/add-letsencrypt-ca.sh \
-    && rm -f /root/add-letsencrypt-ca.sh
-
-RUN wget -O /usr/local/bin/jq https://github.com/stedolan/jq/releases/download/jq-${JQ_VERSION}/jq-linux64 \
-    && chmod +x /usr/local/bin/jq
-
-# Install local-php-security-checker
-RUN wget -O /usr/local/bin/local-php-security-checker https://github.com/fabpot/local-php-security-checker/releases/download/v${PHP_SECURITY_CHECKER_VERSION}/local-php-security-checker_${PHP_SECURITY_CHECKER_VERSION}_linux_amd64 \
-    && chmod +x /usr/local/bin/local-php-security-checker
-
-# Install junit2md
-RUN junit2md_download_url=$(curl "https://forge.cadoles.com/api/v1/repos/Cadoles/junit2md/releases" -H "accept:application/json" | jq -r 'sort_by(.published_at) | reverse | .[0] | .assets[] | select(.name == "junit2md-linux-amd64.tar.gz") | .browser_download_url') \
-    && wget -O junit2md-linux-amd64.tar.gz "$junit2md_download_url" \
-    && tar -xzf junit2md-linux-amd64.tar.gz \
-    && cp junit2md-linux-amd64/junit2md /usr/local/bin/junit2md
-
-# Install composer
-RUN wget https://raw.githubusercontent.com/composer/getcomposer.org/76a7060ccb93902cd7576b67264ad91c8a2700e2/web/installer -O - -q | php -- --force --install-dir /usr/local/bin --filename composer \
-    && chmod +x /usr/local/bin/composer
-
-# Install php-cs-fixer
-RUN mkdir --parents /tools/php-cs-fixer \
-    && composer require --working-dir=/tools/php-cs-fixer friendsofphp/php-cs-fixer \
-    && ln -s /tools/php-cs-fixer/vendor/bin/php-cs-fixer /usr/local/bin/php-cs-fixer
-
-# Install php-stan
-RUN mkdir --parents /tools/phpstan \
-    && composer require --working-dir=/tools/phpstan phpstan/phpstan \
-    && ln -s /tools/phpstan/vendor/bin/phpstan /usr/local/bin/phpstan \
-    && composer require --working-dir=/tools/phpstan phpstan/phpstan-symfony \
-    && composer require --working-dir=/tools/phpstan phpstan/phpstan-doctrine
-
-# Install Symfony
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/symfony/stable/setup.deb.sh' | bash \
-    && apt update \
-    && DEBIAN_FRONTEND=noninteractive apt-get install -y symfony-cli
@@ -1,4 +0,0 @@
-includes:
-  - /tools/phpstan/vendor/phpstan/phpstan-symfony/extension.neon
-  - /tools/phpstan/vendor/phpstan/phpstan-doctrine/extension.neon
-  - /tools/phpstan/vendor/phpstan/phpstan-doctrine/rules.neon
@@ -1,15 +1,13 @@
-FROM alpine:3.12
+FROM alpine:3.8

 ARG HTTP_PROXY=
 ARG HTTPS_PROXY=
 ARG http_proxy=
 ARG https_proxy=

-ARG TAMARIN_VERSION=develop
+ARG TAMARIN_VERSION=253c774

-RUN apk add --no-cache git docker python3 bash openssl curl
+RUN apk add --no-cache git docker python3 bash

-RUN curl -k https://forge.cadoles.com/Cadoles/Jenkins/raw/branch/master/resources/com/cadoles/common/add-letsencrypt-ca.sh | bash

 RUN git clone http://forge.cadoles.com/Cadoles/Tamarin /tamarin\
     && cd /tamarin\
@@ -37,10 +37,55 @@ RUN apk --no-cache add \
     python-dev \
     sqlite-dev \
     yaml-dev \
-    sudo \
     nodejs \
     npm

+RUN pip install --upgrade pip \
+    && pip install \
+    pyClamd==0.4.0 \
+    GitPython==2.1.3 \
+    chardet==3.0.4 \
+    futures==3.2.0 \
+    pyOpenSSL==18.0.0 \
+    ndg-httpsclient==0.4.0 \
+    pyasn1==0.4.2 \
+    scapy==2.4.0 \
+    msgpack==0.5.6 \
+    Jinja2==2.10 \
+    vulndb==0.1.1 \
+    psutil==5.4.8 \
+    ds-store==1.1.2 \
+    pebble==4.3.8 \
+    acora==2.1 \
+    diff-match-patch==20121119 \
+    lz4==1.1.0 \
+    vulners==1.3.0 \
+    ipaddresses==0.0.2 \
+    PyGithub==1.21.0 \
+    pybloomfiltermmap==0.3.14 \
+    phply==0.9.1 nltk==3.0.1 \
+    tblib==0.2.0 \
+    pdfminer==20140328 \
+    lxml==3.4.4 \
+    guess-language==0.2 \
+    cluster==1.1.1b3 \
+    python-ntlm==1.0.1 \
+    halberd==0.2.4 \
+    darts.util.lru==0.5 \
+    markdown==2.6.1 \
+    termcolor==1.1.0 \
+    mitmproxy==0.13 \
+    ruamel.ordereddict==0.4.8 \
+    Flask==0.10.1 \
+    PyYAML==3.12 \
+    tldextract==1.7.2 \
+    esmre==0.3.1 \
+    bravado-core==5.12.1 \
+    subprocess32==3.5.4 \
+    && npm install -g retire \
+    && rm -rf /root/.cache/pip \
+    && apk del build-base linux-headers

 RUN adduser -D w3af

 RUN git clone --depth=1 \
@@ -49,9 +94,6 @@ RUN git clone --depth=1 \
     && rm -rf /home/w3af/w3af/.git \
    && chown -R w3af /home/w3af/w3af

-RUN cd /home/w3af/w3af \
-    && ( ./w3af_console || . /tmp/w3af_dependency_install.sh )

 COPY run-audit.sh /usr/local/bin/run-audit
 RUN chmod +x /usr/local/bin/run-audit

@@ -1,219 +0,0 @@
-/**
- * Construit, valide et publie (optionnellement) une image Docker sur le registre Cadoles (par défaut)
- *
- * Options disponibles:
- *
- *   - dockerfile - String - Chemin vers le fichier Dockerfile à utiliser pour construire l'image, par défaut "./Dockerfile"
- *   - contextDir - String - Répertoire servant de "contexte" pour la construction de l'image, par défault "./"
- *   - imageName - String - Nom de l'image à construire, par défaut ""
- *   - imageTag - String - Tag apposé sur l'image après construction, par défaut résultat de la commande `git describe --always`
- *   - gitCredentialsId - String - Identifiant des "credentials" Jenkins utilisés pour cloner le dépôt Git, par défaut "forge-jenkins"
- *   - dockerRepository - String - Nom d'hôte du registre Docker sur lequel publier l'image, par défaut "reg.cadoles.com"
- *   - dockerRepositoryCredentialsId - String - Identifiant des "credentials" Jenkins utilisés pour déployer l'image sur le registre Docker, par défault "reg.cadoles.com-jenkins"
- *   - dryRun - Boolean - Désactiver/activer la publication de l'image sur le registre Docker, par défaut "true"
- *   - skipVerifications - Boolean - Désactiver/activer les étapes de vérifications de qualité/sécurité de l'image Docker, par défaut "false"
- */
-String buildAndPublishImage(Map options = [:]) {
-  String dockerfile = options.get('dockerfile', './Dockerfile')
-  String contextDir = options.get('contextDir', '.')
-  String imageName = options.get('imageName', '')
-  String gitRef = sh(returnStdout: true, script: 'git describe --always').trim()
-  String imageTag = options.get('imageTag', gitRef)
-  String gitCredentialsId = options.get('gitCredentialsId', 'forge-jenkins')
-  String dockerRepository = options.get('dockerRepository', 'reg.cadoles.com')
-  String dockerRepositoryCredentialsId = options.get('dockerRepositoryCredentialsId', 'reg.cadoles.com-jenkins')
-  Boolean dryRun = options.get('dryRun', true)
-  Boolean skipVerifications = options.get('skipVerification', false)
-
-  String projectRepository = env.JOB_NAME
-  if (env.BRANCH_NAME ==~ /^PR-.*$/) {
-    projectRepository = env.JOB_NAME - "/${env.JOB_BASE_NAME}"
-  }
-  projectRepository = options.get('projectRepository', projectRepository)
-
-  withCredentials([
-    usernamePassword([
-      credentialsId: dockerRepositoryCredentialsId,
-      usernameVariable: 'HUB_USERNAME',
-      passwordVariable: 'HUB_PASSWORD'
-    ]),
-  ]) {
-    stage('Validate Dockerfile with Hadolint') {
-      utils.when(!skipVerifications) {
-        runHadolintCheck(dockerfile, projectRepository)
-      }
-    }
-
-    stage("Build image '${imageName}:${imageTag}'") {
-      git.withHTTPCredentials(gitCredentialsId) {
-        sh """
-          docker build \
-            --build-arg="GIT_USERNAME=${env.GIT_USERNAME}" \
-            --build-arg="GIT_PASSWORD=${env.GIT_PASSWORD}" \
-            -t '${imageName}:${imageTag}' \
-            -f '${dockerfile}' \
-            '${contextDir}'
-        """
-      }
-    }
-
-    stage('Validate image with Trivy') {
-      utils.when(!skipVerifications) {
-        runTrivyCheck("${imageName}:${imageTag}", projectRepository)
-      }
-    }
-
-    stage("Publish image '${imageName}:${imageTag}'") {
-      utils.when(!dryRun) {
-        retry(2) {
-          sh """
-            echo ${env.HUB_PASSWORD} | docker login -u '${env.HUB_USERNAME}' --password-stdin '${dockerRepository}'
-            docker push '${imageName}:${imageTag}'
-          """
-        }
-      }
-    }
-  }
-}
-
-void runHadolintCheck(String dockerfile, String projectRepository) {
-  String reportFile = ".hadolint-report-${currentBuild.startTimeInMillis}.txt"
-
-  try {
-    validateDockerfileWithHadolint(dockerfile, ['reportFile': reportFile])
-  } catch (err) {
-    unstable("Dockerfile '${dockerfile}' failed linting !")
-  } finally {
-    String lintReport = ''
-
-    if (fileExists(reportFile)) {
-      lintReport = """${lintReport}
-      |
-      |```
-      |${readFile(reportFile)}
-      |```"""
-    } else {
-      lintReport = """${lintReport}
-      |
-      |_Vérification échouée mais aucun rapport trouvé !?_ :thinking:"""
-    }
-
-    String defaultReport = '_Rien à signaler !_ :thumbsup:'
-    String report = """## Validation du Dockerfile `${dockerfile}`
-    |
-    |${lintReport ?: defaultReport}
-    """.stripMargin()
-
-    print report
-
-    if (env.CHANGE_ID) {
-      gitea.commentPullRequest(projectRepository, env.CHANGE_ID, report)
-    }
-  }
-}
-
-String validateDockerfileWithHadolint(String dockerfile, Map options = [:]) {
-  String hadolintBin = getOrInstallHadolint(options)
-  String hadolintArgs = options.get('hadolintArgs', '--no-color')
-  String reportFile = options.get('reportFile', ".hadolint-report-${currentBuild.startTimeInMillis}.txt")
-
-  sh("""#!/bin/bash
-    set -eo pipefail
-    '${hadolintBin}' '${dockerfile}' ${hadolintArgs} | tee '${reportFile}'
-  """)
-
-  return reportFile
-}
-
-void runTrivyCheck(String imageName, String projectRepository, Map options = [:]) {
-  String reportFile = ".trivy-report-${currentBuild.startTimeInMillis}.txt"
-
-  try {
-    validateImageWithTrivy(imageName, ['reportFile': reportFile])
-  } catch (err) {
-    unstable("Image '${imageName}' failed validation !")
-  } finally {
-    String lintReport = ''
-
-    if (fileExists(reportFile)) {
-      lintReport = """${lintReport}
-      |
-      |```
-      |${readFile(reportFile)}
-      |```"""
-    } else {
-      lintReport = """${lintReport}
-      |
-      |_Vérification échouée mais aucun rapport trouvé !?_ :thinking:"""
-    }
-
-    String defaultReport = '_Rien à signaler !_ :thumbsup:'
-    String report = """## Validation de l'image `${imageName}`
-    |
-    |${lintReport ?: defaultReport}
-    """.stripMargin()
-
-    print report
-
-    if (env.CHANGE_ID) {
-      gitea.commentPullRequest(projectRepository, env.CHANGE_ID, report)
-    }
-  }
-}
-
-String validateImageWithTrivy(String imageName, Map options = [:]) {
-  String trivyBin = getOrInstallTrivy(options)
-  String trivyArgs = options.get('trivyArgs', '--exit-code 1')
-  String cacheDirectory = options.get('cacheDirectory', '.trivy/.cache')
-  String cacheDefaultBranch = options.get('cacheDefaultBranch', 'develop')
-  Integer cacheMaxSize = options.get('cacheMaxSize', 250)
-  String reportFile = options.get('reportFile', ".trivy-report-${currentBuild.startTimeInMillis}.txt")
-
-  cache(maxCacheSize: cacheMaxSize, defaultBranch: cacheDefaultBranch, caches: [
-    [$class: 'ArbitraryFileCache', path: cacheDirectory, compressionMethod: 'TARGZ']
-  ]) {
-    sh("'${trivyBin}' --cache-dir '${cacheDirectory}' image -o '${reportFile}' ${trivyArgs} '${imageName}'")
-  }
-
-  return reportFile
-}
-
-String getOrInstallHadolint(Map options = [:]) {
-  String installDir = options.get('installDir', '/usr/local/bin')
-  String version = options.get('version', '2.10.0')
-  String forceDownload = options.get('forceDownload', false)
-  String downloadUrl = options.get('downloadUrl', "https://github.com/hadolint/hadolint/releases/download/v${version}/hadolint-Linux-x86_64")
-
-  String hadolintBin = sh(returnStdout: true, script: 'which hadolint || exit 0').trim()
-  if (hadolintBin == '' || forceDownload) {
-    sh("""
-      mkdir -p '${installDir}'
-      curl -o '${installDir}/hadolint' -sSL '${downloadUrl}'
-      chmod +x '${installDir}/hadolint'
-    """)
-
-    hadolintBin = "${installDir}/hadolint"
-  }
-
-  return hadolintBin
-}
-
-String getOrInstallTrivy(Map options = [:]) {
-  String installDir = options.get('installDir', '/usr/local/bin')
-  String version = options.get('version', '0.27.1')
-  String forceDownload = options.get('forceDownload', false)
-  String installScriptDownloadUrl = options.get('downloadUrl', 'https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh')
-
-  String trivyBin = sh(returnStdout: true, script: 'which trivy || exit 0').trim()
-  if (trivyBin == '' || forceDownload) {
-    sh("""
-      mkdir -p '${installDir}'
-      curl -sfL '${installScriptDownloadUrl}' | sh -s -- -b '${installDir}' v${version}
-      chmod +x '${installDir}/trivy'
-    """)
-
-    trivyBin = "${installDir}/trivy"
-  }
-
-  return trivyBin
-}
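A hedged usage sketch of the `buildAndPublishImage` helper removed above, based only on the options documented in its comment block; the image name is a placeholder.

```groovy
// Illustrative call of the removed helper; the image name is a placeholder.
buildAndPublishImage(
  imageName: 'reg.cadoles.com/example/my-image',
  dockerfile: './Dockerfile',
  contextDir: '.',
  dryRun: true,              // default: do not push to the registry
  skipVerifications: false,  // run the Hadolint and Trivy checks
)
```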
@@ -1,14 +1,13 @@
-import java.util.regex.Matcher

 // Basic port of https://forge.cadoles.com/Cadoles/cpkg
 def call(Map params = [:]) {

   def currentRef = sh(script: 'git rev-parse HEAD', returnStdout: true).trim()
+  def baseRef = params.baseRef ? params.baseRef : currentRef
   def distRepo = params.distRepo ? params.distRepo : 'dev'
   def dist = params.dist ? params.dist : 'eole'
   def distVersion = params.distVersion ? params.distVersion : '2.7.0'
   def distBranchName = params.distBranchName ? params.distBranchName : env.GIT_BRANCH
   def gitCredentials = params.gitCredentials ? params.gitCredentials : null
-  def gitCredentialsType = params.gitCredentialsType ? params.gitCredentialsType : 'http'
   def gitEmail = params.gitEmail ? params.gitEmail : 'jenkins@cadoles.com'
   def gitUsername = params.gitUsername ? params.gitUsername : 'Jenkins'
   def skipCi = params.containsKey('skipCi') ? params.skipCi : false
@@ -28,7 +27,7 @@ def call(Map params = [:]) {
   sh("git config --add remote.origin.fetch +refs/heads/${distBranch}:refs/remotes/origin/${distBranch}")

   // Update branches
-  sh('git fetch --all')
+  sh("git fetch --all")

   // Merge currentRef into distBranch and push
   sh("git checkout -b '${distBranch}' 'origin/${distBranch}'")
@@ -40,7 +39,7 @@ def call(Map params = [:]) {
   sh("git merge ${currentRef}")

   if (!skipPush) {
-    sh('git push')
+    sh("git push")
   } else {
     println("Skipping push. Set skipPush param to 'true' to enable remote repository update.")
   }
@@ -61,25 +60,7 @@ def call(Map params = [:]) {

   println("Last version number is '${lastVersionNumber}'")

-  String versionRoot = extractVersionRoot(lastVersionNumber)
-  String versionNumber = ''
-
-  if (versionRoot) {
-    versionNumber = versionRoot
-  } else {
-    versionNumber = sh(
-      script: "git describe --always ${currentRef}",
-      returnStdout: true,
-    ).split('/').last().trim()
-
-    Boolean isCommitRef = !versionNumber.matches(/^[0-9]+\.[0-9]+\.[0-9]+.*$/)
-
-    if (isCommitRef) {
-      versionNumber = "0.0.0-${versionNumber}"
-    }
-  }
-
-  versionNumber = "${versionNumber}-b${env.BUILD_NUMBER}"
-
+  def versionNumber = incrementVersionNumber(lastVersionNumber)
+
   println("New version number will be '${versionNumber}'")
   result['newVersionNumber'] = versionNumber
@@ -94,11 +75,11 @@ def call(Map params = [:]) {
     tagComment += ' [ci skip]'
   }

-  sh("git tag -f -a '${tag}' -m '${tagComment}'")
+  sh("git tag -a '${tag}' -m '${tagComment}'")

   // Push tag
   if (!skipPush) {
-    sh('git push --tags -f')
+    sh("git push --tags")
   } else {
     println("Skipping push. Set skipPush param to 'true' to enable remote repository update.")
   }
@@ -108,17 +89,9 @@ def call(Map params = [:]) {
   }

   if (gitCredentials != null) {
-    if (gitCredentialsType == 'http') {
     git.withHTTPCredentials(gitCredentials) {
       proc.call()
     }
-    } else if (gitCredentialsType == 'ssh') {
-      git.withSSHCredentials(gitCredentials) {
-        proc.call()
-      }
-    } else {
-      throw new Exception("Unknown git credentials type '${gitCredentialsType}' ! Expected 'ssh' or 'http' (default).")
-    }
   } else {
     proc.call()
   }
@@ -126,13 +99,20 @@ def call(Map params = [:]) {
   return result
 }

-@NonCPS
-String extractVersionRoot(String fullVersion) {
-  Matcher fullVersionMatcher = fullVersion =~ /^([0-9]+\.[0-9]+\.[0-9]+).*$/
-
-  if (!fullVersionMatcher.matches()) {
-    return ""
-  }
-
-  return fullVersionMatcher.group(1)
+def incrementVersionNumber(String versionNumber) {
+  // Split versionNumber (typical pattern: <major>.<minor>.<patch>)
+  def versionNumberParts = versionNumber.split(/\./)
+  // Extract path number
+  def patchNumber = versionNumberParts.last()
+
+  // Split patch number (typical pattern: <patch>-<build>)
+  def patchNumberParts = patchNumber.split('-')
+
+  // If version number matches pattern <major>.<minor>.<patch>-<build>
+  if (patchNumberParts.size() > 1) {
+    return versionNumberParts[0..-2].join('.') + '.' + patchNumberParts[0..-2].join('-') + '-' + (patchNumberParts.last().toInteger() + 1)
+  } else { // Else version number matches pattern <major>.<minor>.<patch>
+    return versionNumberParts[0..-2].join('.') + '.' + (patchNumber.toInteger() + 1)
+  }
 }
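A standalone restatement of the `incrementVersionNumber` logic added above, with two worked examples for illustration only (the input versions are hypothetical).

```groovy
// Restatement of the version-increment logic shown above, for illustration only.
def incrementVersionNumber(String versionNumber) {
  def versionNumberParts = versionNumber.split(/\./)
  def patchNumber = versionNumberParts.last()
  def patchNumberParts = patchNumber.split('-')
  if (patchNumberParts.size() > 1) {
    // <major>.<minor>.<patch>-<build>: bump the build suffix
    return versionNumberParts[0..-2].join('.') + '.' + patchNumberParts[0..-2].join('-') + '-' + (patchNumberParts.last().toInteger() + 1)
  } else {
    // <major>.<minor>.<patch>: bump the patch number
    return versionNumberParts[0..-2].join('.') + '.' + (patchNumber.toInteger() + 1)
  }
}

assert incrementVersionNumber('2.7.1') == '2.7.2'
assert incrementVersionNumber('2.7.1-3') == '2.7.1-4'
```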
@@ -1,8 +1,7 @@
 def waitForRepoPackage(String packageName, Map params = [:]) {
   def expectedVersion = params.expectedVersion ? params.expectedVersion : null
   def delay = params.delay ? params.delay : 30
-  def waitTimeout = params.timeout ? params.timeout : 2400
-  def asPattern = params.containsKey("asPattern") ? params.asPattern : true
+  def waitTimeout = params.timeout ? params.timeout : 1200

   def message = "Waiting for package '${packageName}'"
   if (expectedVersion != null) {
@@ -29,9 +28,7 @@ def waitForRepoPackage(String packageName, Map params = [:]) {
   }

   def versionFound = packages.find {
-    def matches = asPattern ? it['version'] =~ expectedVersion : it['version'] == expectedVersion
-    println("Comparing expected version '${expectedVersion}' to '${it['version']}': ${matches}")
-    return matches
+    return it['version'] =~ expectedVersion
   }

   if (versionFound) {
@@ -79,10 +76,5 @@ def listRepoPackages(Map params = [:]) {
     }
   }

-  println "Found packages:"
-  packages.each{
-    println " - Package: ${it.key}, Version: ${it.value['version']}"
-  }
-
   return packages
 }
@@ -28,17 +28,3 @@ def withHTTPCredentials(String credentialsId, Closure fn) {
     }
   }
 }
-
-def withSSHCredentials(String credentialsId, Closure fn) {
-  def randomUUID = UUID.randomUUID().toString()
-  withCredentials([
-    sshUserPrivateKey(
-      credentialsId: credentialsId,
-      keyFileVariable: 'GIT_SSH_IDENTITY_FILE',
-    )
-  ]) {
-    withEnv(['GIT_SSH_VARIANT=ssh', 'GIT_SSH_COMMAND=ssh -i $GIT_SSH_IDENTITY_FILE -o IdentitiesOnly=yes -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null']) {
-      fn.call()
-    }
-  }
-}
@@ -1,104 +0,0 @@
-def commentPullRequest(String repo, String issueId, String comment, Integer commentIndex = -1) {
-  comment = comment.replaceAll('"', '\\"')
-  withCredentials([
-    string(credentialsId: 'GITEA_JENKINS_PERSONAL_TOKEN', variable: 'GITEA_TOKEN'),
-  ]) {
-    writeFile(file: '.prComment', text: comment)
-    sh """#!/bin/bash
-    set -xeo pipefail
-
-    previous_comment_id=null
-
-    if [ "${commentIndex}" != "-1" ]; then
-      # Récupération si il existe du commentaire existant
-      previous_comment_id=\$(curl -v --fail \
-        -H "Authorization: token ${GITEA_TOKEN}" \
-        -H "Content-Type: application/json" \
-        https://forge.cadoles.com/api/v1/repos/${repo}/issues/${issueId}/comments \
-        | jq -c '[ .[] | select(.user.login=="jenkins") ] | .[${commentIndex}] | .id' \
-      )
-    fi
-
-    # Génération du payload pour l'API Gitea
-    echo '{}' | jq -c --rawfile body .prComment '.body = \$body' > payload.json
-
-    if [[ "\$previous_comment_id" == "null" ]]; then
-      # Création du commentaire via l'API Gitea
-      curl -v --fail \
-        -XPOST \
-        -H "Authorization: token ${GITEA_TOKEN}" \
-        -H "Content-Type: application/json" \
-        -d @payload.json \
-        https://forge.cadoles.com/api/v1/repos/${repo}/issues/${issueId}/comments
-    else
-      # Modification du commentaire existant
-      curl -v --fail \
-        -XPATCH \
-        -H "Authorization: token ${GITEA_TOKEN}" \
-        -H "Content-Type: application/json" \
-        -d @payload.json \
-        https://forge.cadoles.com/api/v1/repos/${repo}/issues/comments/\$previous_comment_id
-    fi
-    """
-  }
-}
-
-// Effectue une "release" sur Gitea pour le <ORG>/<PROJET> donné.
-def release(String credentialsId, String org, String project, Map options = [:]) {
-  def isDraft = options.get('isDraft', false)
-  def baseUrl = options.get('baseUrl', 'https://forge.cadoles.com')
-  def defaultVersion = sh(returnStdout: true, script: 'git describe --always').trim()
-  def releaseVersion = options.get('releaseVersion', defaultVersion)
-  def releaseName = options.get('releaseName', releaseVersion)
-  def commitishTarget = options.get('commitishTarget', env.GIT_COMMIT)
-
-  def defaultIsPrerelease = true
-  try {
-    sh(script: "git describe --exact-match ${GIT_COMMIT}")
-    defaultIsPrerelease = false
-  } catch (err) {
-    println "Could not find tag associated with commit '${GIT_COMMIT}' ! Using 'prerelease' as default."
-  }
-
-  def isPrerelease = options.get('isPrerelease', defaultIsPrerelease)
-  def body = options.get('body', '')
-  def attachments = options.get('attachments', [])
-
-  def scriptTempDir = ".gitea-release-script-${System.currentTimeMillis()}"
-  sh("mkdir -p '${scriptTempDir}'")
-
-  def giteaReleaseScript = "${scriptTempDir}/gitea-release.sh"
-
-  def giteaReleaseScriptContent = libraryResource 'com/cadoles/gitea/gitea-release.sh'
-  writeFile file: giteaReleaseScript, text:giteaReleaseScriptContent
-  sh("chmod +x '${giteaReleaseScript}'")
-
-  try {
-    withCredentials([
-      usernamePassword(
-        credentialsId: credentialsId,
-        usernameVariable: 'GITEA_RELEASE_USERNAME',
-        passwordVariable: 'GITEA_RELEASE_PASSWORD'
-      )
-    ]) {
-      sh """
-        export GITEA_RELEASE_PROJECT="${project}"
-        export GITEA_RELEASE_ORG="${org}"
-        export GITEA_RELEASE_BASE_URL="${baseUrl}"
-        export GITEA_RELEASE_VERSION="${releaseVersion}"
-        export GITEA_RELEASE_NAME="${releaseName}"
-        export GITEA_RELEASE_COMMITISH_TARGET="${commitishTarget}"
-        export GITEA_RELEASE_IS_DRAFT="${isDraft}"
-        export GITEA_RELEASE_IS_PRERELEASE="${isPrerelease}"
-        export GITEA_RELEASE_BODY="${body}"
-        export GITEA_RELEASE_ATTACHMENTS="${attachments.join(' ')}"
-
-        ${giteaReleaseScript}
-      """
-    }
-  } finally {
-    dir(scriptTempDir) {
-      deleteDir()
-    }
-  }
-}
@@ -1,46 +0,0 @@
-void call(String sourceTemplate, String destFile, Map env = [:], Map options = [:]) {
-  String gomplateBin = getOrInstallGomplate(options)
-
-  sh """
-    ${exportEnvMap(env)}
-    ${gomplateBin} -f '${sourceTemplate}' > '${destFile}'
-  """
-}
-
-String exportEnvMap(Map env) {
-  String exports = ''
-
-  env.each { item ->
-    exports = """
-      ${exports}
-      export ${item.key}="${item.value}"
-    """
-  }
-
-  return exports
-}
-
-String getOrInstallGomplate(Map options = [:]) {
-  String installDir = options.get('installDir', '/usr/local/bin')
-  String version = options.get('version', '3.10.0')
-  Boolean forceDownload = options.get('forceDownload', false)
-  String downloadUrl = options.get('downloadUrl', "https://github.com/hairyhenderson/gomplate/releases/download/v${version}/gomplate_linux-amd64")
-
-  String gomplateBin = ''
-
-  lock("${env.NODE_NAME}:gomplate-install") {
-    gomplateBin = sh(returnStdout: true, script: 'which gomplate || exit 0').trim()
-
-    if (gomplateBin == '' || forceDownload) {
-      sh("""
-        mkdir -p '${installDir}'
-        curl -o '${installDir}/gomplate' -sSL '${downloadUrl}'
-        chmod +x '${installDir}/gomplate'
-      """)
-
-      gomplateBin = "${installDir}/gomplate"
-    }
-  }
-
-  return gomplateBin
-}
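A short usage sketch of the removed `gomplate` step, matching how it is invoked by the podman packaging helper later in this diff; the environment values are placeholders consumed by the templates shown above.

```groovy
// Render a gotmpl template with an explicit environment map
// (removed step's signature: call(sourceTemplate, destFile, env, options)).
gomplate('pod.conf.gotmpl', 'pod.conf', [
  IMAGE_NAME: 'example/my-service',  // placeholder values
  IMAGE_TAG : 'latest',
])
```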
@@ -1,16 +1,12 @@
 def call(String name) {
-  def filepath = "${env.WORKSPACE}/.jenkins/${name}.groovy"
+  def rootDir = pwd()
+  def filepath = "${rootDir}/.jenkins/${name}.groovy"
   def exists = fileExists(filepath)
   if (!exists) {
     println("No hook '${filepath}' script. Skipping.")
     return
   }
   def hook = load(filepath)

-  if (hook == null) {
-    error("Hook '${filepath}' seems to be null. Did you forget to add 'return this' at the end of the script ?")
-  }
-
   if(hook.metaClass.respondsTo(hook, 'exec')) {
     hook.exec()
   } else {
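A minimal sketch of a hook script that this loader expects under `.jenkins/<name>.groovy`: it should expose an `exec()` method and end with `return this` so that `load()` yields a usable object. The file name and body are illustrative.

```groovy
// .jenkins/build.groovy -- illustrative hook file; the loader calls exec() when it exists.
def exec() {
  echo 'Running the project-specific build hook'
}

return this
```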
@@ -13,15 +13,6 @@ def getRandomDeliveryAttachment(Integer probability = 25) {
     'https://media.giphy.com/media/QBRlXHKV5mpbLJ4prc/giphy.gif',
     'https://media.giphy.com/media/NOsfNQGivMFry/giphy.gif',
     'https://media.giphy.com/media/M1vu1FJnW6gms/giphy.gif',
-    'https://media.giphy.com/media/555x0gFF89OhVWPkvb/giphy.gif',
-    'https://media.giphy.com/media/9RZu6ahd8LIYHQlGUD/giphy.gif',
-    'https://media.giphy.com/media/9RZu6ahd8LIYHQlGUD/giphy.gif',
-    'https://media.giphy.com/media/W1fFHj6LvyTgfBNdiz/giphy.gif',
-    'https://media.giphy.com/media/1g2JyW7p6mtZc6bOEY/giphy.gif',
-    'https://media.giphy.com/media/ORiFE3ijpNaIWDoOqP/giphy.gif',
-    'https://media.giphy.com/media/r16Zmuvt1hSTK/giphy.gif',
-    'https://media.giphy.com/media/bF8Tvy2Ta0mqxXgaPV/giphy.gif',
-    'https://media.giphy.com/media/C0XT6BmLC3nGg/giphy.gif'
   ]
   Random rnd = new Random()
   if (rnd.nextInt(100) > probability) {
@@ -1,37 +0,0 @@
/**
 * Generate Debian, RPM and Alpine (apk) packages with nfpm
 * See https://nfpm.goreleaser.com/
 *
 * Options:
 * - installDir - Installation directory of the nfpm binary, defaults to /usr/local/bin
 * - version - Version of nfpm to install, defaults to 2.20.0
 * - forceDownload - Force the (re)installation of nfpm, defaults to false
 * - config - nfpm configuration file to use, defaults to nfpm.yaml
 * - target - Target directory for nfpm, defaults to ./dist
 * - packager - Restrict nfpm to a specific packager, defaults to "deb"
 */
void call(Map options = [:]) {
    String installDir = options.get('installDir', '/usr/local/bin')
    String version = options.get('version', '2.20.0')
    Boolean forceDownload = options.get('forceDownload', false)
    String downloadUrl = options.get('downloadUrl', "https://github.com/goreleaser/nfpm/releases/download/v${version}/nfpm_${version}_Linux_x86_64.tar.gz")
    String config = options.get('config', 'nfpm.yaml')
    String target = options.get('target', env.WORKSPACE + '/dist')
    String packager = options.get('packager', 'deb')

    String nfpmBin = sh(returnStdout: true, script: 'which nfpm || exit 0').trim()
    if (nfpmBin == '' || forceDownload) {
        sh("""
        mkdir -p '${installDir}'
        curl -L '${downloadUrl}' > /tmp/nfpm.tar.gz
        tar -C '${installDir}' -xzf /tmp/nfpm.tar.gz
        """)

        nfpmBin = "${installDir}/nfpm"
    }

    sh("""
    mkdir -p '${target}'
    ${nfpmBin} package --config '${config}' ${packager ? '--packager ' + packager : ''} --target '${target}'
    """)
}
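A minimal sketch of how the step defined by this (now removed) file was invoked from a pipeline; the values shown are simply the documented defaults spelled out explicitly:

// Build a .deb package from the nfpm.yaml at the repository root
// and write it to ${WORKSPACE}/dist (the documented defaults).
nfpm([
    config  : 'nfpm.yaml',
    packager: 'deb',
    target  : "${env.WORKSPACE}/dist",
])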
@@ -1,44 +0,0 @@
void buildCadolesPodPackage(String imageName, String imageTag, Map options = [:]) {
    String destDir = options.get('destDir', env.WORKSPACE + '/dist')
    Map nfpmOptions = options.get('nfpmOptions', [:])

    nfpmOptions['target'] = destDir

    Map env = options.get('env', [:])

    env['IMAGE_NAME'] = imageName
    env['IMAGE_TAG'] = imageTag

    withPodmanPackagingTempDir {
        gomplate('post-install.sh.gotmpl', 'post-install.sh', env)
        gomplate('pod.service.gotmpl', 'pod.service', env)
        gomplate('pod.conf.gotmpl', 'pod.conf', env)
        gomplate('nfpm.yaml.gotmpl', 'nfpm.yaml', env)

        nfpm(nfpmOptions)
    }
}

void withPodmanPackagingTempDir(Closure fn) {
    File tempDir = File.createTempDir()
    tempDir.deleteOnExit()

    tempDir.mkdirs()

    dir(tempDir.getAbsolutePath()) {
        List<String> resources = [
            'com/cadoles/podman/nfpm.yaml.gotmpl',
            'com/cadoles/podman/pod.conf.gotmpl',
            'com/cadoles/podman/pod.service.gotmpl',
            'com/cadoles/podman/post-install.sh.gotmpl',
        ]

        for (res in resources) {
            String fileContent = libraryResource res
            String fileName = res.substring(res.lastIndexOf('/') + 1)
            writeFile file: fileName, text: fileContent
        }

        fn()
    }
}
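A sketch of a call site for this packaging helper, assuming it lived in vars/podman.groovy (suggested by the com/cadoles/podman resource paths); the image name, tag and template variables are illustrative:

// Illustrative values; only the option keys come from the helper above.
podman.buildCadolesPodPackage('myapp', '1.2.3', [
    destDir    : "${env.WORKSPACE}/dist",
    nfpmOptions: [packager: 'deb'],
    env        : [POD_PORT: '8080'], // extra variables for the gomplate templates
])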
vars/pulp.groovy
@@ -1,117 +0,0 @@
import groovy.json.JsonOutput

def exportPackages(
    String credentials,
    List packages = [],
    String pulpHost = 'pulp.bbohard.lan'
) {
    def exportTasks = []
    packages.each {
        def response = httpRequest authentication: credentials, url: "https://${pulpHost}/pulp/api/v3/content/deb/packages/", httpMode: 'POST', ignoreSslErrors: true, multipartName: "file", timeout: 900, responseHandle: 'NONE', uploadFile: "${it}"
        jsonResponse = readJSON text: response.content
        println(jsonResponse)
        exportTasks << jsonResponse['task']
    }
    return exportTasks
}

def getRepositoryHREF(
    String credentials,
    String repositoryLevel = 'dev',
    String pulpHost = 'pulp.bbohard.lan'
) {
    def repositoriesMapping = ['dev': 'Cadoles4MSE']
    def response = httpRequest authentication: credentials, url: "https://${pulpHost}/pulp/api/v3/repositories/deb/apt/", httpMode: 'GET', ignoreSslErrors: true
    def jsonResponse = readJSON text: response.content
    println(jsonResponse)
    def repositories = jsonResponse.results
    def repositoryHREF = repositories.find { it -> it['name'] == repositoriesMapping[repositoryLevel] }
    return repositoryHREF.pulp_href
}

def addToRepository(
    String credentials,
    List packagesHREF,
    String repositoryHREF,
    String pulpHost = 'pulp.bbohard.lan'
) {
    def packagesHREFURL = ["add_content_units": packagesHREF.collect { "https://$pulpHost$it" }]
    def postBody = JsonOutput.toJson(packagesHREFURL)
    def response = httpRequest authentication: credentials, url: "https://${pulpHost}${repositoryHREF}modify/", httpMode: 'POST', requestBody: postBody, contentType: 'APPLICATION_JSON', ignoreSslErrors: true, validResponseCodes: "100:599"
    def jsonResponse = readJSON text: response.content
    return waitForTaskCompletion(credentials, jsonResponse.task)
}

def publishRepository(
    String credentials,
    String repositoryHREF,
    String pulpHost = 'pulp.bbohard.lan'
) {
    def postBody = JsonOutput.toJson(["repository": repositoryHREF, "simple": true])
    def response = httpRequest authentication: credentials, url: "https://${pulpHost}/pulp/api/v3/publications/deb/apt/", httpMode: 'POST', requestBody: postBody, contentType: 'APPLICATION_JSON', ignoreSslErrors: true
    def jsonResponse = readJSON text: response.content
    println(jsonResponse)
    return waitForTaskCompletion(credentials, jsonResponse.task)
}

def distributePublication(
    String credentials,
    String publicationHREF,
    String distributionName,
    String basePath,
    String pulpHost = 'pulp.bbohard.lan',
    String contentGuard = null
) {
    def response = httpRequest authentication: credentials, url: "https://${pulpHost}/pulp/api/v3/distributions/deb/apt/", httpMode: 'GET', ignoreSslErrors: true
    def jsonResponse = readJSON text: response.content
    def httpMode = ''
    def url = ''
    def distribution = jsonResponse.results.find { it -> it.name == distributionName }
    if (distribution) {
        httpMode = 'PUT'
        url = distribution.pulp_href
    } else {
        httpMode = 'POST'
        url = '/pulp/api/v3/distributions/deb/apt/'
    }
    def postBody = JsonOutput.toJson(["publication": publicationHREF, "name": distributionName, "base_path": basePath, "content_guard": contentGuard])
    response = httpRequest authentication: credentials, url: "https://${pulpHost}${url}", httpMode: httpMode, requestBody: postBody, contentType: 'APPLICATION_JSON', ignoreSslErrors: true, validResponseCodes: "100:599"
    jsonResponse = readJSON text: response.content
    if (distribution) {
        waitForTaskCompletion(credentials, jsonResponse.task)
        return [url]
    } else {
        return waitForTaskCompletion(credentials, jsonResponse.task)
    }
}

def waitForTaskCompletion(
    String credentials,
    String taskHREF,
    String pulpHost = 'pulp.bbohard.lan'
) {
    def status = ''
    def created_resources = []
    while (status != 'completed') {
        def response = httpRequest authentication: credentials, url: "https://${pulpHost}${taskHREF}", httpMode: 'GET', ignoreSslErrors: true
        def jsonResponse = readJSON text: response.content
        status = jsonResponse.state
        if (status == 'completed') {
            created_resources = jsonResponse.created_resources
        }
        sleep(10)
    }
    return created_resources
}

def getDistributionURL(
    String credentials,
    String resourceHREF,
    String pulpHost = 'pulp.bbohard.lan'
) {
    def response = httpRequest authentication: credentials, url: "https://${pulpHost}${resourceHREF}", httpMode: 'GET', ignoreSslErrors: true
    def jsonResponse = readJSON text: response.content
    println(jsonResponse)
    return jsonResponse.base_url
}
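Taken together, these helpers cover the whole publication flow: upload packages, attach them to a repository, publish it, then expose the publication through a distribution. A condensed sketch of that sequence; the credentials ID, package path, distribution name and base path are illustrative:

// Illustrative values; the call chain mirrors the helpers of vars/pulp.groovy.
String creds = 'pulp-api'
def tasks = pulp.exportPackages(creds, ['dist/myapp_1.2.3_amd64.deb'])
def packagesHREF = tasks.collectMany { pulp.waitForTaskCompletion(creds, it) }
def repositoryHREF = pulp.getRepositoryHREF(creds, 'dev')
pulp.addToRepository(creds, packagesHREF, repositoryHREF)
def publication = pulp.publishRepository(creds, repositoryHREF)
def distribution = pulp.distributePublication(creds, publication[0], 'dev-repo', 'dev')
echo "Repository published at ${pulp.getDistributionURL(creds, distribution[0])}"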
@@ -1,80 +0,0 @@
// Project scan pipeline using SonarQube
def call() {
    pipeline {
        agent {
            label 'docker'
        }

        environment {
            projectDir = "${env.project_name}_${env.BUILD_ID}"
        }

        stages {
            stage("Package project") {
                when {
                    not {
                        triggeredBy 'TimerTrigger'
                    }
                }
                steps {
                    script {
                        stage("Clone repository") {
                            checkout scm:
                                [
                                    $class: 'GitSCM',
                                    userRemoteConfigs: [[url: env.repository_url, credentialsId: 'jenkins-forge-ssh']],
                                    branches: [[name: env.ref]],
                                    extensions: [
                                        [$class: 'RelativeTargetDirectory', relativeTargetDir: env.projectDir],
                                        [$class: 'CloneOption', noTags: false, shallow: false, depth: 0, reference: ''],
                                        [$class: 'WipeWorkspace']
                                    ]
                                ],
                                changelog: false,
                                poll: false
                        }

                        stage("Scan project") {
                            dir(env.projectDir) {
                                withCredentials([
                                    string(credentialsId: 'SONARQUBE_URL', variable: 'SONARQUBE_URL'),
                                    string(credentialsId: 'SONARQUBE_TOKEN', variable: 'SONARQUBE_TOKEN'),
                                ]) {
                                    sh """
                                    docker run \
                                        --rm \
                                        -e SONAR_HOST_URL="${env.SONARQUBE_URL}" \
                                        -e SONAR_LOGIN="${env.SONARQUBE_TOKEN}" \
                                        -v "${env.WORKSPACE}/${env.projectDir}/:/usr/src" \
                                        sonarsource/sonar-scanner-cli \
                                        -Dsonar.projectKey=${env.sonarqubeProjectKey} \
                                        -Dsonar.projectVersion=${env.ref}
                                    """
                                }

                                // Notify the Rocket.Chat channel about the scan
                                // rocketSend (
                                //     avatar: 'https://jenkins.cadol.es/static/b5f67753/images/headshot.png',
                                //     message: """
                                //     Le projet ${env.project_name} a été scanné par SonarQube.
                                //
                                //     - [Voir les résultats](${env.SONARQUBE_URL}/dashboard?id=${env.sonarqubeProjectKey})
                                //     - [Visualiser le job](${env.RUN_DISPLAY_URL})
                                //
                                //     @${env.sender_login}
                                //     """.stripIndent(),
                                //     rawMessage: true,
                                // )
                            }
                        }
                    }
                }

                post {
                    always {
                        sh "rm -rf '${env.projectDir}'"
                    }
                }
            }
        }
    }
}
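This pipeline is driven entirely by environment values (project_name, repository_url, ref, sonarqubeProjectKey, sender_login), normally injected by the trigger that starts it. A sketch of job parameters a wrapping Jenkinsfile could declare to supply the same values by hand; the parameter defaults are illustrative:

// Hypothetical parameters; each one surfaces as an env.* value read above.
properties([
    parameters([
        string(name: 'project_name', defaultValue: 'myproject', description: 'Project name'),
        string(name: 'repository_url', defaultValue: 'ssh://git@forge.example.org/myproject.git', description: 'Git repository URL'),
        string(name: 'ref', defaultValue: 'master', description: 'Git ref to scan'),
        string(name: 'sonarqubeProjectKey', defaultValue: 'myproject', description: 'SonarQube project key'),
    ])
])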
@@ -1,134 +0,0 @@
import org.jenkinsci.plugins.pipeline.modeldefinition.Utils

def call(String baseImage = 'ubuntu:22.04', Map options = [:]) {
    Map hooks = options.get('hooks', [:])
    String jobHistory = options.get('jobHistory', '10')

    node {
        properties([
            buildDiscarder(logRotator(daysToKeepStr: jobHistory, numToKeepStr: jobHistory)),
        ])
        stage('Cancel older jobs') {
            def buildNumber = env.BUILD_NUMBER as int
            if (buildNumber > 1) milestone(buildNumber - 1)
            milestone(buildNumber)
        }
        stage('Checkout project') {
            checkout(scm)
        }
        stage('Run pre hooks') {
            runHook(hooks, 'preSymfonyAppPipeline')
        }
        stage('Run in Symfony image') {
            def symfonyImage = buildDockerImage(baseImage, hooks)
            symfonyImage.inside() {
                def repo = env.JOB_NAME
                if (env.BRANCH_NAME ==~ /^PR-.*$/) {
                    repo = env.JOB_NAME - "/${env.JOB_BASE_NAME}"
                }

                stage('Install composer dependencies') {
                    sh '''
                    symfony composer install
                    '''
                }

                parallel([
                    'php-security-check': {
                        stage('Check PHP security issues') {
                            catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
                                def auditReport = sh(script: 'local-php-security-checker --format=markdown || true', returnStdout: true)
                                if (auditReport.trim() != '') {
                                    if (env.CHANGE_ID) {
                                        gitea.commentPullRequest(repo, env.CHANGE_ID, auditReport)
                                    } else {
                                        print auditReport
                                    }
                                }
                                if (!auditReport.contains('No packages have known vulnerabilities.')) {
                                    throw new Exception('Dependencies check failed !')
                                }
                            }
                        }
                    },
                    'php-cs-fixer': {
                        stage('Run PHP-CS-Fixer on modified code') {
                            catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
                                if (!fileExists('.php-cs-fixer.dist.php')) {
                                    def phpCsFixerConfig = libraryResource 'com/cadoles/symfony/.php-cs-fixer.dist.php'
                                    writeFile file:'.php-cs-fixer.dist.php', text:phpCsFixerConfig
                                }

                                sh '''
                                CHANGED_FILES=$(git diff --name-only --diff-filter=ACMRTUXB "HEAD~..HEAD" | fgrep ".php" | tr "\n" " ")
                                if ! echo "${CHANGED_FILES}" | grep -qE "^(\\.php-cs-fixer(\\.dist)\\.php?|composer\\.lock)$"; then EXTRA_ARGS=$(printf -- '--path-mode=intersection -- %s' "${CHANGED_FILES}"); else EXTRA_ARGS=''; fi
                                symfony php $(which php-cs-fixer) fix --config=.php-cs-fixer.dist.php -v --dry-run --using-cache=no --format junit ${EXTRA_ARGS} > php-cs-fixer.xml || true
                                '''
                                def report = sh(script: 'junit2md php-cs-fixer.xml', returnStdout: true)
                                if (env.CHANGE_ID) {
                                    gitea.commentPullRequest(repo, env.CHANGE_ID, report)
                                } else {
                                    print report
                                }
                            }
                        }
                    },
                    'phpstan': {
                        stage('Run phpstan') {
                            catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
                                if (!fileExists('phpstan.neon')) {
                                    def phpStanConfig = libraryResource 'com/cadoles/symfony/phpstan.neon'
                                    writeFile file:'phpstan.neon', text:phpStanConfig
                                }
                                sh '''
                                symfony php $(which phpstan) analyze -l 1 --error-format=table src > phpstan.txt || true
                                '''
                                def report = sh(script: 'cat phpstan.txt', returnStdout: true)
                                report = '## Rapport PHPStan\n\n```\n' + report
                                report = report + '\n```\n'
                                if (env.CHANGE_ID) {
                                    gitea.commentPullRequest(repo, env.CHANGE_ID, report)
                                } else {
                                    print report
                                }
                            }
                        }
                    }
                ])
            }
        }
        stage('Run post hooks') {
            runHook(hooks, 'postSymfonyAppPipeline')
        }
    }
}

def buildDockerImage(String baseImage, Map hooks) {
    def imageName = 'cadoles-symfony-ci'
    dir(".${imageName}") {
        def dockerfile = libraryResource 'com/cadoles/symfony/Dockerfile'
        writeFile file:'Dockerfile', text: "FROM ${baseImage}\n\n" + dockerfile

        def addLetsEncryptCA = libraryResource 'com/cadoles/common/add-letsencrypt-ca.sh'
        writeFile file:'add-letsencrypt-ca.sh', text:addLetsEncryptCA

        runHook(hooks, 'buildSymfonyImage')

        def safeJobName = URLDecoder.decode(env.JOB_NAME).toLowerCase().replace('/', '-').replace(' ', '-')
        def imageTag = "${safeJobName}-${env.BUILD_ID}"
        return docker.build("${imageName}:${imageTag}", '.')
    }
}

void runHook(Map hooks, String name) {
    if (!hooks[name]) {
        println("No hook '${name}' defined. Skipping.")
        return
    }

    if (hooks[name] instanceof Closure) {
        hooks[name]()
    } else {
        error("Hook '${name}' seems to be defined but is not a closure !")
    }
}
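For context, a sketch of how a Jenkinsfile could have invoked this (now removed) pipeline; the step name symfonyAppPipeline is assumed from the usual vars/ naming convention, and the hook bodies are purely illustrative:

// Assumed step name (vars/symfonyAppPipeline.groovy); the hook keys match the
// names looked up by runHook() in the code above.
symfonyAppPipeline('ubuntu:22.04', [
    jobHistory: '5',
    hooks: [
        buildSymfonyImage: {
            // Runs while the CI image build context is being prepared.
            sh 'echo "add extra build files here"'
        },
        postSymfonyAppPipeline: {
            // Runs after the quality checks have completed.
            sh 'echo "notify or deploy here"'
        },
    ],
])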
@@ -122,9 +122,6 @@ def buildDockerImage() {
     def runTamarinScript = libraryResource 'com/cadoles/tamarin/run-tamarin.sh'
     writeFile file:'run-tamarin.sh', text:runTamarinScript
 
-    def addLetsEncryptCA = libraryResource 'com/cadoles/common/add-letsencrypt-ca.sh'
-    writeFile file:'add-letsencrypt-ca.sh', text:addLetsEncryptCA
-
     def safeJobName = URLDecoder.decode(env.JOB_NAME).toLowerCase().replace('/', '-').replace(' ', '-')
     def imageTag = "${safeJobName}-${env.BUILD_ID}"
     return docker.build("tamarin:${imageTag}", ".")
@@ -1,13 +0,0 @@
import org.jenkinsci.plugins.pipeline.modeldefinition.Utils

void when(Boolean condition, body) {
    Map config = [:]
    body.resolveStrategy = Closure.OWNER_FIRST
    body.delegate = config

    if (condition) {
        body()
    } else {
        Utils.markStageSkippedForConditional(STAGE_NAME)
    }
}
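A sketch of how this conditional helper is used from a scripted pipeline; the stage name and condition are illustrative:

// Skips the body (and marks the stage as skipped in the stage view)
// when the condition is false.
stage('Deploy') {
    when(env.BRANCH_NAME == 'master') {
        sh 'make deploy'
    }
}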