Compare commits
45 Commits
pa11y-audi
...
pipeline/c
Author | SHA1 | Date | |
---|---|---|---|
8f33a8a4c9 | |||
bf7e3cf9b9 | |||
cc802f6fdd | |||
e8198240de | |||
03d07acc23 | |||
71a94bb201 | |||
5dee07ce46 | |||
502c2c7ed9 | |||
9cf903ce41 | |||
cf66210f4e | |||
83688cc56c | |||
8699ec0a9d | |||
ed1fb84ea9 | |||
d50a9c6b77 | |||
ce2c30003e | |||
f2602a8d27 | |||
15adc72606 | |||
62615af5e6 | |||
c5684aafea | |||
2222c30054 | |||
b39c380368 | |||
0dd899a291 | |||
a7a820ac6f | |||
ce5192d1d9 | |||
94abda3f1a | |||
8f0d37213b | |||
d69ee2368a | |||
8c23bc688e | |||
81017f0b3c | |||
18bf648aa7 | |||
a021d96ca6 | |||
499aaca632 | |||
61f5eb8d3d | |||
bfbef60b9d | |||
1ef8151a51 | |||
279223b6f6 | |||
385a83d9ef | |||
7b0159c351 | |||
f09f349189 | |||
b52cac4f42 | |||
7c80e8c6cc | |||
c653c09fbc | |||
14a7bef425 | |||
cd8525c8b1 | |||
407fe87318 |
90
pipelines/mse-rgaa.jenkinsfile
Normal file
90
pipelines/mse-rgaa.jenkinsfile
Normal file
@ -0,0 +1,90 @@
|
||||
import hudson.tasks.test.AbstractTestResultAction
|
||||
|
||||
@Library("cadoles") _
|
||||
|
||||
pipeline {
|
||||
|
||||
parameters {
|
||||
text(name: 'URLS', defaultValue: 'https://msedev.crous-toulouse.fr\nhttps://msedev.crous-toulouse.fr/envole/enregistrement\nhttps://msedev.crous-toulouse.fr/envole/page/faq\nhttps://msedev.crous-toulouse.fr/envole/page/?t=liens_utiles\nhttps://msedev.crous-toulouse.fr/envole/page/?t=mentions_legales\nhttps://msedev.crous-toulouse.fr/envole/message/new\nhttps://msedev.crous-toulouse.fr/envole/recuperation/email\nhttps://msedev.crous-toulouse.fr/envole/courriel/raz', description: 'Liste des URLs à tester, une par ligne')
|
||||
string(name: 'USERNAME', defaultValue: '', description: "Nom d'utilisateur pour l'authentification Basic Auth, si nécessaire")
|
||||
password(name: 'PASSWORD', defaultValue: '', description: "Mot de passe pour l'authentification Basic Auth, si nécessaire")
|
||||
booleanParam(name: 'INCLUDE_WARNINGS', defaultValue: false, description: 'Inclure les avertissements')
|
||||
booleanParam(name: 'INCLUDE_NOTICES', defaultValue: false, description: 'Inclure les notifications')
|
||||
}
|
||||
|
||||
options {
|
||||
disableConcurrentBuilds()
|
||||
}
|
||||
|
||||
agent {
|
||||
node {
|
||||
label "docker"
|
||||
}
|
||||
}
|
||||
|
||||
stages {
|
||||
stage("Run RGAA audit") {
|
||||
steps {
|
||||
script {
|
||||
def urls = params.URLS.split('\n')
|
||||
|
||||
def count = 0
|
||||
urls.each { u ->
|
||||
stage("Audit page '${u}'") {
|
||||
def report = pa11y.audit(u.trim(), [
|
||||
reporter: 'junit',
|
||||
username: params.USERNAME,
|
||||
password: params.PASSWORD,
|
||||
standard: 'WCAG2AA',
|
||||
includeNotices: params.INCLUDE_NOTICES,
|
||||
includeWarnings: params.INCLUDE_WARNINGS,
|
||||
]);
|
||||
|
||||
writeFile file:"./report_${count}.xml", text:report
|
||||
count++
|
||||
}
|
||||
}
|
||||
|
||||
junit testResults: '*.xml', skipPublishingChecks: true
|
||||
|
||||
rocketSend (
|
||||
channel: "#cnous-mse",
|
||||
avatar: 'https://jenkins.cadol.es/static/b5f67753/images/headshot.png',
|
||||
message: """
|
||||
Audit RGAA | ${testStatuses()}
|
||||
|
||||
- [Voir les tests](${env.RUN_DISPLAY_URL})
|
||||
|
||||
@here
|
||||
""".stripIndent(),
|
||||
rawMessage: true,
|
||||
)
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
post {
|
||||
always {
|
||||
cleanWs()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
@NonCPS
|
||||
def testStatuses() {
|
||||
def testStatus = ""
|
||||
AbstractTestResultAction testResultAction = currentBuild.rawBuild.getAction(AbstractTestResultAction.class)
|
||||
if (testResultAction != null) {
|
||||
def total = testResultAction.totalCount
|
||||
def failed = testResultAction.failCount
|
||||
def skipped = testResultAction.skipCount
|
||||
def passed = total - failed - skipped
|
||||
testStatus = "Passant(s): ${passed}, Échoué(s): ${failed} ${testResultAction.failureDiffString}, Désactivé(s): ${skipped}"
|
||||
}
|
||||
return testStatus
|
||||
}
|
76
pipelines/sentry.jenkinsfile
Normal file
76
pipelines/sentry.jenkinsfile
Normal file
@ -0,0 +1,76 @@
|
||||
pipeline {
|
||||
agent {
|
||||
docker {
|
||||
image "getsentry/sentry-cli"
|
||||
args "--entrypoint="
|
||||
}
|
||||
}
|
||||
|
||||
environment {
|
||||
projectDir = "${env.project_name}_${env.BUILD_ID}"
|
||||
}
|
||||
|
||||
stages {
|
||||
|
||||
stage("Clone repository") {
|
||||
steps {
|
||||
checkout scm:
|
||||
[
|
||||
$class: 'GitSCM',
|
||||
userRemoteConfigs: [[url: env.repository_url, credentialsId: 'jenkins-forge-ssh']],
|
||||
branches: [[name: env.ref]],
|
||||
extensions: [
|
||||
[$class: 'RelativeTargetDirectory', relativeTargetDir: env.projectDir ],
|
||||
[$class: 'CloneOption', noTags: false, shallow: false, depth: 0, reference: ''],
|
||||
[$class: 'WipeWorkspace' ]
|
||||
]
|
||||
],
|
||||
changelog: false,
|
||||
poll: false
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
stage('Create sentry release') {
|
||||
steps {
|
||||
dir(env.projectDir) {
|
||||
withCredentials([
|
||||
string(credentialsId: 'sentry-url', variable: 'SENTRY_URL'),
|
||||
string(credentialsId: 'sentry-release-auth-token', variable: 'SENTRY_AUTH_TOKEN')
|
||||
]) {
|
||||
sh '''
|
||||
SENTRY_CMD="sentry-cli --auth-token \"${SENTRY_AUTH_TOKEN}\" --url \"${SENTRY_URL}\""
|
||||
PROJECT_VERSION=$(sentry-cli releases propose-version)
|
||||
|
||||
$SENTRY_CMD \
|
||||
releases \
|
||||
--org "${sentry_org}" \
|
||||
new \
|
||||
-p "${sentry_project}" ${PROJECT_VERSION}
|
||||
|
||||
(
|
||||
$SENTRY_CMD \
|
||||
releases \
|
||||
--org "${sentry_org}" \
|
||||
set-commits --local \
|
||||
${PROJECT_VERSION} || exit 0
|
||||
)
|
||||
|
||||
$SENTRY_CMD \
|
||||
releases \
|
||||
--org "${sentry_org}" \
|
||||
finalize \
|
||||
${PROJECT_VERSION}
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
post {
|
||||
always {
|
||||
cleanWs()
|
||||
}
|
||||
}
|
||||
}
|
26
resources/com/cadoles/common/add-letsencrypt-ca.sh
Normal file
26
resources/com/cadoles/common/add-letsencrypt-ca.sh
Normal file
@ -0,0 +1,26 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -eo pipefail
|
||||
|
||||
DESTDIR=/usr/local/share/ca-certificates
|
||||
UPDATE_CERTS_CMD=update-ca-certificates
|
||||
CERTS="$(cat <<EOF
|
||||
https://letsencrypt.org/certs/isrgrootx1.pem
|
||||
https://letsencrypt.org/certs/isrg-root-x2.pem
|
||||
https://letsencrypt.org/certs/lets-encrypt-r3.pem
|
||||
https://letsencrypt.org/certs/lets-encrypt-e1.pem
|
||||
https://letsencrypt.org/certs/lets-encrypt-r4.pem
|
||||
https://letsencrypt.org/certs/lets-encrypt-e2.pem
|
||||
EOF
|
||||
)"
|
||||
|
||||
cd "$DESTDIR"
|
||||
|
||||
for cert in $CERTS; do
|
||||
echo "Downloading '$cert'..."
|
||||
filename=$(basename "$cert")
|
||||
wget --tries=10 --timeout=30 -O "$filename" "$cert"
|
||||
openssl x509 -in "$filename" -inform PEM -out "$filename.crt"
|
||||
done
|
||||
|
||||
$UPDATE_CERTS_CMD
|
@ -1,4 +1,4 @@
|
||||
FROM golang:1.13 as envtpl
|
||||
FROM golang:1.15 as envtpl
|
||||
|
||||
ARG HTTP_PROXY=
|
||||
ARG HTTPS_PROXY=
|
||||
@ -14,7 +14,7 @@ RUN git clone https://github.com/subfuzion/envtpl /src \
|
||||
-ldflags "-X main.AppVersionMetadata=$(date -u +%s)" \
|
||||
-a -installsuffix cgo -o ./bin/envtpl ./cmd/envtpl/.
|
||||
|
||||
FROM alpine:3.10
|
||||
FROM alpine:3.13
|
||||
|
||||
ARG HTTP_PROXY=
|
||||
ARG HTTPS_PROXY=
|
||||
|
@ -10,10 +10,9 @@ rm -f reports/*
|
||||
cd reports
|
||||
|
||||
lighthouse \
|
||||
"$LIGHTHOUSE_URL" \
|
||||
--no-enable-error-reporting \
|
||||
--chrome-flags="--headless --disable-dev-shm-usage --no-sandbox --disable-gpu" \
|
||||
--config=../config.js \
|
||||
--output json --output html \
|
||||
--output-path=lighthouse \
|
||||
-- \
|
||||
"$LIGHTHOUSE_URL"
|
||||
--output-path=lighthouse
|
||||
|
@ -35,7 +35,7 @@ RUN apk add --no-cache \
|
||||
chromium \
|
||||
bash
|
||||
|
||||
RUN PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 npm install -g pa11y pa11y-reporter-html@^1.0.0 pa11y-reporter-junit
|
||||
RUN PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 npm install -g pa11y@^5.0.0 pa11y-reporter-html@^1.0.0 pa11y-reporter-junit
|
||||
|
||||
RUN adduser -D pa11y
|
||||
|
||||
|
@ -9,6 +9,7 @@ cd reports
|
||||
|
||||
export PUPPETEER_EXECUTABLE_PATH=$(which chromium-browser)
|
||||
export PA11Y_REPORTER="${PA11Y_REPORTER:-html}"
|
||||
export PA11Y_STANDARD=${PA11Y_STANDARD:-WCAG2AA}
|
||||
|
||||
PA11Y_ARGS=""
|
||||
|
||||
|
41
resources/com/cadoles/symfony/.php-cs-fixer.dist.php
Normal file
41
resources/com/cadoles/symfony/.php-cs-fixer.dist.php
Normal file
@ -0,0 +1,41 @@
|
||||
<?php
|
||||
|
||||
$finder = PhpCsFixer\Finder::create()
|
||||
->in(__DIR__.'/src')
|
||||
->name('*.php')
|
||||
;
|
||||
|
||||
return (new PhpCsFixer\Config())
|
||||
->setRules([
|
||||
'@Symfony' => true,
|
||||
'concat_space' => ['spacing' => 'none'],
|
||||
'array_syntax' => ['syntax' => 'short'],
|
||||
'combine_consecutive_issets' => true,
|
||||
'explicit_indirect_variable' => true,
|
||||
'no_useless_return' => true,
|
||||
'ordered_imports' => true,
|
||||
'no_unused_imports' => true,
|
||||
'no_spaces_after_function_name' => true,
|
||||
'no_spaces_inside_parenthesis' => true,
|
||||
'ternary_operator_spaces' => true,
|
||||
'class_definition' => ['single_line' => true],
|
||||
'whitespace_after_comma_in_array' => true,
|
||||
'phpdoc_add_missing_param_annotation' => ['only_untyped' => true],
|
||||
'phpdoc_order' => true,
|
||||
'phpdoc_types_order' => [
|
||||
'null_adjustment' => 'always_last',
|
||||
'sort_algorithm' => 'alpha',
|
||||
],
|
||||
'phpdoc_no_empty_return' => false,
|
||||
'phpdoc_summary' => false,
|
||||
'general_phpdoc_annotation_remove' => [
|
||||
'annotations' => [
|
||||
'expectedExceptionMessageRegExp',
|
||||
'expectedException',
|
||||
'expectedExceptionMessage',
|
||||
'author',
|
||||
],
|
||||
],
|
||||
])
|
||||
->setFinder($finder)
|
||||
;
|
42
resources/com/cadoles/symfony/Dockerfile
Normal file
42
resources/com/cadoles/symfony/Dockerfile
Normal file
@ -0,0 +1,42 @@
|
||||
ARG PHP_SECURITY_CHECKER_VERSION=1.0.0
|
||||
ARG JQ_VERSION=1.6
|
||||
|
||||
RUN apt update && \
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install -y \
|
||||
wget tar curl ca-certificates \
|
||||
openssl bash git unzip \
|
||||
php-cli php-dom php-mbstring php-ctype php-xml php-iconv
|
||||
|
||||
COPY add-letsencrypt-ca.sh /root/add-letsencrypt-ca.sh
|
||||
|
||||
RUN bash /root/add-letsencrypt-ca.sh \
|
||||
&& rm -f /root/add-letsencrypt-ca.sh
|
||||
|
||||
RUN wget -O /usr/local/bin/jq https://github.com/stedolan/jq/releases/download/jq-${JQ_VERSION}/jq-linux64 \
|
||||
&& chmod +x /usr/local/bin/jq
|
||||
|
||||
# Install local-php-security-checker
|
||||
RUN wget -O /usr/local/bin/local-php-security-checker https://github.com/fabpot/local-php-security-checker/releases/download/v${PHP_SECURITY_CHECKER_VERSION}/local-php-security-checker_${PHP_SECURITY_CHECKER_VERSION}_linux_amd64 \
|
||||
&& chmod +x /usr/local/bin/local-php-security-checker
|
||||
|
||||
# Install junit2md
|
||||
RUN junit2md_download_url=$(curl "https://forge.cadoles.com/api/v1/repos/Cadoles/junit2md/releases" -H "accept:application/json" | jq -r 'sort_by(.published_at) | reverse | .[0] | .assets[] | select(.name == "junit2md-linux-amd64.tar.gz") | .browser_download_url') \
|
||||
&& wget -O junit2md-linux-amd64.tar.gz "$junit2md_download_url" \
|
||||
&& tar -xzf junit2md-linux-amd64.tar.gz \
|
||||
&& cp junit2md-linux-amd64/junit2md /usr/local/bin/junit2md
|
||||
|
||||
# Install composer
|
||||
RUN wget https://raw.githubusercontent.com/composer/getcomposer.org/76a7060ccb93902cd7576b67264ad91c8a2700e2/web/installer -O - -q | php -- --force --install-dir /usr/local/bin --filename composer \
|
||||
&& chmod +x /usr/local/bin/composer
|
||||
|
||||
# Install php-cs-fixer
|
||||
RUN mkdir --parents /tools/php-cs-fixer \
|
||||
&& composer require --working-dir=/tools/php-cs-fixer friendsofphp/php-cs-fixer \
|
||||
&& ln -s /tools/php-cs-fixer/vendor/bin/php-cs-fixer /usr/local/bin/php-cs-fixer
|
||||
|
||||
# Install php-stan
|
||||
RUN mkdir --parents /tools/phpstan \
|
||||
&& composer require --working-dir=/tools/phpstan phpstan/phpstan \
|
||||
&& ln -s /tools/phpstan/vendor/bin/phpstan /usr/local/bin/phpstan \
|
||||
&& composer require --working-dir=/tools/phpstan phpstan/phpstan-symfony \
|
||||
&& composer require --working-dir=/tools/phpstan phpstan/phpstan-doctrine
|
4
resources/com/cadoles/symfony/phpstan.neon
Normal file
4
resources/com/cadoles/symfony/phpstan.neon
Normal file
@ -0,0 +1,4 @@
|
||||
includes:
|
||||
- /tools/phpstan/vendor/phpstan/phpstan-symfony/extension.neon
|
||||
- /tools/phpstan/vendor/phpstan/phpstan-doctrine/extension.neon
|
||||
- /tools/phpstan/vendor/phpstan/phpstan-doctrine/rules.neon
|
@ -1,15 +1,16 @@
|
||||
FROM alpine:3.8
|
||||
FROM alpine:latest
|
||||
|
||||
ARG HTTP_PROXY=
|
||||
ARG HTTPS_PROXY=
|
||||
ARG http_proxy=
|
||||
ARG https_proxy=
|
||||
|
||||
ARG TAMARIN_VERSION=develop
|
||||
RUN apk add --no-cache git docker python3 bash openssl curl
|
||||
|
||||
RUN apk add --no-cache git docker python3 bash
|
||||
RUN curl -k https://forge.cadoles.com/Cadoles/Jenkins/raw/branch/master/resources/com/cadoles/common/add-letsencrypt-ca.sh | bash
|
||||
|
||||
RUN git clone http://forge.cadoles.com/Cadoles/Tamarin /tamarin\
|
||||
ARG TAMARIN_VERSION=feature/doc-compile
|
||||
RUN git clone https://forge.cadoles.com/Cadoles/Tamarin /tamarin\
|
||||
&& cd /tamarin\
|
||||
&& git checkout ${TAMARIN_VERSION}
|
||||
|
||||
@ -23,4 +24,4 @@ VOLUME /dist
|
||||
ADD run-tamarin.sh /usr/local/bin/run-tamarin
|
||||
RUN chmod +x /usr/local/bin/run-tamarin
|
||||
|
||||
CMD /usr/local/bin/run-tamarin
|
||||
CMD /usr/local/bin/run-tamarin
|
||||
|
@ -28,6 +28,6 @@ DEST_DIR=${TAMARIN_DEST_DIR:-dist}
|
||||
mkdir -p ${DEST_DIR}
|
||||
for f in /dist/*; do
|
||||
if [ -e "$f" ]; then
|
||||
cp "$f" ./${DEST_DIR}
|
||||
cp -r "$f" ./${DEST_DIR}
|
||||
fi
|
||||
done
|
||||
done
|
||||
|
@ -37,55 +37,10 @@ RUN apk --no-cache add \
|
||||
python-dev \
|
||||
sqlite-dev \
|
||||
yaml-dev \
|
||||
sudo \
|
||||
nodejs \
|
||||
npm
|
||||
|
||||
RUN pip install --upgrade pip \
|
||||
&& pip install \
|
||||
pyClamd==0.4.0 \
|
||||
GitPython==2.1.3 \
|
||||
chardet==3.0.4 \
|
||||
futures==3.2.0 \
|
||||
pyOpenSSL==18.0.0 \
|
||||
ndg-httpsclient==0.4.0 \
|
||||
pyasn1==0.4.2 \
|
||||
scapy==2.4.0 \
|
||||
msgpack==0.5.6 \
|
||||
Jinja2==2.10 \
|
||||
vulndb==0.1.1 \
|
||||
psutil==5.4.8 \
|
||||
ds-store==1.1.2 \
|
||||
pebble==4.3.8 \
|
||||
acora==2.1 \
|
||||
diff-match-patch==20121119 \
|
||||
lz4==1.1.0 \
|
||||
vulners==1.3.0 \
|
||||
ipaddresses==0.0.2 \
|
||||
PyGithub==1.21.0 \
|
||||
pybloomfiltermmap==0.3.14 \
|
||||
phply==0.9.1 nltk==3.0.1 \
|
||||
tblib==0.2.0 \
|
||||
pdfminer==20140328 \
|
||||
lxml==3.4.4 \
|
||||
guess-language==0.2 \
|
||||
cluster==1.1.1b3 \
|
||||
python-ntlm==1.0.1 \
|
||||
halberd==0.2.4 \
|
||||
darts.util.lru==0.5 \
|
||||
markdown==2.6.1 \
|
||||
termcolor==1.1.0 \
|
||||
mitmproxy==0.13 \
|
||||
ruamel.ordereddict==0.4.8 \
|
||||
Flask==0.10.1 \
|
||||
PyYAML==3.12 \
|
||||
tldextract==1.7.2 \
|
||||
esmre==0.3.1 \
|
||||
bravado-core==5.12.1 \
|
||||
subprocess32==3.5.4 \
|
||||
&& npm install -g retire \
|
||||
&& rm -rf /root/.cache/pip \
|
||||
&& apk del build-base linux-headers
|
||||
|
||||
RUN adduser -D w3af
|
||||
|
||||
RUN git clone --depth=1 \
|
||||
@ -94,6 +49,9 @@ RUN git clone --depth=1 \
|
||||
&& rm -rf /home/w3af/w3af/.git \
|
||||
&& chown -R w3af /home/w3af/w3af
|
||||
|
||||
RUN cd /home/w3af/w3af \
|
||||
&& ( ./w3af_console || . /tmp/w3af_dependency_install.sh )
|
||||
|
||||
COPY run-audit.sh /usr/local/bin/run-audit
|
||||
RUN chmod +x /usr/local/bin/run-audit
|
||||
|
||||
|
246
vars/compileDoc.groovy
Normal file
246
vars/compileDoc.groovy
Normal file
@ -0,0 +1,246 @@
|
||||
// Pipeline de construction des images Docker des services Zéphir
|
||||
def call() {
|
||||
def buildTag
|
||||
def gitEmail = params.gitEmail ? params.gitEmail : 'jenkins@cadoles.com'
|
||||
def gitUsername = params.gitUsername ? params.gitUsername : 'Jenkins'
|
||||
|
||||
pipeline {
|
||||
|
||||
agent any
|
||||
|
||||
environment {
|
||||
projectDir = "${env.project_name}_${env.BUILD_ID}"
|
||||
}
|
||||
|
||||
triggers {
|
||||
// Execute pipeline every day at 7h30 to prepare docker images
|
||||
cron('30 7 * * 1-5')
|
||||
}
|
||||
|
||||
|
||||
parameters {
|
||||
string(
|
||||
name: 'targetUrl',
|
||||
description: 'URL cible pour le dépôt de fichier',
|
||||
defaultValue: 'https://nextcloud.cadoles.com/nextcloud'
|
||||
)
|
||||
string(
|
||||
name: 'targetFolder',
|
||||
description: 'Répertoire racine cible partagé avec l’utilisateur',
|
||||
defaultValue: 'Cadoles Formation'
|
||||
)
|
||||
string(
|
||||
name: 'credentialsId',
|
||||
description: "Identifiant du compte de type login/mot de passe",
|
||||
defaultValue: 'nextcloud-user-for-formation-documents'
|
||||
)
|
||||
}
|
||||
|
||||
stages {
|
||||
|
||||
stage("Prepare build environment") {
|
||||
when {
|
||||
anyOf {
|
||||
triggeredBy cause: "UserIdCause", detail: "bbohard"
|
||||
triggeredBy 'TimerTrigger'
|
||||
}
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
tamarin.prepareEnvironment()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stage("Build doc") {
|
||||
when {
|
||||
not {
|
||||
triggeredBy 'TimerTrigger'
|
||||
}
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
stage("Check tag") {
|
||||
buildTag = env.ref
|
||||
if (!buildTag.startsWith('build/')) {
|
||||
currentBuild.result= 'ABORTED'
|
||||
error("La référence `${buildTag}` n’est pas une demande de paquet valide.")
|
||||
}
|
||||
}
|
||||
stage("Clone repository") {
|
||||
checkout scm:
|
||||
[
|
||||
$class: 'GitSCM',
|
||||
userRemoteConfigs: [[url: env.repository_url, credentialsId: 'jenkins-forge-ssh']],
|
||||
branches: [[name: env.ref]],
|
||||
extensions: [
|
||||
[$class: 'RelativeTargetDirectory', relativeTargetDir: env.projectDir ],
|
||||
[$class: 'CloneOption', noTags: false, shallow: false, depth: 0, reference: ''],
|
||||
[$class: 'WipeWorkspace' ]
|
||||
]
|
||||
],
|
||||
changelog: false,
|
||||
poll: false
|
||||
}
|
||||
stage("Checkout ref") {
|
||||
dir(env.projectDir) {
|
||||
sh """
|
||||
git checkout ${env.ref}
|
||||
"""
|
||||
return
|
||||
}
|
||||
}
|
||||
stage("Compile document") {
|
||||
dir(env.projectDir) {
|
||||
def date = new Date()
|
||||
def dateTag = date.format('yyyyMMdd')
|
||||
def splittedTag = env.ref.split('/')
|
||||
def docProfile = splittedTag[1]
|
||||
withCredentials([
|
||||
usernamePassword(
|
||||
credentialsId: params.credentialsId,
|
||||
usernameVariable: "NEXTCLOUD_USER",
|
||||
passwordVariable: "NEXTCLOUD_PASSWORD"
|
||||
)
|
||||
]) {
|
||||
targetFolder = targetFolder.replace(' ', '%20')
|
||||
def rootFolder = "${params.targetUrl}/remote.php/dav/files/${NEXTCLOUD_USER}/${targetFolder}"
|
||||
def projectName = env.project_name
|
||||
def destFolder = "${projectName}/${docProfile}"
|
||||
def result = tamarin.compileDoc(env.buildProfile)
|
||||
if(result.size() == 0) {
|
||||
error('No artefact produced')
|
||||
}
|
||||
println(result)
|
||||
if(docProfile != 'draft') {
|
||||
def publicFolder = "${destFolder}/latest/public"
|
||||
def privateFolder = "${destFolder}/latest/private"
|
||||
def archivePublicFolder = "${destFolder}/archive/${dateTag}/public"
|
||||
def archivePrivateFolder = "${destFolder}/archive/${dateTag}/private"
|
||||
createWebDAVFolder (params.credentialsId, rootFolder, publicFolder)
|
||||
createWebDAVFolder (params.credentialsId, rootFolder, privateFolder)
|
||||
createWebDAVFolder (params.credentialsId, rootFolder, archivePublicFolder)
|
||||
createWebDAVFolder (params.credentialsId, rootFolder, archivePrivateFolder)
|
||||
result.each { r ->
|
||||
println(r)
|
||||
splittedDest = r.split('/')
|
||||
if(splittedDest[2] == 'public') {
|
||||
def destPath = "${rootFolder}/${publicFolder}/${splittedDest[-1]}"
|
||||
def destArchivePath = "${rootFolder}/${archivePublicFolder}/${splittedDest[-1]}"
|
||||
copyWebDAVFile (params.credentialsId, r, destPath)
|
||||
copyWebDAVFile (params.credentialsId, r, destArchivePath)
|
||||
} else {
|
||||
def destPath = "${rootFolder}/${privateFolder}/${splittedDest[-1]}"
|
||||
def destArchivePath = "${rootFolder}/${archivePrivateFolder}/${splittedDest[-1]}"
|
||||
copyWebDAVFile (params.credentialsId, r, destPath)
|
||||
copyWebDAVFile (params.credentialsId, r, destArchivePath)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
def draftPublicFolder = "${destFolder}/public"
|
||||
def draftPrivateFolder = "${destFolder}/private"
|
||||
createWebDAVFolder (params.credentialsId, rootFolder, draftPublicFolder)
|
||||
createWebDAVFolder (params.credentialsId, rootFolder, draftPrivateFolder)
|
||||
result.each { r ->
|
||||
println(r)
|
||||
splittedDest = r.split('/')
|
||||
if(splittedDest[2] == 'public') {
|
||||
def destPath = "${rootFolder}/${draftPublicFolder}/${splittedDest[-1]}"
|
||||
copyWebDAVFile (params.credentialsId, r, destPath)
|
||||
} else {
|
||||
def destPath = "${rootFolder}/${draftPrivateFolder}/${splittedDest[-1]}"
|
||||
copyWebDAVFile (params.credentialsId, r, destPath)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
withCredentials([sshUserPrivateKey(credentialsId: 'jenkins-forge-ssh', keyFileVariable: 'FORGE_SSH_KEY')]) {
|
||||
writeFile(
|
||||
file : "./sshForJenkins.sh",
|
||||
text: '''
|
||||
#!/bin/sh
|
||||
ssh -i "${FORGE_SSH_KEY}" -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null "$@"
|
||||
'''
|
||||
)
|
||||
sh(script: "chmod +x ./sshForJenkins.sh")
|
||||
if (docProfile != 'draft') {
|
||||
withEnv(["GIT_SSH=./sshForJenkins.sh"]) {
|
||||
// Add git username/email
|
||||
sh("git config user.email '${gitEmail}'")
|
||||
sh("git config user.username '${gitUsername}'")
|
||||
|
||||
sh """
|
||||
git tag -am "paquet" release/v${dateTag}
|
||||
"""
|
||||
sh """
|
||||
git push --tags origin
|
||||
"""
|
||||
}
|
||||
}
|
||||
withEnv(["GIT_SSH=./sshForJenkins.sh"]) {
|
||||
// Add git username/email
|
||||
sh("git config user.email '${gitEmail}'")
|
||||
sh("git config user.username '${gitUsername}'")
|
||||
|
||||
sh """
|
||||
git tag -d ${env.ref}
|
||||
git push origin :${env.ref}
|
||||
"""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def createWebDAVFolder (
|
||||
String creds,
|
||||
String rootUrl,
|
||||
String folder
|
||||
) {
|
||||
withCredentials([
|
||||
usernamePassword(
|
||||
credentialsId: creds,
|
||||
usernameVariable: "NEXTCLOUD_USER",
|
||||
passwordVariable: "NEXTCLOUD_PASSWORD"
|
||||
)
|
||||
]) {
|
||||
println(rootUrl)
|
||||
println(folder)
|
||||
def splittedFolder = folder.split('/')
|
||||
splittedFolder.eachWithIndex { subfolder, i ->
|
||||
def newFolder = ""
|
||||
if(i == 0) {
|
||||
newFolder = subfolder
|
||||
} else {
|
||||
def prec = i - 1
|
||||
def parentFolder = splittedFolder[0..prec].join('/')
|
||||
newFolder = "${parentFolder}/${subfolder}"
|
||||
}
|
||||
println(newFolder)
|
||||
|
||||
sh 'curl -X MKCOL --user ${NEXTCLOUD_USER}:${NEXTCLOUD_PASSWORD} --basic ' + "${rootUrl}/${newFolder}"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def copyWebDAVFile (
|
||||
String creds,
|
||||
String newFile,
|
||||
String destUrl
|
||||
) {
|
||||
withCredentials([
|
||||
usernamePassword(
|
||||
credentialsId: creds,
|
||||
usernameVariable: "NEXTCLOUD_USER",
|
||||
passwordVariable: "NEXTCLOUD_PASSWORD"
|
||||
)
|
||||
]) {
|
||||
|
||||
sh "curl -T ${newFile}" + ' --user ${NEXTCLOUD_USER}:${NEXTCLOUD_PASSWORD} --basic ' + destUrl
|
||||
}
|
||||
}
|
@ -8,6 +8,7 @@ def call(Map params = [:]) {
|
||||
def distVersion = params.distVersion ? params.distVersion : '2.7.0'
|
||||
def distBranchName = params.distBranchName ? params.distBranchName : env.GIT_BRANCH
|
||||
def gitCredentials = params.gitCredentials ? params.gitCredentials : null
|
||||
def gitCredentialsType = params.gitCredentialsType ? params.gitCredentialsType : 'http'
|
||||
def gitEmail = params.gitEmail ? params.gitEmail : 'jenkins@cadoles.com'
|
||||
def gitUsername = params.gitUsername ? params.gitUsername : 'Jenkins'
|
||||
def skipCi = params.containsKey('skipCi') ? params.skipCi : false
|
||||
@ -89,8 +90,16 @@ def call(Map params = [:]) {
|
||||
}
|
||||
|
||||
if (gitCredentials != null) {
|
||||
git.withHTTPCredentials(gitCredentials) {
|
||||
proc.call()
|
||||
if (gitCredentialsType == 'http') {
|
||||
git.withHTTPCredentials(gitCredentials) {
|
||||
proc.call()
|
||||
}
|
||||
} else if (gitCredentialsType == 'ssh') {
|
||||
git.withSSHCredentials(gitCredentials) {
|
||||
proc.call()
|
||||
}
|
||||
} else {
|
||||
throw new Exception("Unknown git credentials type '${gitCredentialsType}' ! Expected 'ssh' or 'http' (default).")
|
||||
}
|
||||
} else {
|
||||
proc.call()
|
||||
|
@ -1,7 +1,8 @@
|
||||
def waitForRepoPackage(String packageName, Map params = [:]) {
|
||||
def expectedVersion = params.expectedVersion ? params.expectedVersion : null
|
||||
def delay = params.delay ? params.delay : 30
|
||||
def waitTimeout = params.timeout ? params.timeout : 1200
|
||||
def waitTimeout = params.timeout ? params.timeout : 2400
|
||||
def asPattern = params.containsKey("asPattern") ? params.asPattern : true
|
||||
|
||||
def message = "Waiting for package '${packageName}'"
|
||||
if (expectedVersion != null) {
|
||||
@ -26,9 +27,11 @@ def waitForRepoPackage(String packageName, Map params = [:]) {
|
||||
println("Package found !")
|
||||
break
|
||||
}
|
||||
|
||||
|
||||
def versionFound = packages.find {
|
||||
return it['version'] =~ expectedVersion
|
||||
def matches = asPattern ? it['version'] =~ expectedVersion : it['version'] == expectedVersion
|
||||
println("Comparing expected version '${expectedVersion}' to '${it['version']}': ${matches}")
|
||||
return matches
|
||||
}
|
||||
|
||||
if (versionFound) {
|
||||
@ -76,5 +79,10 @@ def listRepoPackages(Map params = [:]) {
|
||||
}
|
||||
}
|
||||
|
||||
println "Found packages:"
|
||||
packages.each{
|
||||
println " - Package: ${it.key}, Version: ${it.value['version']}"
|
||||
}
|
||||
|
||||
return packages
|
||||
}
|
@ -27,4 +27,18 @@ def withHTTPCredentials(String credentialsId, Closure fn) {
|
||||
sh(script: "rm -f '${tmpAskPassScript}'")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def withSSHCredentials(String credentialsId, Closure fn) {
|
||||
def randomUUID = UUID.randomUUID().toString()
|
||||
withCredentials([
|
||||
sshUserPrivateKey(
|
||||
credentialsId: credentialsId,
|
||||
keyFileVariable: 'GIT_SSH_IDENTITY_FILE',
|
||||
)
|
||||
]) {
|
||||
withEnv(['GIT_SSH_VARIANT=ssh', 'GIT_SSH_COMMAND=ssh -i $GIT_SSH_IDENTITY_FILE -o IdentitiesOnly=yes -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null']) {
|
||||
fn.call()
|
||||
}
|
||||
}
|
||||
}
|
40
vars/gitea.groovy
Normal file
40
vars/gitea.groovy
Normal file
@ -0,0 +1,40 @@
|
||||
def commentPullRequest(String repo, String issueId, String comment, Integer commentIndex = 0) {
|
||||
comment = comment.replaceAll('"', '\\"')
|
||||
withCredentials([
|
||||
string(credentialsId: 'GITEA_JENKINS_PERSONAL_TOKEN', variable: 'GITEA_TOKEN'),
|
||||
]) {
|
||||
writeFile(file: ".prComment", text: comment)
|
||||
sh """#!/bin/bash
|
||||
set -xeo pipefail
|
||||
|
||||
# Récupération si il existe du commentaire existant
|
||||
previous_comment_id=\$(curl -v --fail \
|
||||
-H "Authorization: token ${GITEA_TOKEN}" \
|
||||
-H "Content-Type: application/json" \
|
||||
https://forge.cadoles.com/api/v1/repos/${repo}/issues/${issueId}/comments \
|
||||
| jq -c '[ .[] | select(.user.login=="jenkins") ] | .[${commentIndex}] | .id' \
|
||||
)
|
||||
|
||||
# Génération du payload pour l'API Gitea
|
||||
echo '{}' | jq -c --rawfile body .prComment '.body = \$body' > payload.json
|
||||
|
||||
if [[ "\$previous_comment_id" == "null" ]]; then
|
||||
# Création du commentaire via l'API Gitea
|
||||
curl -v --fail \
|
||||
-XPOST \
|
||||
-H "Authorization: token ${GITEA_TOKEN}" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d @payload.json \
|
||||
https://forge.cadoles.com/api/v1/repos/${repo}/issues/${issueId}/comments
|
||||
else
|
||||
# Modification du commentaire existant
|
||||
curl -v --fail \
|
||||
-XPATCH \
|
||||
-H "Authorization: token ${GITEA_TOKEN}" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d @payload.json \
|
||||
https://forge.cadoles.com/api/v1/repos/${repo}/issues/comments/\$previous_comment_id
|
||||
fi
|
||||
"""
|
||||
}
|
||||
}
|
@ -30,7 +30,7 @@ def call() {
|
||||
}
|
||||
}
|
||||
|
||||
stage("Run Lighthouse and pa11y audits") {
|
||||
stage("Run Lighthouse audit") {
|
||||
steps {
|
||||
script {
|
||||
def lighthouseImage = buildDockerImage()
|
||||
@ -66,10 +66,9 @@ def call() {
|
||||
rocketSend (
|
||||
avatar: 'https://jenkins.cadol.es/static/b5f67753/images/headshot.png',
|
||||
message: """
|
||||
Les audits pour `${params.url}` sont terminés:
|
||||
L'audit Lighthouse pour `${params.url}` est terminé:
|
||||
|
||||
- [Voir le rapport Lighthouse (bonnes pratiques)](${env.BUILD_URL}Rapports_20d_27audit/lighthouse.report.html)
|
||||
- [Voir le rapport pa11y (accessibilité)](${env.BUILD_URL}Rapports_20d_27audit/pa11y.report.html)
|
||||
- [Voir le rapport](${env.BUILD_URL}Rapports_20d_27audit/lighthouse.report.html)
|
||||
|
||||
[Lancer un nouvel audit](${env.BUILD_URL}../build)
|
||||
|
||||
|
@ -13,6 +13,15 @@ def getRandomDeliveryAttachment(Integer probability = 25) {
|
||||
'https://media.giphy.com/media/QBRlXHKV5mpbLJ4prc/giphy.gif',
|
||||
'https://media.giphy.com/media/NOsfNQGivMFry/giphy.gif',
|
||||
'https://media.giphy.com/media/M1vu1FJnW6gms/giphy.gif',
|
||||
'https://media.giphy.com/media/555x0gFF89OhVWPkvb/giphy.gif',
|
||||
'https://media.giphy.com/media/9RZu6ahd8LIYHQlGUD/giphy.gif',
|
||||
'https://media.giphy.com/media/9RZu6ahd8LIYHQlGUD/giphy.gif',
|
||||
'https://media.giphy.com/media/W1fFHj6LvyTgfBNdiz/giphy.gif',
|
||||
'https://media.giphy.com/media/1g2JyW7p6mtZc6bOEY/giphy.gif',
|
||||
'https://media.giphy.com/media/ORiFE3ijpNaIWDoOqP/giphy.gif',
|
||||
'https://media.giphy.com/media/r16Zmuvt1hSTK/giphy.gif',
|
||||
'https://media.giphy.com/media/bF8Tvy2Ta0mqxXgaPV/giphy.gif',
|
||||
'https://media.giphy.com/media/C0XT6BmLC3nGg/giphy.gif'
|
||||
]
|
||||
Random rnd = new Random()
|
||||
if (rnd.nextInt(100) > probability) {
|
||||
|
117
vars/pulp.groovy
Normal file
117
vars/pulp.groovy
Normal file
@ -0,0 +1,117 @@
|
||||
import groovy.json.JsonOutput
|
||||
|
||||
def exportPackages(
|
||||
String credentials,
|
||||
List packages = [],
|
||||
String pulpHost = 'pulp.bbohard.lan'
|
||||
) {
|
||||
def exportTasks = []
|
||||
packages.each {
|
||||
def response = httpRequest authentication: credentials, url: "https://${pulpHost}/pulp/api/v3/content/deb/packages/", httpMode: 'POST', ignoreSslErrors: true, multipartName: "file", timeout: 900, responseHandle: 'NONE', uploadFile: "${it}"
|
||||
jsonResponse = readJSON text: response.content
|
||||
println(jsonResponse)
|
||||
exportTasks << jsonResponse['task']
|
||||
}
|
||||
return exportTasks
|
||||
}
|
||||
|
||||
def getRepositoryHREF(
|
||||
String credentials,
|
||||
String repositoryLevel = 'dev',
|
||||
String pulpHost = 'pulp.bbohard.lan'
|
||||
) {
|
||||
def repositoriesMapping = ['dev': 'Cadoles4MSE']
|
||||
def response = httpRequest authentication: credentials, url: "https://${pulpHost}/pulp/api/v3/repositories/deb/apt/", httpMode: 'GET', ignoreSslErrors: true
|
||||
def jsonResponse = readJSON text: response.content
|
||||
println(jsonResponse)
|
||||
def repositories = jsonResponse.results
|
||||
def repositoryHREF = repositories.find { it -> it['name'] == repositoriesMapping[repositoryLevel] }
|
||||
return repositoryHREF.pulp_href
|
||||
}
|
||||
|
||||
/**
 * Adds already-uploaded package content units to a Pulp repository and waits
 * for the resulting task to finish.
 *
 * @param credentials    Jenkins credentials ID used for HTTP Basic auth against Pulp.
 * @param packagesHREF   Relative HREFs of the package content units to add.
 * @param repositoryHREF Relative HREF of the target repository.
 * @param pulpHost       Pulp server hostname.
 * @return Resources created by the completed task.
 */
def addToRepository(
    String credentials,
    List packagesHREF,
    String repositoryHREF,
    String pulpHost = 'pulp.bbohard.lan'
) {
    // The modify/ endpoint expects absolute URLs for each content unit.
    def contentUnits = packagesHREF.collect { href -> "https://$pulpHost$href" }
    def requestBody = JsonOutput.toJson(["add_content_units": contentUnits])
    // validResponseCodes "100:599" keeps httpRequest from aborting the build
    // on a non-2xx status; the response body is parsed regardless.
    def response = httpRequest authentication: credentials, url: "https://${pulpHost}${repositoryHREF}modify/", httpMode: 'POST', requestBody: requestBody, contentType: 'APPLICATION_JSON', ignoreSslErrors: true, validResponseCodes: "100:599"
    def parsed = readJSON text: response.content
    return waitForTaskCompletion(credentials, parsed.task)
}
|
||||
|
||||
/**
 * Creates a "simple" APT publication for a Pulp repository and waits for the
 * publication task to finish.
 *
 * @param credentials    Jenkins credentials ID used for HTTP Basic auth against Pulp.
 * @param repositoryHREF Relative HREF of the repository to publish.
 * @param pulpHost       Pulp server hostname.
 * @return Resources created by the completed task (the publication HREF).
 */
def publishRepository(
    String credentials,
    String repositoryHREF,
    String pulpHost = 'pulp.bbohard.lan'
) {
    def requestBody = JsonOutput.toJson(["repository": repositoryHREF, "simple": true])
    def response = httpRequest authentication: credentials, url: "https://${pulpHost}/pulp/api/v3/publications/deb/apt/", httpMode: 'POST', requestBody: requestBody, contentType: 'APPLICATION_JSON', ignoreSslErrors: true
    def parsed = readJSON text: response.content
    println(parsed)
    return waitForTaskCompletion(credentials, parsed.task)
}
|
||||
|
||||
/**
 * Exposes a Pulp publication through a named distribution, creating the
 * distribution when it does not exist yet or updating it in place otherwise.
 *
 * @param credentials      Jenkins credentials ID used for HTTP Basic auth against Pulp.
 * @param publicationHREF  Relative HREF of the publication to distribute.
 * @param distributionName Name of the distribution to create or update.
 * @param basePath         Base path under which the distribution is served.
 * @param pulpHost         Pulp server hostname.
 * @param contentGuard     Optional content guard HREF (null for none).
 * @return On update: a one-element list holding the distribution HREF.
 *         On creation: the resources created by the completed task.
 */
def distributePublication(
    String credentials,
    String publicationHREF,
    String distributionName,
    String basePath,
    String pulpHost = 'pulp.bbohard.lan',
    String contentGuard = null
) {
    // Look up an existing distribution with the requested name.
    def listResponse = httpRequest authentication: credentials, url: "https://${pulpHost}/pulp/api/v3/distributions/deb/apt/", httpMode: 'GET', ignoreSslErrors: true
    def listJson = readJSON text: listResponse.content
    def existing = listJson.results.find { dist -> dist.name == distributionName }

    // PUT updates the existing distribution in place; POST creates a new one.
    def method = existing ? 'PUT' : 'POST'
    def targetPath = existing ? existing.pulp_href : '/pulp/api/v3/distributions/deb/apt/'

    def requestBody = JsonOutput.toJson(["publication": publicationHREF, "name": distributionName, "base_path": basePath, "content_guard": contentGuard])
    def writeResponse = httpRequest authentication: credentials, url: "https://${pulpHost}${targetPath}", httpMode: method, requestBody: requestBody, contentType: 'APPLICATION_JSON', ignoreSslErrors: true, validResponseCodes: "100:599"
    def writeJson = readJSON text: writeResponse.content

    if (existing) {
        // Update tasks create no resources; return the distribution HREF itself.
        waitForTaskCompletion(credentials, writeJson.task)
        return [targetPath]
    }
    return waitForTaskCompletion(credentials, writeJson.task)
}
|
||||
|
||||
/**
 * Polls a Pulp task every 10 seconds until it completes.
 *
 * @param credentials Jenkins credentials ID used for HTTP Basic auth against Pulp.
 * @param taskHREF    Relative HREF of the task to poll.
 * @param pulpHost    Pulp server hostname.
 * @return The task's `created_resources` once it completes.
 */
def waitForTaskCompletion(
    String credentials,
    String taskHREF,
    String pulpHost = 'pulp.bbohard.lan'
) {
    def createdResources = []
    while (true) {
        def response = httpRequest authentication: credentials, url: "https://${pulpHost}${taskHREF}", httpMode: 'GET', ignoreSslErrors: true
        def jsonResponse = readJSON text: response.content
        def status = jsonResponse.state
        if (status == 'completed') {
            createdResources = jsonResponse.created_resources
            // Fix: exit immediately instead of sleeping one extra cycle.
            break
        }
        // Fix: the original looped forever on terminal non-success states.
        if (status == 'failed' || status == 'canceled') {
            error("Pulp task ${taskHREF} ended in state '${status}'")
        }
        sleep(10)
    }
    return createdResources
}
|
||||
|
||||
/**
 * Fetches a distribution resource and returns the URL it is served from.
 *
 * @param credentials  Jenkins credentials ID used for HTTP Basic auth against Pulp.
 * @param resourceHREF Relative HREF of the distribution resource.
 * @param pulpHost     Pulp server hostname.
 * @return The distribution's `base_url`.
 */
def getDistributionURL(
    String credentials,
    String resourceHREF,
    String pulpHost = 'pulp.bbohard.lan'
) {
    def response = httpRequest authentication: credentials, url: "https://${pulpHost}${resourceHREF}", httpMode: 'GET', ignoreSslErrors: true
    def distribution = readJSON text: response.content
    println(distribution)
    return distribution.base_url
}
|
80
vars/sonarqube.groovy
Normal file
80
vars/sonarqube.groovy
Normal file
@ -0,0 +1,80 @@
|
||||
// Pipeline de scan de projet avec SonarQube
|
||||
// Declarative pipeline: clones the project designated by the job's
// environment (repository_url / ref) and runs a SonarQube scan on it via the
// sonar-scanner-cli Docker image.
def call() {
    pipeline {
        agent {
            // Requires a node able to run Docker containers.
            label 'docker'
        }

        environment {
            // Per-build checkout directory, removed in the post section.
            projectDir = "${env.project_name}_${env.BUILD_ID}"
        }

        stages {
            stage("Package project") {
                when {
                    // Skip scheduled (timer-triggered) builds.
                    not {
                        triggeredBy 'TimerTrigger'
                    }
                }
                steps {
                    script {
                        stage("Clone repository") {
                            // Full clone (no shallow, tags kept) of env.ref into
                            // projectDir, wiping any previous workspace content.
                            checkout scm:
                                [
                                    $class: 'GitSCM',
                                    userRemoteConfigs: [[url: env.repository_url, credentialsId: 'jenkins-forge-ssh']],
                                    branches: [[name: env.ref]],
                                    extensions: [
                                        [$class: 'RelativeTargetDirectory', relativeTargetDir: env.projectDir ],
                                        [$class: 'CloneOption', noTags: false, shallow: false, depth: 0, reference: ''],
                                        [$class: 'WipeWorkspace' ]
                                    ]
                                ],
                                changelog: false,
                                poll: false
                        }

                        stage("Scan project") {
                            dir(env.projectDir) {
                                // SonarQube endpoint and token come from Jenkins
                                // string credentials, not from the job config.
                                withCredentials([
                                    string(credentialsId: 'SONARQUBE_URL', variable: 'SONARQUBE_URL'),
                                    string(credentialsId: 'SONARQUBE_TOKEN', variable: 'SONARQUBE_TOKEN'),
                                ]) {
                                    // Mount the checkout at /usr/src, the scanner
                                    // image's expected source root.
                                    sh """
                                    docker run \
                                        --rm \
                                        -e SONAR_HOST_URL="${env.SONARQUBE_URL}" \
                                        -e SONAR_LOGIN="${env.SONARQUBE_TOKEN}" \
                                        -v "${env.WORKSPACE}/${env.projectDir}/:/usr/src" \
                                        sonarsource/sonar-scanner-cli \
                                        -Dsonar.projectKey=${env.sonarqubeProjectKey} \
                                        -Dsonar.projectVersion=${env.ref}
                                    """
                                }

                                // Notify the Rocket.Chat channel about the scan
                                // (disabled; kept verbatim for future re-enabling)
                                // rocketSend (
                                //     avatar: 'https://jenkins.cadol.es/static/b5f67753/images/headshot.png',
                                //     message: """
                                //     Le projet ${env.project_name} a été scanné par SonarQube.

                                //     - [Voir les résultats](${env.SONARQUBE_URL}/dashboard?id=${env.sonarqubeProjectKey})
                                //     - [Visualiser le job](${env.RUN_DISPLAY_URL})

                                //     @${env.sender_login}
                                //     """.stripIndent(),
                                //     rawMessage: true,
                                // )
                            }
                        }
                    }
                }
                post {
                    always {
                        // Clean up the per-build checkout whatever the outcome.
                        sh "rm -rf '${env.projectDir}'"
                    }
                }
            }
        }
    }
}
|
115
vars/symfonyAppPipeline.groovy
Normal file
115
vars/symfonyAppPipeline.groovy
Normal file
@ -0,0 +1,115 @@
|
||||
import org.jenkinsci.plugins.pipeline.modeldefinition.Utils
|
||||
|
||||
// Scripted CI pipeline for Symfony applications: builds a throwaway tooling
// image from baseImage, installs Composer dependencies, then runs a security
// audit, PHP-CS-Fixer and PHPStan in parallel, posting reports on Gitea PRs.
def call(String baseImage = "ubuntu:22.04") {
    node {
        stage("Checkout project") {
            checkout(scm)
        }

        stage('Run in Symfony image') {
            def symfonyImage = buildDockerImage(baseImage)
            symfonyImage.inside() {
                // Derive the Gitea "owner/repo" slug from the job name; for PR
                // jobs the trailing "/PR-NNN" job-base segment is stripped.
                def repo = env.JOB_NAME
                if (env.BRANCH_NAME ==~ /^PR-.*$/) {
                    repo = env.JOB_NAME - "/${env.JOB_BASE_NAME}"
                }

                stage("Install composer dependencies") {
                    sh '''
                    composer install
                    '''
                }

                parallel([
                    'php-security-check': {
                        stage("Check PHP security issues") {
                            // Vulnerabilities mark the build UNSTABLE, not FAILED.
                            catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
                                // '|| true' keeps the step alive; the report text
                                // itself decides success below.
                                def auditReport = sh(script: "local-php-security-checker --format=markdown || true", returnStdout: true)
                                if (auditReport.trim() != "") {
                                    // On PR builds, publish the report as a PR comment.
                                    if (env.CHANGE_ID) {
                                        gitea.commentPullRequest(repo, env.CHANGE_ID, auditReport, 0)
                                    } else {
                                        print auditReport
                                    }
                                }
                                // The checker's "all clear" sentence is the pass signal.
                                if (!auditReport.contains("No packages have known vulnerabilities.")) {
                                    throw new Exception("Dependencies check failed !")
                                }
                            }
                        }
                    },
                    'php-cs-fixer': {
                        stage("Run PHP-CS-Fixer on modified code") {
                            catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
                                // Fall back to the shared-library config when the
                                // project ships none of its own.
                                if ( !fileExists('.php-cs-fixer.dist.php') ) {
                                    def phpCsFixerConfig = libraryResource 'com/cadoles/symfony/.php-cs-fixer.dist.php'
                                    writeFile file:'.php-cs-fixer.dist.php', text:phpCsFixerConfig
                                }

                                // Restrict the dry-run to PHP files touched by the
                                // last commit, unless the change set is only the
                                // fixer config / composer.lock.
                                sh '''
                                CHANGED_FILES=$(git diff --name-only --diff-filter=ACMRTUXB "HEAD~..HEAD" | fgrep ".php" | tr "\n" " ")
                                if ! echo "${CHANGED_FILES}" | grep -qE "^(\\.php-cs-fixer(\\.dist)\\.php?|composer\\.lock)$"; then EXTRA_ARGS=$(printf -- '--path-mode=intersection -- %s' "${CHANGED_FILES}"); else EXTRA_ARGS=''; fi
                                php-cs-fixer fix --config=.php-cs-fixer.dist.php -v --dry-run --using-cache=no --format junit ${EXTRA_ARGS} > php-cs-fixer.xml || true
                                '''
                                // Convert the JUnit output to Markdown for the PR comment.
                                def report = sh(script: "junit2md php-cs-fixer.xml", returnStdout: true)
                                if (env.CHANGE_ID) {
                                    gitea.commentPullRequest(repo, env.CHANGE_ID, report, 1)
                                } else {
                                    print report
                                }
                            }
                        }
                    },
                    'phpstan': {
                        stage("Run phpstan") {
                            catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
                                // Fall back to the shared-library config when the
                                // project ships none of its own.
                                if ( !fileExists('phpstan.neon') ) {
                                    def phpStanConfig = libraryResource 'com/cadoles/symfony/phpstan.neon'
                                    writeFile file:'phpstan.neon', text:phpStanConfig
                                }
                                // Level-1 analysis of src/; output captured to a file.
                                sh '''
                                phpstan analyze -l 1 --error-format=table src > phpstan.txt || true
                                '''
                                def report = sh(script: "cat phpstan.txt", returnStdout: true)
                                // Wrap the table in a fenced Markdown code block.
                                report = "## Rapport PHPStan\n\n```\n" + report
                                report = report + "\n```\n"
                                if (env.CHANGE_ID) {
                                    gitea.commentPullRequest(repo, env.CHANGE_ID, report, 2)
                                } else {
                                    print report
                                }
                            }
                        }
                    }
                ])
            }
        }
    }
}
|
||||
|
||||
/**
 * Builds the Symfony CI tooling image on top of the given base image.
 *
 * The Dockerfile body and the Let's Encrypt CA helper script come from the
 * shared-library resources; the image tag is derived from the job name and
 * build ID so concurrent jobs never collide.
 *
 * @param baseImage Base Docker image reference (e.g. "ubuntu:22.04").
 * @return The built Docker image object.
 */
def buildDockerImage(String baseImage) {
    def imageName = "cadoles-symfony-ci"
    // Build in a hidden per-image directory so the context stays minimal.
    dir (".${imageName}") {
        def dockerfileBody = libraryResource 'com/cadoles/symfony/Dockerfile'
        def caScript = libraryResource 'com/cadoles/common/add-letsencrypt-ca.sh'

        // Prepend the FROM line so the resource stays base-image agnostic.
        writeFile file:'Dockerfile', text: "FROM ${baseImage}\n\n" + dockerfileBody
        writeFile file:'add-letsencrypt-ca.sh', text:caScript

        // Job names may contain URL-encoded characters, slashes and spaces;
        // normalize them into a valid Docker tag.
        def safeJobName = URLDecoder.decode(env.JOB_NAME).toLowerCase().replace('/', '-').replace(' ', '-')
        return docker.build("${imageName}:${safeJobName}-${env.BUILD_ID}", ".")
    }
}
|
||||
|
||||
/**
 * Conditionally runs a closure inside a scripted pipeline, marking the
 * current stage as skipped (like declarative `when`) if the condition fails.
 *
 * @param condition Whether the body should run.
 * @param body      Closure to execute when the condition holds.
 */
def when(boolean condition, body) {
    // Resolve names through the closure's owner first, delegating leftovers
    // to an empty map.
    def settings = [:]
    body.resolveStrategy = Closure.OWNER_FIRST
    body.delegate = settings

    if (!condition) {
        // Render the stage as skipped in the pipeline view.
        Utils.markStageSkippedForConditional(STAGE_NAME)
        return
    }
    body()
}
|
@ -67,6 +67,7 @@ def buildPackage(
|
||||
|
||||
stage("Run Tamarin") {
|
||||
def dockerArgs = """
|
||||
-u 0
|
||||
-v /var/run/docker.sock:/var/run/docker.sock
|
||||
${forceRebuild ? '-e TAMARIN_FORCE_REBUILD=1' : ''}
|
||||
${packageArch ? '-e TAMARIN_PACKAGE_ARCH='+packageArch : ''}
|
||||
@ -89,6 +90,41 @@ def buildPackage(
|
||||
|
||||
}
|
||||
|
||||
/**
 * Compiles documentation with Tamarin inside a throwaway Docker image and
 * collects the generated PDF files.
 *
 * @param buildProfile Tamarin build profile to use.
 * @param destDir      Directory where Tamarin writes its output.
 * @param forceRebuild Kept for signature compatibility; not read here
 *                     (TODO confirm whether it should feed TAMARIN_FORCE_REBUILD
 *                     like buildPackage does).
 * @return List of paths to the generated PDF files.
 */
def compileDoc(
    String buildProfile,
    String destDir = "./packages",
    Boolean forceRebuild = false
) {

    def tamarinImage
    def packages = []

    stage("Create Tamarin environment") {
        tamarinImage = buildDockerImage()
    }

    stage("Run Tamarin") {
        // Run as root with the host Docker socket mounted so Tamarin can
        // drive Docker itself.
        def dockerArgs = """
        -u 0
        -v /var/run/docker.sock:/var/run/docker.sock
        -e TAMARIN_PROFILE=${buildProfile}
        -e TAMARIN_DEST_DIR=${destDir}
        """.stripIndent()

        tamarinImage.inside(dockerArgs) {
            sh 'run-tamarin'
        }

        // Fix: quote '*.pdf' so the shell cannot glob-expand the pattern
        // before find receives it (the original passed a bare *.pdf).
        packages = sh(script: "find '${destDir}' -type f -name '*.pdf'", returnStdout: true)
            .split('\n')
            .collect { return it.trim() }
            .findAll { it != '' }
    }
    println(packages)
    return packages

}
|
||||
|
||||
def prepareEnvironment(
|
||||
String packageProfile = "debian",
|
||||
String baseImage = ""
|
||||
@ -101,6 +137,7 @@ def prepareEnvironment(
|
||||
|
||||
stage("Prepare Tamarin") {
|
||||
def dockerArgs = """
|
||||
-u 0
|
||||
-v /var/run/docker.sock:/var/run/docker.sock
|
||||
${baseImage ? '-e TAMARIN_BASE_IMAGE='+baseImage : ''}
|
||||
${packageProfile ? '-e TAMARIN_PROFILE='+packageProfile : ''}
|
||||
@ -121,9 +158,12 @@ def buildDockerImage() {
|
||||
|
||||
def runTamarinScript = libraryResource 'com/cadoles/tamarin/run-tamarin.sh'
|
||||
writeFile file:'run-tamarin.sh', text:runTamarinScript
|
||||
|
||||
def addLetsEncryptCA = libraryResource 'com/cadoles/common/add-letsencrypt-ca.sh'
|
||||
writeFile file:'add-letsencrypt-ca.sh', text:addLetsEncryptCA
|
||||
|
||||
def safeJobName = URLDecoder.decode(env.JOB_NAME).toLowerCase().replace('/', '-').replace(' ', '-')
|
||||
def imageTag = "${safeJobName}-${env.BUILD_ID}"
|
||||
return docker.build("tamarin:${imageTag}", ".")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
Reference in New Issue
Block a user