15 Commits

Author SHA1 Message Date
8f33a8a4c9 Configure the user name and email for interactions with git repositories 2022-09-26 08:44:28 +02:00
bf7e3cf9b9 Wrong destination directory name 2022-08-11 12:35:37 +02:00
cc802f6fdd Missing nextcloud in the URL 2022-08-11 12:29:23 +02:00
e8198240de Restore the correct credentials ID for authentication 2022-08-11 12:01:32 +02:00
03d07acc23 Naive encoding of the destination folder 2022-08-11 11:53:32 +02:00
71a94bb201 Variables for the production context 2022-08-11 11:06:32 +02:00
5dee07ce46 First version of the pipeline for building the training materials 2022-08-08 16:57:27 +02:00
502c2c7ed9 Remove junit publishing checks reporting 2022-07-19 08:55:11 +02:00
9cf903ce41 feat(debian): option to interpret the expected version as a pattern or not 2022-06-16 17:20:47 +02:00
cf66210f4e feat(debian): add debug information on package version lookup 2022-06-16 16:52:52 +02:00
83688cc56c pipeline(mse-rgaa): fix pa11y installation/execution
See CNOUS/mse#664
2022-06-16 12:56:31 +02:00
8699ec0a9d pipeline(symfony): fix php-cs-fixer-stage 2022-05-18 17:33:25 +02:00
ed1fb84ea9 pipeline(symfony): fix php-cs-fixer execution 2022-05-18 17:25:49 +02:00
d50a9c6b77 pipeline: add symfony app generic integration pipeline 2022-05-18 16:49:05 +02:00
ce2c30003e Function to upload packages to pulp (default domain to be changed) 2022-01-25 15:11:54 +01:00
14 changed files with 672 additions and 14 deletions


@@ -18,7 +18,7 @@ pipeline {
agent {
node {
label "mse"
label "docker"
}
}
@@ -45,10 +45,10 @@ pipeline {
}
}
junit "*.xml"
junit testResults: '*.xml', skipPublishingChecks: true
rocketSend (
channel: "#cnous-mse-dev",
channel: "#cnous-mse",
avatar: 'https://jenkins.cadol.es/static/b5f67753/images/headshot.png',
message: """
Audit RGAA | ${testStatuses()}
@@ -65,6 +65,14 @@ pipeline {
}
}
post {
always {
cleanWs()
}
}
}
@NonCPS
@@ -79,4 +87,4 @@ def testStatuses() {
testStatus = "Passant(s): ${passed}, Échoué(s): ${failed} ${testResultAction.failureDiffString}, Désactivé(s): ${skipped}"
}
return testStatus
}
}


@@ -35,7 +35,7 @@ RUN apk add --no-cache \
chromium \
bash
RUN PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 npm install -g pa11y pa11y-reporter-html@^1.0.0 pa11y-reporter-junit
RUN PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 npm install -g pa11y@^5.0.0 pa11y-reporter-html@^1.0.0 pa11y-reporter-junit
RUN adduser -D pa11y


@@ -9,6 +9,7 @@ cd reports
export PUPPETEER_EXECUTABLE_PATH=$(which chromium-browser)
export PA11Y_REPORTER="${PA11Y_REPORTER:-html}"
export PA11Y_STANDARD=${PA11Y_STANDARD:-WCAG2AA}
PA11Y_ARGS=""


@@ -0,0 +1,41 @@
<?php
$finder = PhpCsFixer\Finder::create()
->in(__DIR__.'/src')
->name('*.php')
;
return (new PhpCsFixer\Config())
->setRules([
'@Symfony' => true,
'concat_space' => ['spacing' => 'none'],
'array_syntax' => ['syntax' => 'short'],
'combine_consecutive_issets' => true,
'explicit_indirect_variable' => true,
'no_useless_return' => true,
'ordered_imports' => true,
'no_unused_imports' => true,
'no_spaces_after_function_name' => true,
'no_spaces_inside_parenthesis' => true,
'ternary_operator_spaces' => true,
'class_definition' => ['single_line' => true],
'whitespace_after_comma_in_array' => true,
'phpdoc_add_missing_param_annotation' => ['only_untyped' => true],
'phpdoc_order' => true,
'phpdoc_types_order' => [
'null_adjustment' => 'always_last',
'sort_algorithm' => 'alpha',
],
'phpdoc_no_empty_return' => false,
'phpdoc_summary' => false,
'general_phpdoc_annotation_remove' => [
'annotations' => [
'expectedExceptionMessageRegExp',
'expectedException',
'expectedExceptionMessage',
'author',
],
],
])
->setFinder($finder)
;


@@ -0,0 +1,42 @@
ARG PHP_SECURITY_CHECKER_VERSION=1.0.0
ARG JQ_VERSION=1.6
RUN apt update && \
DEBIAN_FRONTEND=noninteractive apt-get install -y \
wget tar curl ca-certificates \
openssl bash git unzip \
php-cli php-dom php-mbstring php-ctype php-xml php-iconv
COPY add-letsencrypt-ca.sh /root/add-letsencrypt-ca.sh
RUN bash /root/add-letsencrypt-ca.sh \
&& rm -f /root/add-letsencrypt-ca.sh
RUN wget -O /usr/local/bin/jq https://github.com/stedolan/jq/releases/download/jq-${JQ_VERSION}/jq-linux64 \
&& chmod +x /usr/local/bin/jq
# Install local-php-security-checker
RUN wget -O /usr/local/bin/local-php-security-checker https://github.com/fabpot/local-php-security-checker/releases/download/v${PHP_SECURITY_CHECKER_VERSION}/local-php-security-checker_${PHP_SECURITY_CHECKER_VERSION}_linux_amd64 \
&& chmod +x /usr/local/bin/local-php-security-checker
# Install junit2md
RUN junit2md_download_url=$(curl "https://forge.cadoles.com/api/v1/repos/Cadoles/junit2md/releases" -H "accept:application/json" | jq -r 'sort_by(.published_at) | reverse | .[0] | .assets[] | select(.name == "junit2md-linux-amd64.tar.gz") | .browser_download_url') \
&& wget -O junit2md-linux-amd64.tar.gz "$junit2md_download_url" \
&& tar -xzf junit2md-linux-amd64.tar.gz \
&& cp junit2md-linux-amd64/junit2md /usr/local/bin/junit2md
# Install composer
RUN wget https://raw.githubusercontent.com/composer/getcomposer.org/76a7060ccb93902cd7576b67264ad91c8a2700e2/web/installer -O - -q | php -- --force --install-dir /usr/local/bin --filename composer \
&& chmod +x /usr/local/bin/composer
# Install php-cs-fixer
RUN mkdir --parents /tools/php-cs-fixer \
&& composer require --working-dir=/tools/php-cs-fixer friendsofphp/php-cs-fixer \
&& ln -s /tools/php-cs-fixer/vendor/bin/php-cs-fixer /usr/local/bin/php-cs-fixer
# Install php-stan
RUN mkdir --parents /tools/phpstan \
&& composer require --working-dir=/tools/phpstan phpstan/phpstan \
&& ln -s /tools/phpstan/vendor/bin/phpstan /usr/local/bin/phpstan \
&& composer require --working-dir=/tools/phpstan phpstan/phpstan-symfony \
&& composer require --working-dir=/tools/phpstan phpstan/phpstan-doctrine


@@ -0,0 +1,4 @@
includes:
- /tools/phpstan/vendor/phpstan/phpstan-symfony/extension.neon
- /tools/phpstan/vendor/phpstan/phpstan-doctrine/extension.neon
- /tools/phpstan/vendor/phpstan/phpstan-doctrine/rules.neon


@@ -1,17 +1,16 @@
FROM alpine:3.12
FROM alpine:latest
ARG HTTP_PROXY=
ARG HTTPS_PROXY=
ARG http_proxy=
ARG https_proxy=
ARG TAMARIN_VERSION=develop
RUN apk add --no-cache git docker python3 bash openssl curl
RUN curl -k https://forge.cadoles.com/Cadoles/Jenkins/raw/branch/master/resources/com/cadoles/common/add-letsencrypt-ca.sh | bash
RUN git clone http://forge.cadoles.com/Cadoles/Tamarin /tamarin\
ARG TAMARIN_VERSION=feature/doc-compile
RUN git clone https://forge.cadoles.com/Cadoles/Tamarin /tamarin\
&& cd /tamarin\
&& git checkout ${TAMARIN_VERSION}


@@ -28,6 +28,6 @@ DEST_DIR=${TAMARIN_DEST_DIR:-dist}
mkdir -p ${DEST_DIR}
for f in /dist/*; do
if [ -e "$f" ]; then
cp "$f" ./${DEST_DIR}
cp -r "$f" ./${DEST_DIR}
fi
done
done

vars/compileDoc.groovy (new file, 246 lines added)

@@ -0,0 +1,246 @@
// Pipeline for compiling the training materials and publishing them to Nextcloud
def call() {
def buildTag
def gitEmail = params.gitEmail ? params.gitEmail : 'jenkins@cadoles.com'
def gitUsername = params.gitUsername ? params.gitUsername : 'Jenkins'
pipeline {
agent any
environment {
projectDir = "${env.project_name}_${env.BUILD_ID}"
}
triggers {
// Run the pipeline every weekday at 07:30 to prepare the build environment
cron('30 7 * * 1-5')
}
parameters {
string(
name: 'targetUrl',
description: 'URL cible pour le dépôt de fichier',
defaultValue: 'https://nextcloud.cadoles.com/nextcloud'
)
string(
name: 'targetFolder',
description: 'Répertoire racine cible partagé avec l’utilisateur',
defaultValue: 'Cadoles Formation'
)
string(
name: 'credentialsId',
description: "Identifiant du compte de type login/mot de passe",
defaultValue: 'nextcloud-user-for-formation-documents'
)
}
stages {
stage("Prepare build environment") {
when {
anyOf {
triggeredBy cause: "UserIdCause", detail: "bbohard"
triggeredBy 'TimerTrigger'
}
}
steps {
script {
tamarin.prepareEnvironment()
}
}
}
stage("Build doc") {
when {
not {
triggeredBy 'TimerTrigger'
}
}
steps {
script {
stage("Check tag") {
buildTag = env.ref
if (!buildTag.startsWith('build/')) {
currentBuild.result= 'ABORTED'
error("La référence `${buildTag}` nest pas une demande de paquet valide.")
}
}
stage("Clone repository") {
checkout scm:
[
$class: 'GitSCM',
userRemoteConfigs: [[url: env.repository_url, credentialsId: 'jenkins-forge-ssh']],
branches: [[name: env.ref]],
extensions: [
[$class: 'RelativeTargetDirectory', relativeTargetDir: env.projectDir ],
[$class: 'CloneOption', noTags: false, shallow: false, depth: 0, reference: ''],
[$class: 'WipeWorkspace' ]
]
],
changelog: false,
poll: false
}
stage("Checkout ref") {
dir(env.projectDir) {
sh """
git checkout ${env.ref}
"""
return
}
}
stage("Compile document") {
dir(env.projectDir) {
def date = new Date()
def dateTag = date.format('yyyyMMdd')
def splittedTag = env.ref.split('/')
def docProfile = splittedTag[1]
withCredentials([
usernamePassword(
credentialsId: params.credentialsId,
usernameVariable: "NEXTCLOUD_USER",
passwordVariable: "NEXTCLOUD_PASSWORD"
)
]) {
targetFolder = targetFolder.replace(' ', '%20')
def rootFolder = "${params.targetUrl}/remote.php/dav/files/${NEXTCLOUD_USER}/${targetFolder}"
def projectName = env.project_name
def destFolder = "${projectName}/${docProfile}"
def result = tamarin.compileDoc(env.buildProfile)
if(result.size() == 0) {
error('No artefact produced')
}
println(result)
if(docProfile != 'draft') {
def publicFolder = "${destFolder}/latest/public"
def privateFolder = "${destFolder}/latest/private"
def archivePublicFolder = "${destFolder}/archive/${dateTag}/public"
def archivePrivateFolder = "${destFolder}/archive/${dateTag}/private"
createWebDAVFolder (params.credentialsId, rootFolder, publicFolder)
createWebDAVFolder (params.credentialsId, rootFolder, privateFolder)
createWebDAVFolder (params.credentialsId, rootFolder, archivePublicFolder)
createWebDAVFolder (params.credentialsId, rootFolder, archivePrivateFolder)
result.each { r ->
println(r)
splittedDest = r.split('/')
if(splittedDest[2] == 'public') {
def destPath = "${rootFolder}/${publicFolder}/${splittedDest[-1]}"
def destArchivePath = "${rootFolder}/${archivePublicFolder}/${splittedDest[-1]}"
copyWebDAVFile (params.credentialsId, r, destPath)
copyWebDAVFile (params.credentialsId, r, destArchivePath)
} else {
def destPath = "${rootFolder}/${privateFolder}/${splittedDest[-1]}"
def destArchivePath = "${rootFolder}/${archivePrivateFolder}/${splittedDest[-1]}"
copyWebDAVFile (params.credentialsId, r, destPath)
copyWebDAVFile (params.credentialsId, r, destArchivePath)
}
}
} else {
def draftPublicFolder = "${destFolder}/public"
def draftPrivateFolder = "${destFolder}/private"
createWebDAVFolder (params.credentialsId, rootFolder, draftPublicFolder)
createWebDAVFolder (params.credentialsId, rootFolder, draftPrivateFolder)
result.each { r ->
println(r)
splittedDest = r.split('/')
if(splittedDest[2] == 'public') {
def destPath = "${rootFolder}/${draftPublicFolder}/${splittedDest[-1]}"
copyWebDAVFile (params.credentialsId, r, destPath)
} else {
def destPath = "${rootFolder}/${draftPrivateFolder}/${splittedDest[-1]}"
copyWebDAVFile (params.credentialsId, r, destPath)
}
}
}
}
withCredentials([sshUserPrivateKey(credentialsId: 'jenkins-forge-ssh', keyFileVariable: 'FORGE_SSH_KEY')]) {
writeFile(
file : "./sshForJenkins.sh",
text: '''
#!/bin/sh
ssh -i "${FORGE_SSH_KEY}" -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null "$@"
'''
)
sh(script: "chmod +x ./sshForJenkins.sh")
if (docProfile != 'draft') {
withEnv(["GIT_SSH=./sshForJenkins.sh"]) {
// Add git username/email
sh("git config user.email '${gitEmail}'")
sh("git config user.username '${gitUsername}'")
sh """
git tag -am "paquet" release/v${dateTag}
"""
sh """
git push --tags origin
"""
}
}
withEnv(["GIT_SSH=./sshForJenkins.sh"]) {
// Add git username/email
sh("git config user.email '${gitEmail}'")
sh("git config user.username '${gitUsername}'")
sh """
git tag -d ${env.ref}
git push origin :${env.ref}
"""
}
}
}
}
}
}
}
}
}
}
def createWebDAVFolder (
String creds,
String rootUrl,
String folder
) {
withCredentials([
usernamePassword(
credentialsId: creds,
usernameVariable: "NEXTCLOUD_USER",
passwordVariable: "NEXTCLOUD_PASSWORD"
)
]) {
println(rootUrl)
println(folder)
def splittedFolder = folder.split('/')
splittedFolder.eachWithIndex { subfolder, i ->
def newFolder = ""
if(i == 0) {
newFolder = subfolder
} else {
def prec = i - 1
def parentFolder = splittedFolder[0..prec].join('/')
newFolder = "${parentFolder}/${subfolder}"
}
println(newFolder)
sh 'curl -X MKCOL --user ${NEXTCLOUD_USER}:${NEXTCLOUD_PASSWORD} --basic ' + "${rootUrl}/${newFolder}"
}
}
}
def copyWebDAVFile (
String creds,
String newFile,
String destUrl
) {
withCredentials([
usernamePassword(
credentialsId: creds,
usernameVariable: "NEXTCLOUD_USER",
passwordVariable: "NEXTCLOUD_PASSWORD"
)
]) {
sh "curl -T ${newFile}" + ' --user ${NEXTCLOUD_USER}:${NEXTCLOUD_PASSWORD} --basic ' + destUrl
}
}


@@ -2,6 +2,7 @@ def waitForRepoPackage(String packageName, Map params = [:]) {
def expectedVersion = params.expectedVersion ? params.expectedVersion : null
def delay = params.delay ? params.delay : 30
def waitTimeout = params.timeout ? params.timeout : 2400
def asPattern = params.containsKey("asPattern") ? params.asPattern : true
def message = "Waiting for package '${packageName}'"
if (expectedVersion != null) {
@@ -26,9 +27,11 @@ def waitForRepoPackage(String packageName, Map params = [:]) {
println("Package found !")
break
}
def versionFound = packages.find {
return it['version'] =~ expectedVersion
def matches = asPattern ? it['version'] =~ expectedVersion : it['version'] == expectedVersion
println("Comparing expected version '${expectedVersion}' to '${it['version']}': ${matches}")
return matches
}
if (versionFound) {
@@ -76,5 +79,10 @@ def listRepoPackages(Map params = [:]) {
}
}
println "Found packages:"
packages.each{
println " - Package: ${it.key}, Version: ${it.value['version']}"
}
return packages
}
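
A minimal usage sketch for the new asPattern flag, assuming this file is exposed as the debian global of the shared library (the package name and version below are placeholders): with asPattern set to false the expected version is compared with strict equality instead of a regex match.

// Wait for an exact version match instead of treating expectedVersion as a regex pattern.
debian.waitForRepoPackage('my-package', [
    expectedVersion: '1.2.3-1',
    asPattern: false
])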

vars/gitea.groovy (new file, 40 lines added)

@@ -0,0 +1,40 @@
def commentPullRequest(String repo, String issueId, String comment, Integer commentIndex = 0) {
comment = comment.replaceAll('"', '\\"')
withCredentials([
string(credentialsId: 'GITEA_JENKINS_PERSONAL_TOKEN', variable: 'GITEA_TOKEN'),
]) {
writeFile(file: ".prComment", text: comment)
sh """#!/bin/bash
set -xeo pipefail
# Fetch the existing comment, if there is one
previous_comment_id=\$(curl -v --fail \
-H "Authorization: token ${GITEA_TOKEN}" \
-H "Content-Type: application/json" \
https://forge.cadoles.com/api/v1/repos/${repo}/issues/${issueId}/comments \
| jq -c '[ .[] | select(.user.login=="jenkins") ] | .[${commentIndex}] | .id' \
)
# Build the payload for the Gitea API
echo '{}' | jq -c --rawfile body .prComment '.body = \$body' > payload.json
if [[ "\$previous_comment_id" == "null" ]]; then
# Create the comment through the Gitea API
curl -v --fail \
-XPOST \
-H "Authorization: token ${GITEA_TOKEN}" \
-H "Content-Type: application/json" \
-d @payload.json \
https://forge.cadoles.com/api/v1/repos/${repo}/issues/${issueId}/comments
else
# Update the existing comment
curl -v --fail \
-XPATCH \
-H "Authorization: token ${GITEA_TOKEN}" \
-H "Content-Type: application/json" \
-d @payload.json \
https://forge.cadoles.com/api/v1/repos/${repo}/issues/comments/\$previous_comment_id
fi
"""
}
}
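
A hedged usage sketch, assuming this file is exposed as the gitea global (as the Symfony pipeline further down does); the repository name and comment text are placeholders.

// Post a report as the first Jenkins-authored comment on the current pull request,
// or update that comment if it already exists.
gitea.commentPullRequest('Cadoles/example-project', env.CHANGE_ID, '## Report\n\nAll checks passed.', 0)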

vars/pulp.groovy (new file, 117 lines added)

@@ -0,0 +1,117 @@
import groovy.json.JsonOutput
def exportPackages(
String credentials,
List packages = [],
String pulpHost = 'pulp.bbohard.lan'
) {
def exportTasks = []
packages.each {
def response = httpRequest authentication: credentials, url: "https://${pulpHost}/pulp/api/v3/content/deb/packages/", httpMode: 'POST', ignoreSslErrors: true, multipartName: "file", timeout: 900, responseHandle: 'NONE', uploadFile: "${it}"
jsonResponse = readJSON text: response.content
println(jsonResponse)
exportTasks << jsonResponse['task']
}
return exportTasks
}
def getRepositoryHREF(
String credentials,
String repositoryLevel = 'dev',
String pulpHost = 'pulp.bbohard.lan'
) {
def repositoriesMapping = ['dev': 'Cadoles4MSE']
def response = httpRequest authentication: credentials, url: "https://${pulpHost}/pulp/api/v3/repositories/deb/apt/", httpMode: 'GET', ignoreSslErrors: true
def jsonResponse = readJSON text: response.content
println(jsonResponse)
def repositories = jsonResponse.results
def repositoryHREF = repositories.find { it -> it['name'] == repositoriesMapping[repositoryLevel] }
return repositoryHREF.pulp_href
}
def addToRepository(
String credentials,
List packagesHREF,
String repositoryHREF,
String pulpHost = 'pulp.bbohard.lan'
) {
def packagesHREFURL = ["add_content_units": packagesHREF.collect { "https://$pulpHost$it" }]
def postBody = JsonOutput.toJson(packagesHREFURL)
def response = httpRequest authentication: credentials, url: "https://${pulpHost}${repositoryHREF}modify/", httpMode: 'POST', requestBody: postBody, contentType: 'APPLICATION_JSON', ignoreSslErrors: true, validResponseCodes: "100:599"
def jsonResponse = readJSON text: response.content
return waitForTaskCompletion(credentials, jsonResponse.task)
}
def publishRepository(
String credentials,
String repositoryHREF,
String pulpHost = 'pulp.bbohard.lan'
) {
def postBody = JsonOutput.toJson(["repository": repositoryHREF, "simple": true])
def response = httpRequest authentication: credentials, url: "https://${pulpHost}/pulp/api/v3/publications/deb/apt/", httpMode: 'POST', requestBody: postBody, contentType: 'APPLICATION_JSON', ignoreSslErrors: true
def jsonResponse = readJSON text: response.content
println(jsonResponse)
return waitForTaskCompletion(credentials, jsonResponse.task)
}
def distributePublication(
String credentials,
String publicationHREF,
String distributionName,
String basePath,
String pulpHost = 'pulp.bbohard.lan',
String contentGuard = null
) {
def response = httpRequest authentication: credentials, url: "https://${pulpHost}/pulp/api/v3/distributions/deb/apt/", httpMode: 'GET', ignoreSslErrors: true
def jsonResponse = readJSON text: response.content
def httpMode = ''
def url = ''
def distribution = jsonResponse.results.find { it -> it.name == distributionName}
if (distribution) {
httpMode = 'PUT'
url = distribution.pulp_href
} else {
httpMode = 'POST'
url = '/pulp/api/v3/distributions/deb/apt/'
}
def postBody = JsonOutput.toJson(["publication": publicationHREF, "name": distributionName, "base_path": basePath, "content_guard": contentGuard])
response = httpRequest authentication: credentials, url: "https://${pulpHost}${url}", httpMode: httpMode, requestBody: postBody, contentType: 'APPLICATION_JSON', ignoreSslErrors: true, validResponseCodes: "100:599"
jsonResponse = readJSON text: response.content
if (distribution) {
waitForTaskCompletion(credentials, jsonResponse.task)
return [url]
} else {
return waitForTaskCompletion(credentials, jsonResponse.task)
}
}
def waitForTaskCompletion(
String credentials,
String taskHREF,
String pulpHost = 'pulp.bbohard.lan'
) {
def status = ''
def created_resources = []
while (status != 'completed') {
def response = httpRequest authentication: credentials, url: "https://${pulpHost}${taskHREF}", httpMode: 'GET', ignoreSslErrors: true
def jsonResponse = readJSON text: response.content
status = jsonResponse.state
if (status == 'completed') {
created_resources = jsonResponse.created_resources
}
sleep(10)
}
return created_resources
}
def getDistributionURL(
String credentials,
String resourceHREF,
String pulpHost = 'pulp.bbohard.lan'
) {
def response = httpRequest authentication: credentials, url: "https://${pulpHost}${resourceHREF}", httpMode: 'GET', ignoreSslErrors: true
def jsonResponse = readJSON text: response.content
println(jsonResponse)
return jsonResponse.base_url
}
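
A sketch of how these helpers might be chained to push a .deb package to Pulp and serve it through a distribution; the credentials id, package path, distribution name and base path are assumptions, not values taken from this repository.

// Hypothetical end-to-end flow: upload, attach to repository, publish, distribute.
def creds = 'pulp-api-credentials'
def tasks = pulp.exportPackages(creds, ['dist/my-package_1.0.0_amd64.deb'])
def packageHrefs = []
tasks.each { packageHrefs.addAll(pulp.waitForTaskCompletion(creds, it)) }
def repoHref = pulp.getRepositoryHREF(creds, 'dev')
pulp.addToRepository(creds, packageHrefs, repoHref)
def publications = pulp.publishRepository(creds, repoHref)
def distributions = pulp.distributePublication(creds, publications[0], 'cadoles4mse-dev', 'cadoles4mse/dev')
println pulp.getDistributionURL(creds, distributions[0])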


@@ -0,0 +1,115 @@
import org.jenkinsci.plugins.pipeline.modeldefinition.Utils
def call(String baseImage = "ubuntu:22.04") {
node {
stage("Checkout project") {
checkout(scm)
}
stage('Run in Symfony image') {
def symfonyImage = buildDockerImage(baseImage)
symfonyImage.inside() {
def repo = env.JOB_NAME
if (env.BRANCH_NAME ==~ /^PR-.*$/) {
repo = env.JOB_NAME - "/${env.JOB_BASE_NAME}"
}
stage("Install composer dependencies") {
sh '''
composer install
'''
}
parallel([
'php-security-check': {
stage("Check PHP security issues") {
catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
def auditReport = sh(script: "local-php-security-checker --format=markdown || true", returnStdout: true)
if (auditReport.trim() != "") {
if (env.CHANGE_ID) {
gitea.commentPullRequest(repo, env.CHANGE_ID, auditReport, 0)
} else {
print auditReport
}
}
if (!auditReport.contains("No packages have known vulnerabilities.")) {
throw new Exception("Dependencies check failed !")
}
}
}
},
'php-cs-fixer': {
stage("Run PHP-CS-Fixer on modified code") {
catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
if ( !fileExists('.php-cs-fixer.dist.php') ) {
def phpCsFixerConfig = libraryResource 'com/cadoles/symfony/.php-cs-fixer.dist.php'
writeFile file:'.php-cs-fixer.dist.php', text:phpCsFixerConfig
}
sh '''
CHANGED_FILES=$(git diff --name-only --diff-filter=ACMRTUXB "HEAD~..HEAD" | fgrep ".php" | tr "\n" " ")
if ! echo "${CHANGED_FILES}" | grep -qE "^(\\.php-cs-fixer(\\.dist)\\.php?|composer\\.lock)$"; then EXTRA_ARGS=$(printf -- '--path-mode=intersection -- %s' "${CHANGED_FILES}"); else EXTRA_ARGS=''; fi
php-cs-fixer fix --config=.php-cs-fixer.dist.php -v --dry-run --using-cache=no --format junit ${EXTRA_ARGS} > php-cs-fixer.xml || true
'''
def report = sh(script: "junit2md php-cs-fixer.xml", returnStdout: true)
if (env.CHANGE_ID) {
gitea.commentPullRequest(repo, env.CHANGE_ID, report, 1)
} else {
print report
}
}
}
},
'phpstan': {
stage("Run phpstan") {
catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
if ( !fileExists('phpstan.neon') ) {
def phpStanConfig = libraryResource 'com/cadoles/symfony/phpstan.neon'
writeFile file:'phpstan.neon', text:phpStanConfig
}
sh '''
phpstan analyze -l 1 --error-format=table src > phpstan.txt || true
'''
def report = sh(script: "cat phpstan.txt", returnStdout: true)
report = "## Rapport PHPStan\n\n```\n" + report
report = report + "\n```\n"
if (env.CHANGE_ID) {
gitea.commentPullRequest(repo, env.CHANGE_ID, report, 2)
} else {
print report
}
}
}
}
])
}
}
}
}
def buildDockerImage(String baseImage) {
def imageName = "cadoles-symfony-ci"
dir (".${imageName}") {
def dockerfile = libraryResource 'com/cadoles/symfony/Dockerfile'
writeFile file:'Dockerfile', text: "FROM ${baseImage}\n\n" + dockerfile
def addLetsEncryptCA = libraryResource 'com/cadoles/common/add-letsencrypt-ca.sh'
writeFile file:'add-letsencrypt-ca.sh', text:addLetsEncryptCA
def safeJobName = URLDecoder.decode(env.JOB_NAME).toLowerCase().replace('/', '-').replace(' ', '-')
def imageTag = "${safeJobName}-${env.BUILD_ID}"
return docker.build("${imageName}:${imageTag}", ".")
}
}
def when(boolean condition, body) {
def config = [:]
body.resolveStrategy = Closure.OWNER_FIRST
body.delegate = config
if (condition) {
body()
} else {
Utils.markStageSkippedForConditional(STAGE_NAME)
}
}
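
For reference, a hedged example of the Jenkinsfile a Symfony project might use to run this generic integration pipeline; the step name symfonyAppPipeline and the shared-library name cadoles are assumptions.

// Jenkinsfile (hypothetical library and step names)
@Library('cadoles') _

symfonyAppPipeline('ubuntu:22.04')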


@@ -67,6 +67,7 @@ def buildPackage(
stage("Run Tamarin") {
def dockerArgs = """
-u 0
-v /var/run/docker.sock:/var/run/docker.sock
${forceRebuild ? '-e TAMARIN_FORCE_REBUILD=1' : ''}
${packageArch ? '-e TAMARIN_PACKAGE_ARCH='+packageArch : ''}
@@ -89,6 +90,41 @@
}
def compileDoc(
String buildProfile,
String destDir = "./packages",
Boolean forceRebuild = false
) {
def tamarinImage
def packages = []
stage("Create Tamarin environment") {
tamarinImage = buildDockerImage()
}
stage("Run Tamarin") {
def dockerArgs = """
-u 0
-v /var/run/docker.sock:/var/run/docker.sock
-e TAMARIN_PROFILE=${buildProfile}
-e TAMARIN_DEST_DIR=${destDir}
""".stripIndent()
tamarinImage.inside(dockerArgs) {
sh 'run-tamarin'
}
packages = sh(script: "find '${destDir}' -type f -name *.pdf", returnStdout: true)
.split('\n')
.collect { return it.trim() }
.findAll { it != '' }
}
println(packages)
return packages
}
def prepareEnvironment(
String packageProfile = "debian",
String baseImage = ""
@@ -101,6 +137,7 @@ def prepareEnvironment(
stage("Prepare Tamarin") {
def dockerArgs = """
-u 0
-v /var/run/docker.sock:/var/run/docker.sock
${baseImage ? '-e TAMARIN_BASE_IMAGE='+baseImage : ''}
${packageProfile ? '-e TAMARIN_PROFILE='+packageProfile : ''}
@@ -129,4 +166,4 @@ def buildDockerImage() {
def imageTag = "${safeJobName}-${env.BUILD_ID}"
return docker.build("tamarin:${imageTag}", ".")
}
}
}
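
As a usage note for the new compileDoc helper: a minimal sketch, assuming the project defines a hypothetical formation Tamarin build profile.

// Compile the documentation profile and list the PDFs that Tamarin produced.
def pdfs = tamarin.compileDoc('formation')
pdfs.each { println "Generated: ${it}" }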