Compare commits

..

No commits in common. "develop" and "feature/python-rkt" have entirely different histories.

28 changed files with 307 additions and 693 deletions

View File

@ -1,10 +1,6 @@
# Tamarin # Tamarin
Usine à paquets GNU/Linux Usine à paquets expérimentale basée sur rkt/acbuild.
## Statut
Expérimental
## Formats de paquets/distributions supportés ## Formats de paquets/distributions supportés
@ -13,7 +9,11 @@ Expérimental
## Dépendances ## Dépendances
- [Python 3](https://www.python.org/downloads/) - [Python 3](https://www.python.org/downloads/)
- [Docker](>= 17.03) - Un noyau Linux > 2.6.24 (avec support des cgroups)
**Optionnel mais conseillé**
- [systemd](https://freedesktop.org/wiki/Software/systemd/)
## Usage ## Usage
@ -30,6 +30,10 @@ TODO
TODO TODO
### Répertoire de travail et mise en cache des images
TODO
## Licence ## Licence
GPLv3 GPLv3

View File

@ -1,14 +0,0 @@
#!/usr/bin/env bash
# Run the per-project hook for the given build step, if the project ships one
# under src/.tamarin/<step> and it is executable.
set -e

step="$1"
hook="src/.tamarin/${step}"

if [[ -f "$hook" && -x "$hook" ]]; then
  tamarin_info "Project hook found for \"$step\". Running it..."
  "$hook"
else
  tamarin_info "No project hook (or not executable) for \"$step\"... Skipping."
  exit
fi

View File

@ -2,6 +2,26 @@
set -e set -e
echo 'ENV DEBIAN_FRONTEND=noninteractive' >> Dockerfile ${TAMARIN_ACBUILD} environment add DEBIAN_FRONTEND noninteractive
echo 'RUN apt-get update && apt-get install --yes --no-install-recommends build-essential devscripts equivs python3' >> Dockerfile
echo 'ENV DEBIAN_FRONTEND=' >> Dockerfile if [ "${TAMARIN_ACBUILD_ENGINE}" == 'chroot' ]; then
# Ugly fix for Python installation in chrooted environment (require /dev/urandom)
head -c 65536 /dev/urandom > ./urandom
${TAMARIN_ACBUILD} copy ./urandom /dev/urandom
fi
sudo -E /usr/bin/env bash - <<EOF
export PATH=${PATH}
${TAMARIN_ACBUILD} run --engine "${TAMARIN_ACBUILD_ENGINE}" -- apt-get update
${TAMARIN_ACBUILD} run --engine "${TAMARIN_ACBUILD_ENGINE}" -- apt-get install --yes --no-install-recommends build-essential devscripts equivs python3
EOF
if [ "${TAMARIN_ACBUILD_ENGINE}" == 'chroot' ]; then
# Clean up Python fix (see above)
sudo -E /usr/bin/env bash - <<EOF
export PATH=${PATH}
${TAMARIN_ACBUILD} run --engine "${TAMARIN_ACBUILD_ENGINE}" -- rm -f /dev/urandom
EOF
fi
${TAMARIN_ACBUILD} environment remove DEBIAN_FRONTEND

View File

@ -2,6 +2,10 @@
set -e set -e
echo 'ENV DEBIAN_FRONTEND=noninteractive' >> Dockerfile ${TAMARIN_ACBUILD} environment add DEBIAN_FRONTEND noninteractive
echo 'RUN apt-get update && apt-get install --yes --no-install-recommends git-core' >> Dockerfile sudo -E /usr/bin/env bash - <<EOF
echo 'ENV DEBIAN_FRONTEND=' >> Dockerfile export PATH=${PATH}
${TAMARIN_ACBUILD} run --engine "${TAMARIN_ACBUILD_ENGINE}" -- apt-get update
${TAMARIN_ACBUILD} run --engine "${TAMARIN_ACBUILD_ENGINE}" -- apt-get install --yes --no-install-recommends git-core
EOF
${TAMARIN_ACBUILD} environment remove DEBIAN_FRONTEND

View File

@ -1,28 +0,0 @@
#!/usr/bin/env bash
# Append Dockerfile instructions that install the Let's Encrypt certificate
# chain (root + intermediates) into the image's CA store and refresh it.
set -e

readonly DESTDIR=/usr/local/share/ca-certificates
readonly UPDATE_CERTS_CMD=update-ca-certificates

certs=(
  https://letsencrypt.org/certs/isrgrootx1.pem
  https://letsencrypt.org/certs/isrg-root-x2.pem
  https://letsencrypt.org/certs/lets-encrypt-r3.pem
  https://letsencrypt.org/certs/lets-encrypt-e1.pem
  https://letsencrypt.org/certs/lets-encrypt-r4.pem
  https://letsencrypt.org/certs/lets-encrypt-e2.pem
)

{
  printf '%s\n' "ENV DEBIAN_FRONTEND=noninteractive"
  printf '%s\n' "RUN apt-get update && apt-get install --yes --no-install-recommends wget openssl ca-certificates"
  # One download + one PEM->CRT conversion per certificate.
  for cert in "${certs[@]}"; do
    filename=$(basename "$cert")
    printf '%s\n' "RUN wget -O '$DESTDIR/$filename' $cert"
    printf '%s\n' "RUN openssl x509 -in '$DESTDIR/$filename' -inform PEM -out '$DESTDIR/$filename.crt'"
  done
  printf '%s\n' "RUN $UPDATE_CERTS_CMD"
  printf '%s\n' "ENV DEBIAN_FRONTEND="
} >> Dockerfile

View File

@ -12,5 +12,3 @@ move_output_to_dist "*.deb"
move_output_to_dist "*.changes" move_output_to_dist "*.changes"
move_output_to_dist "*.dsc" move_output_to_dist "*.dsc"
move_output_to_dist "*.tar.{bz2,gz,lzma,xz}" move_output_to_dist "*.tar.{bz2,gz,lzma,xz}"
tamarin_success "Done :-)"

View File

@ -1,4 +0,0 @@
#!/usr/bin/env bash
# Delegate the "postbuild" step to the shared run-project-hooks helper in the
# common hooks directory, resolving symlinks first.
# Fix: quote all expansions so the script works when its install path
# contains spaces (the unquoted command substitution also word-split).
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
"$(readlink -f "$DIR/../../common/run-project-hooks")" postbuild

View File

@ -2,15 +2,12 @@
cd src cd src
if [ ! -f debian/control ]; then if [ -z "$(tamarin_db get project_name)" ]; then
tamarin_error "No debian/control file found !" if [ ! -d ".git" ]; then
exit 1 tamarin_error "This project is not managed with Git ! Cannot extract the project's name without it !"
fi exit 1
fi
package_name=$(sed -n 's/^Source:[ \t]*\(.*\)$/\1/p' debian/control | tr -d '[:space:]') project_name=$(basename $(git config --get remote.origin.url) | sed 's/.git$//' | tr '[:upper:]' '[:lower:]')
project_name=$(tamarin_db get project_name) tamarin_info "Extracted project name from Git metadata: \"${project_name}\""
tamarin_db set project_name "${project_name}"
if [ "${package_name}" != "${project_name}" ]; then
tamarin_warn "The project's name \"${project_name}\" differs from the package one (\"${package_name}\"). Using package's one..."
tamarin_db set project_name "${package_name}"
fi fi

View File

@ -7,13 +7,6 @@ if [ -f debian/changelog ] || [ ! -d .git ]; then
exit exit
fi fi
# Check ignore release tag
IGNORE_RELEASE_TAG=$(tamarin_db get ignore_release_tag "yes")
if [ "${IGNORE_RELEASE_TAG}" == "yes" ]; then
tamarin_warn "Release tag is ignored. Add 'ignore_release_tag=no' in .tamarinrc to enable."
exit
fi
# Get commits log as changelog # Get commits log as changelog
BUILD_TAG=$(tamarin_db get build_tag "last") BUILD_TAG=$(tamarin_db get build_tag "last")
@ -22,7 +15,7 @@ tamarin_debug "BUILD TAG IS ${BUILD_TAG}"
if [[ ${BUILD_TAG} == "last" ]] if [[ ${BUILD_TAG} == "last" ]]
then then
tags=$(git describe --match "release/*" --abbrev=0) tags=$(git tag master -l "release/*"|sort -r)
else else
tagbranch="build-tag-${BUILD_TAG}" tagbranch="build-tag-${BUILD_TAG}"
git checkout -b ${tagbranch} git checkout -b ${tagbranch}

View File

@ -19,8 +19,3 @@ echo >> ${changelog}
echo " * Package built with Tamarin. Based on commit ${current_commit}." >> ${changelog} echo " * Package built with Tamarin. Based on commit ${current_commit}." >> ${changelog}
echo >> ${changelog} echo >> ${changelog}
echo " -- ${top_contributor} ${date}" >> ${changelog} echo " -- ${top_contributor} ${date}" >> ${changelog}
tamarin_info "Generated dummy changelog:"
tamarin_info " "
cat ${changelog} | tamarin_info
tamarin_info " "

View File

@ -1,4 +0,0 @@
#!/usr/bin/env bash
# Delegate the "prebuild" step to the shared run-project-hooks helper in the
# common hooks directory, resolving symlinks first.
# Fix: quote all expansions so the script works when its install path
# contains spaces (the unquoted command substitution also word-split).
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
"$(readlink -f "$DIR/../../common/run-project-hooks")" prebuild

View File

@ -1,39 +0,0 @@
#!/usr/bin/env bash
# Append a version suffix to the topmost debian/changelog entry so that
# intermediate (non-release) builds get unique, ordered package versions.
# The suffix is derived from Git history when available, otherwise from a
# timestamp. Skipped when there is no changelog or when the project opts out
# via `no_version_suffix=yes` in its tamarin db.
cd src

if [ ! -f debian/changelog ]; then
    tamarin_info "No changelog. Skipping adding suffix to package version"
    exit
fi

# Fix: quote the substitution — an empty tamarin_db result previously made
# `[ == 'yes' ]` a syntax error.
if [ "$(tamarin_db get no_version_suffix 'no')" == 'yes' ]; then
    tamarin_info "Not adding version suffix."
    exit
fi

tamarin_info "Suffixing package version"

if [ -d .git ]; then
    tamarin_info "It seems to be a Git repository. Generating version suffix based on Git history..."
    release_tag=$(git describe --match "release/*" --abbrev=0 2>/dev/null)
    if [ -z "${release_tag}" ]
    then
        # No release tag yet: use the commit count of the develop branch.
        dev_commit_count=$(git rev-list --count --no-merges develop)
    else
        # Commits on master since the last release tag.
        dev_commit_count=$(git rev-list --count ${release_tag}..master)
    fi
    # Fix: default to 0 so the numeric test cannot fail when git produced
    # no output (e.g. missing branch).
    if [ "${dev_commit_count:-0}" -gt 0 ]
    then
        build_level=$(tamarin_db get build_level 'dev')
        version_suffix="~$build_level+${dev_commit_count}-$(git log -n1 --format=%h)"
    else
        version_suffix=''
    fi
else
    tamarin_info "Not a Git project. Fallback to timestamp for suffix generation..."
    version_suffix=tamarin$(date +%Y%m%d%H%M)
fi

tamarin_info "Suffixing package version with $version_suffix"
# Insert the suffix inside the first "(version)" occurrence of the changelog.
sed -i "0,/(\(.*\))/s/(\(.*\))/(\1${version_suffix})/" debian/changelog
cp debian/changelog /dist/changelog

View File

@ -1,276 +0,0 @@
#!/usr/bin/env bash
# Generate debian/changelog from the Git history, using release/* and pkg/*
# tags as anchor points. Runs from the build root and writes into src/debian.
cd src
# Nothing to do when the project ships its own changelog or is not a Git
# checkout.
if [ -f debian/changelog ] || [ ! -d .git ]; then
tamarin_info "Not a Git repository or Debian changelog already exists !"
exit
else
tamarin_info "Creating changelog with commits information."
fi
# Get pkg tags as tie points in commit history (most recent first)
pkg_tags="$(git for-each-ref --format '%(refname)' refs/tags | tac)"
# Set starting commit: latest build/* tag, falling back to HEAD
ceiling_commit=$(git describe --match "build/*" --abbrev=0 2>/dev/null)
if [ -z "$ceiling_commit" ]
then
ceiling_commit="HEAD"
fi
# Root commit of the repository (commit with no parents)
first_commit=$(git rev-list --max-parents=0 HEAD)
# Get commits log as changelog; anchor on the latest release/* tag
current_release_tag=$(git describe --match "release/*" --abbrev=0 2>/dev/null)
if [[ -z ${current_release_tag} ]]
then
tamarin_warn "No release tag found, you repo should have a tag like 'release/*'"
tamarin_info "Assuming tag release/0.0.0 on first commit of branch master"
current_release="release/0.0.0"
current_release_tag=${first_commit}
else
current_release=$current_release_tag
fi
touch debian/changelog
# Changelog-entry parameters, overridable through the tamarin db.
project_name=$(tamarin_db get project_name)
distribution=$(tamarin_db get distribution UNRELEASED)
urgency=$(tamarin_db get urgency low)
package_level=$(tamarin_db get package_level dev)
function get_hash {
  # Resolve any commit pointer (hash, tag, branch, HEAD...) to its full SHA-1.
  local pointer="$1"
  echo "$(git log -n1 --format=%H ${pointer})"
}
function get_short_hash {
  # Resolve any commit pointer (hash, tag, branch, HEAD...) to its short SHA-1.
  local pointer="$1"
  echo "$(git log -n1 --format=%h ${pointer})"
}
function get_previous_pkg_tag {
  # Nearest pkg/* tag at or before the given commit; prints nothing when no
  # such tag exists (git's error output is discarded).
  local commit="$1"
  echo "$(git describe --abbrev=0 --match='pkg/*' $commit 2>/dev/null)"
}
function parse_tag {
  # Split a pkg/* or release/* tag (optionally in `git describe --long` form)
  # into labelled fields. Output format:
  #   pkg/*     -> "version:X revision:N modification:M distance:D anchor:A"
  #   release/* -> "version:X distance:D anchor:A"
  # Unknown flavors produce empty output.
  local tag="$1"
  local flavor="${tag%%/*}"
  local extended_version="${tag##*/}"
  local exploded_version=""
  case "$flavor" in
    pkg)
      exploded_version="$(echo $extended_version | sed "s/\([a-z0-9.+]\+\)-\([0-9]\+\)\(-[a-z]\++[0-9]\+\)\?\(-\([0-9]\+\)-\(g[a-z0-9]\+\)\)\?$/version:\1 revision:\2 modification:\3 distance:\5 anchor:\6/")"
      ;;
    release)
      exploded_version="$(echo $extended_version | sed "s/\([a-z0-9.+]\+\)\(-\([0-9]\+\)-\(g[a-z0-9]\+\)\)\?$/version:\1 distance:\3 anchor:\4/")"
      ;;
  esac
  echo $exploded_version
}
function get_distance_from_tag {
# Number of commits between a `git describe --long` style description
# ("<tag>-<N>-g<hash>") and the given anchor commit; 0 when the bare tag
# points at the anchor itself.
description_tag="$1"
# Strip the trailing "-<N>-g<hash>" to recover the bare tag name.
tag=${description_tag%-*-*}
anchor="$2"
if [[ "$(get_hash $tag)" =~ "$(get_hash $anchor)" ]]
then
echo 0
else
# Otherwise take the "distance:" field produced by parse_tag.
distance="$(parse_tag $description_tag)"
distance="${distance#*distance:}"
distance="${distance%% *}"
echo $distance
fi
}
function get_upstream_version_from_tag {
  # Extract the upstream version ("version:" field) from a pkg/* or
  # release/* tag parsed by parse_tag.
  local parsed
  parsed="$(parse_tag $1)"
  parsed="${parsed#*version:}"
  echo "${parsed%% *}"
}
function get_package_version_from_tag {
  # Extract the Debian revision ("revision:" field) from a pkg/* tag parsed
  # by parse_tag.
  local parsed
  parsed="$(parse_tag $1)"
  parsed="${parsed#*revision:}"
  echo "${parsed%% *}"
}
function get_distribution_from_tag {
  # A pkg tag looks like pkg/<level>/<distrib>/<version>, where <distrib>
  # may itself contain one "/" (e.g. debian/jessie); that slash becomes "-".
  local tag="$1"
  local distribution="${tag#pkg/*/}"   # drop leading pkg/<level>/
  distribution="${distribution%/*}"    # drop trailing /<version>
  echo ${distribution/\//-}            # join a composed distrib with "-"
}
function get_previous_release_tag {
# Return the nearest release/* tag at or before the given commit.
# NOTE: --always falls back to the commit's own abbreviated hash when no
# release/* tag exists (unlike get_previous_pkg_tag, which prints nothing).
commit="$1"
echo "$(git describe --abbrev=0 --always --match='release/*' $commit)"
}
function on_pkg_tag {
# Succeed (return 0 / shell-true) when the given commit itself carries a
# pkg/* tag, fail (return 1) otherwise.
# (The original comment said "return 1 if tagged" — the code does the
# opposite, following the shell convention that 0 means true.)
commit="$1"
nearest_old_pkg_tag="$(get_previous_pkg_tag $commit)"
if [ -n "${nearest_old_pkg_tag}" ] && [ "$(get_hash ${commit})" = "$(get_hash ${nearest_old_pkg_tag})" ]
then
return 0
else
return 1
fi
}
function next_step {
# Return the changelog "floor" for a commit: the previous pkg/* tag, or the
# repository's first commit when no earlier pkg/* tag exists. Echoing the
# commit itself (first branch) signals the caller that the walk is done.
commit="$1"
if [ "$(get_hash $commit)" = "$(get_hash $first_commit)" ]
then
echo $commit
elif on_pkg_tag $commit
then
# Commit is itself pkg-tagged: search from its first parent so we find the
# *previous* pkg tag, not this one.
nearest_old_pkg_tag="$(get_previous_pkg_tag ${commit}^1)"
else
nearest_old_pkg_tag="$(get_previous_pkg_tag ${commit})"
fi
# get_previous_pkg_tag prints nothing when no tag exists; fall back to the
# root commit in that case.
if [[ "$nearest_old_pkg_tag" =~ 'pkg/' ]]
then
echo $nearest_old_pkg_tag
else
echo $first_commit
fi
}
function date_from_commit {
# Return a date suitable for the changelog entry signature (RFC 2822).
# HEAD uses "now"; older entries reuse the creation date of the nearest
# pkg/* tag.
commit="$1"
if [ "$(get_hash ${commit})" = "$(get_hash HEAD)" ]
then
package_date=$(date --rfc-2822)
else
maintainer_commit="$(get_previous_pkg_tag $commit)"
# %(creator) ends with "<email> <epoch> <tz>"; keep the part after "> ",
# then the first field of that (the epoch) is converted to RFC 2822.
package_date="$(git tag -l --format='%(creator)' ${maintainer_commit})"
package_date="${package_date##*> }"
package_date="$(date --rfc-2822 -d @${package_date% *})"
fi
echo "$package_date"
}
function packager_from_commit {
# Return "Name <mail>" suitable for the changelog entry signature.
# pkg-tagged commits use the tag's creator; otherwise the commit's
# committer. A `maintainer` entry in the tamarin db overrides both.
commit="$1"
if on_pkg_tag "${commit}"
then
maintainer_commit="$(get_previous_pkg_tag $commit)"
maintainer="$(git tag -l --format='%(creator)' ${maintainer_commit})"
# Keep everything up to (and including) the closing ">" of the email.
maintainer="${maintainer%>*}>"
else
maintainer="$(git log -n1 --format='%cn <%ce>')"
fi
maintainer=$(tamarin_db get maintainer "${maintainer}")
echo "$maintainer"
}
function next_version {
# Compute the next Debian version string for the given commit, based on the
# most recent release/* and/or pkg/* tags (whichever is closer):
#   release/X.Y.Z   -> "X.Y.Z-1"
#   pkg/.../X.Y.Z-R -> "X.Y.Z-(R+1)"
# For 'dev'/'staging' package levels a "~<level>+<distance>" suffix is added,
# where <distance> is the commit count since the chosen tag.
# Fixes: removed leftover `set -x`/`set +x` debug tracing that polluted
# stderr on every call; corrected the tagless fallback's rev-list range
# (was ${commit}..${first_commit}, which is always empty because the root
# commit is an ancestor of ${commit}).
commit="$1"
# upstream version is given by most recent of release or pkg tag
previous_pkg="$(git describe --long --match='pkg/*' $commit 2>/dev/null)"
previous_release="$(git describe --long --match='release/*' $commit 2>/dev/null)"
if [ -n "$previous_release" ] && [ -n "$previous_pkg" ]
then
distance_from_pkg=$(get_distance_from_tag "$previous_pkg" "$commit")
distance_from_release=$(get_distance_from_tag "$previous_release" "$commit")
if [ $distance_from_release -le $distance_from_pkg ]
then
distance=$distance_from_release
version="$(get_upstream_version_from_tag $previous_release)-1"
else
distance=$distance_from_pkg
version="$(get_upstream_version_from_tag $previous_pkg)-$(expr $(get_package_version_from_tag $previous_pkg) + 1)"
fi
elif [ -n "$previous_release" ]
then
distance_from_release=$(get_distance_from_tag "$previous_release" "$commit")
distance=$distance_from_release
version="$(get_upstream_version_from_tag $previous_release)-1"
elif [ -n "$previous_pkg" ]
then
distance_from_pkg=$(get_distance_from_tag "$previous_pkg" "$commit")
distance=$distance_from_pkg
version="$(get_upstream_version_from_tag $previous_pkg)-$(expr $(get_package_version_from_tag $previous_pkg) + 1)"
else
# No tags at all: count commits since the root commit.
distance=$(git rev-list --no-merges --count ${first_commit}..${commit})
version="0.0.0-1"
fi
if [ "$package_level" = 'dev' ] || [ "$package_level" = 'staging' ]
then
version="${version}~${package_level}+${distance}"
fi
echo $version
}
function gen_changelog_entry {
# Append one changelog entry covering the commits between ceiling_commit and
# the previous pkg/* tag (or the root commit). Returns 1 when the range is
# empty, which stops the caller's loop. Advances the global ceiling_commit
# to the floor so successive calls walk the history backwards.
ceiling_commit=$1
floor_commit="$(next_step "${ceiling_commit}")"
if [ "$(get_hash ${ceiling_commit})" = "$(get_hash ${floor_commit})" ]
then
return 1
fi
if on_pkg_tag $ceiling_commit
then
# Published package: version and distribution come from the pkg/* tag.
ceiling_commit="$(get_previous_pkg_tag $ceiling_commit)"
version="$(get_upstream_version_from_tag $ceiling_commit)-$(get_package_version_from_tag $ceiling_commit)"
distribution="$(get_distribution_from_tag $ceiling_commit)"
else
# Unpublished range: compute the next candidate version.
tamarin_info "current commit $ceiling_commit"
version=$(next_version $ceiling_commit)
distribution="UNRELEASED"
fi
#current_release="$(git describe --abbrev=0 --always --match='release/*' $ceiling_commit)"
tamarin_info "Création de lentrée de changelog entre ${ceiling_commit} et ${floor_commit}"
maintainer="$(packager_from_commit ${ceiling_commit})"
package_date="$(date_from_commit ${ceiling_commit})"
# Debian versions may not contain "_"; replace with "-".
version=${version/_/-}
changelog_entry="${project_name} (${version}) ${distribution}; urgency=${urgency}"
echo "$changelog_entry" >> debian/changelog
echo >> debian/changelog
# One bullet per commit subject in the covered range.
for commit in $(git log --no-merges --format='%H' ${floor_commit}..${ceiling_commit})
do
subject="$(git log -n1 --format=%s ${commit})"
echo " * ${subject}" >> debian/changelog
#ceiling_commit="$(git log -n1 --format='%H' ${commit}^1)"
done
echo >> debian/changelog
changelog_sign=" -- ${maintainer} ${package_date}"
echo "$changelog_sign" >> debian/changelog
echo >> debian/changelog
ceiling_commit=${floor_commit}
}
function gen_changelog() {
  # Emit changelog entries from the ceiling commit backwards, capped at 10
  # iterations as a safety net; gen_changelog_entry advances ceiling_commit
  # and returns non-zero when the history is exhausted.
  local remaining=10
  while gen_changelog_entry $ceiling_commit; do
    remaining=$((remaining - 1))
    echo $changelog_entry
    if [ "$remaining" -le 0 ]; then
      break
    fi
  done
}
# Build the changelog, then publish it to the artifacts directory.
gen_changelog
cp debian/changelog /dist/changelog

View File

@ -1,6 +1,6 @@
import sys, os, argparse, tempfile import sys, os, argparse, tempfile
sys.path.append(os.path.dirname(__file__) + '/lib') sys.path.append(os.path.dirname(__file__) + '/lib')
import tamarin import tamarin, system, rkt
def get_args_parser(): def get_args_parser():
parser = argparse.ArgumentParser(description="Tamarin's container entrypoint") parser = argparse.ArgumentParser(description="Tamarin's container entrypoint")
@ -12,6 +12,7 @@ def get_args_parser():
def get_buildtools_dir(): def get_buildtools_dir():
return os.path.realpath(os.path.dirname(os.path.abspath(__file__)) + "/buildtools") return os.path.realpath(os.path.dirname(os.path.abspath(__file__)) + "/buildtools")
if __name__ == '__main__': if __name__ == '__main__':
parser = get_args_parser() parser = get_args_parser()

View File

@ -21,7 +21,7 @@ function tamarin_load_project_db {
function tamarin_db_get { function tamarin_db_get {
local opt_name=${KEY_PREFIX}${1} local opt_name=${KEY_PREFIX}${1}
local default_value=${@:2} local default_value=${2}
touch "${GLOBAL_DB_FILE}" touch "${GLOBAL_DB_FILE}"
source "${GLOBAL_DB_FILE}" source "${GLOBAL_DB_FILE}"
echo ${!opt_name:-${default_value}} echo ${!opt_name:-${default_value}}
@ -29,7 +29,7 @@ function tamarin_db_get {
function tamarin_db_set { function tamarin_db_set {
local opt_name=${1} local opt_name=${1}
local opt_value=${@:2} local opt_value=${2}
mkdir -p "$(dirname ${GLOBAL_DB_FILE})" mkdir -p "$(dirname ${GLOBAL_DB_FILE})"
touch "${GLOBAL_DB_FILE}" touch "${GLOBAL_DB_FILE}"
sed -i "s/^${KEY_PREFIX}${opt_name}*$//" "${GLOBAL_DB_FILE}" sed -i "s/^${KEY_PREFIX}${opt_name}*$//" "${GLOBAL_DB_FILE}"

38
lib/rkt.py Normal file
View File

@ -0,0 +1,38 @@
import system, subprocess, os, tamarin, json, re
def run(args, as_root = False, capture_output=False, debug=False):
    """Run rkt with the specified argument list.

    Uses the rkt binary from $PATH, falling back to the copy stored in the
    tamarin workspace. When as_root is true and the process is not already
    root, the command is re-run through `sudo -E` (preserving the
    environment).

    Returns the command output (bytes) when capture_output is True,
    otherwise the command's return code. Raises
    subprocess.CalledProcessError on failure.
    """
    rkt_bin = system.which('rkt', tamarin.get_workspace_subdir('rkt'))
    # Idiom fix: truth-test as_root directly instead of `== True`.
    cmd = (["sudo", "-E", rkt_bin] if os.geteuid() != 0 and as_root else [rkt_bin]) + args
    if debug:
        print(" ".join(cmd))
    if capture_output:
        return subprocess.check_output(cmd, stdin=subprocess.PIPE)
    return subprocess.check_call(cmd, stdin=subprocess.PIPE)
def get_images_list(rkt_flags=None, debug=False):
    """Return the list of images in the rkt store, parsed from
    `rkt image list --format=json`.

    rkt_flags: optional extra command-line flags passed through to rkt.
    Fix: use None instead of a mutable [] default argument.
    """
    flags = list(rkt_flags) if rkt_flags else []
    output = run([
        "image",
        "list",
        "--format=json"
    ] + flags, capture_output=True, debug=debug)
    # Fetch the list of installed images
    return json.loads(output.decode('utf-8'))
def find_image_by_name(name_pattern, rkt_flags = []):
    """Return the first store image whose name matches name_pattern
    (a string or a compiled regex), or None when nothing matches.

    rkt_flags is forwarded to `rkt image list`.
    """
    # Idiom fix: isinstance() instead of `type(...) is str` (also accepts
    # str subclasses).
    if isinstance(name_pattern, str):
        name_pattern = re.compile(name_pattern)
    for image in get_images_list(rkt_flags=rkt_flags):
        if name_pattern.search(image['name']):
            return image
    return None
def export_image(image_id, dest_file, rkt_flags = [], debug=False):
    """Export the store image `image_id` to the ACI file `dest_file`."""
    cmd = ["image", "export", image_id, dest_file]
    run(cmd + rkt_flags, debug=debug)

29
lib/system.py Normal file
View File

@ -0,0 +1,29 @@
import tarfile, os
def extract_tar(file_path, dest_dir = ".", debug=False):
    """Extract the tar archive at file_path into dest_dir.

    Compression (gz, bz2, ...) is auto-detected by tarfile.
    NOTE(review): extractall() trusts the archive's member paths — only use
    on archives from trusted sources.
    Fix: dropped the redundant close() — the context manager already closes
    the archive.
    """
    if debug:
        print('Extracting "{:s}" to "{:s}"'.format(file_path, dest_dir))
    with tarfile.open(file_path) as tar:
        tar.extractall(dest_dir)
def which(program, additional_paths = None):
    """Locate an executable like the shell's `which`.

    If program contains a directory component it is returned as-is when it
    points to an executable file. Otherwise every directory in $PATH — plus
    additional_paths, if given (one extra directory) — is searched in order.
    Returns the path of the first match, or None.
    Fixes: `is not None` instead of `!= None`; removed a stray semicolon.
    """
    def is_exe(fpath):
        # Must be a regular file with the execute bit for this user.
        return os.path.isfile(fpath) and os.access(fpath, os.X_OK)

    head, _ = os.path.split(program)
    if head:
        if is_exe(program):
            return program
    else:
        paths = os.environ["PATH"].split(os.pathsep)
        if additional_paths is not None:
            paths.append(additional_paths)
        for path in paths:
            path = path.strip('"')
            candidate = os.path.join(path, program)
            if is_exe(candidate):
                return candidate
    return None

View File

@ -1,6 +1,8 @@
import os, glob, subprocess, configparser, codecs, sys import os, glob, subprocess, configparser
import web, system
import codecs
def run_profile_hooks(profile, step, **kwargs): def run_profile_hooks(profile, step, cwd=None, env=None, debug=False):
hooks_dir = get_hooks_dir() hooks_dir = get_hooks_dir()
step_hooks = profile[step]["hooks"] step_hooks = profile[step]["hooks"]
if not step_hooks: if not step_hooks:
@ -10,10 +12,7 @@ def run_profile_hooks(profile, step, **kwargs):
if not trimmed_hook_name: if not trimmed_hook_name:
continue continue
hook_path = os.path.join(hooks_dir, trimmed_hook_name) hook_path = os.path.join(hooks_dir, trimmed_hook_name)
run([hook_path], **kwargs) code = subprocess.check_call(hook_path, cwd=cwd, stdin=subprocess.PIPE, env=env)
def get_base_dir():
return os.path.realpath(os.path.dirname(os.path.abspath(__file__)) + "/..")
def get_hooks_dir(): def get_hooks_dir():
return os.path.realpath(os.path.dirname(os.path.abspath(__file__)) + "/../hooks") return os.path.realpath(os.path.dirname(os.path.abspath(__file__)) + "/../hooks")
@ -21,9 +20,6 @@ def get_hooks_dir():
def get_lib_dir(): def get_lib_dir():
return os.path.realpath(os.path.dirname(os.path.abspath(__file__)) + "/../lib") return os.path.realpath(os.path.dirname(os.path.abspath(__file__)) + "/../lib")
def get_utils_dir():
return os.path.realpath(os.path.dirname(os.path.abspath(__file__)) + "/../utils")
def load_profile(profile_name, debug=False): def load_profile(profile_name, debug=False):
profile_filename = profile_name+".conf" profile_filename = profile_name+".conf"
for profile_file in get_available_profiles(): for profile_file in get_available_profiles():
@ -55,22 +51,37 @@ def get_workspace_subdir(subdir):
os.makedirs(dir_path, exist_ok=True) os.makedirs(dir_path, exist_ok=True)
return dir_path return dir_path
def run(cmd, captureOutput=False, pty=False, debug=False, **kwargs): def get_acbuild_achive_dest_dir():
"""Execute an arbitrary command on the system""" """Return the first path matching the acbuild archive extraction destination in tamarin workspace"""
if debug: workspace_tmp = get_workspace_subdir('tmp')
print(" ".join(cmd) if isinstance(cmd, list) else cmd) return glob.glob(os.path.join(os.sep, workspace_tmp, 'acbuild-v*'))[0]
stdin=subprocess.PIPE
kwargs['shell'] = False if isinstance(cmd, list) else True
if pty:
kwargs['stdin'] = sys.stdin
if captureOutput:
return subprocess.check_output(cmd, **kwargs)
else:
return subprocess.check_call(cmd, **kwargs)
def run_docker(args, captureOutput=False, **kwargs): def get_rkt_achive_dest_dir():
if isinstance(args, list): """Return the first path matching the rkt archive extraction destination in tamarin workspace"""
cmd = ["docker"] + args workspace_tmp = get_workspace_subdir('tmp')
return glob.glob(os.path.join(os.sep, workspace_tmp, 'rkt-v*'))[0]
def download_rkt(debug=False):
"""Download a local copy of rkt in the tamarin workspace and return the absolute path to the archive"""
url = "https://github.com/coreos/rkt/releases/download/v1.25.0/rkt-v1.25.0.tar.gz"
file_path=os.path.join(os.sep, get_workspace_subdir('tmp'), "rkt.tar.gz")
web.download_file(file_url=url, dest_path=file_path)
return file_path
def download_acbuild(debug=False):
"""Download a local copy of acbuild in the tamarin workspace and return the absolute path to the archive"""
url = "https://github.com/containers/build/releases/download/v0.4.0/acbuild-v0.4.0.tar.gz"
file_path=os.path.join(os.sep, get_workspace_subdir('tmp'), "acbuild.tar.gz")
web.download_file(file_url=url, dest_path=file_path)
return file_path
def run_acbuild(args, captureOutput=False, as_root=False, debug=False):
"""Run acbuild with the specified args (use the local copy if acbuild is not found in the $PATH)"""
acbuild_bin = system.which('acbuild', get_workspace_subdir('acbuild'))
cmd = ( ["sudo", "-E", acbuild_bin] if os.geteuid() != 0 and as_root == True else [acbuild_bin] ) + args
if debug:
print(" ".join(cmd))
if captureOutput:
return subprocess.check_output(cmd, stdin=subprocess.PIPE)
else: else:
cmd = "docker " + args return subprocess.check_call(cmd, stdin=subprocess.PIPE)
return run(cmd, captureOutput=captureOutput, **kwargs)

33
lib/web.py Normal file
View File

@ -0,0 +1,33 @@
from urllib import request
import math, sys
def print_progress_bar(percent_progress=0, char_size=50, clear_line=True):
    """Draw a textual progress bar on stdout, e.g. "[=====     ] 50%".

    clear_line moves the cursor to the far left first so successive calls
    redraw the bar in place.
    """
    filled = math.floor(char_size * (percent_progress / 100))
    bar = "=" * filled + " " * (char_size - filled)
    if clear_line:
        sys.stdout.write(u"\u001b[1000D")
    sys.stdout.write("[{:s}] {:d}%".format(bar, int(percent_progress)))
    sys.stdout.flush()
def download_file(file_url, dest_path, bulk_size = 8192):
    """Download file_url to dest_path, printing a progress bar to stdout.

    bulk_size: chunk size in bytes for each read.
    Fixes: servers may omit Content-Length, in which case int(None) raised
    TypeError — the progress bar is now skipped instead; dropped the
    redundant close() already handled by the with-block.
    """
    req = request.urlopen(file_url)
    meta = req.info()
    content_length = meta.get('Content-Length')
    file_size = int(content_length) if content_length is not None else None
    with open(dest_path, 'wb') as dest_file:
        if file_size is not None:
            print('Downloading "{:s}". Size: {:d}b'.format(file_url, file_size))
        else:
            print('Downloading "{:s}". Size: unknown'.format(file_url))
        downloaded_size = 0
        while True:
            buff = req.read(bulk_size)
            if not buff:
                break
            downloaded_size += len(buff)
            dest_file.write(buff)
            if file_size:
                print_progress_bar(downloaded_size / file_size * 100)
    # Add linebreak after the progress bar
    print("")

179
package
View File

@ -1,10 +1,10 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import argparse, sys, shutil, os, subprocess import argparse, sys, shutil, os, hashlib
sys.path.append(os.path.dirname(__file__) + '/lib') sys.path.append(os.path.dirname(__file__) + '/lib')
import tamarin import tamarin, system, rkt
def create_args_parser(): def create_args_parser():
'''Return a new configured ArgumentParser''' '''Return a new configured ArgumentParser'''
@ -20,42 +20,80 @@ def create_args_parser():
parser.add_argument("-b", "--base", help="Use the specified image instead of the profile's one", default='') parser.add_argument("-b", "--base", help="Use the specified image instead of the profile's one", default='')
parser.add_argument("--rebuild", help="Ignore cache and rebuild container's image", action="store_true", default=False) parser.add_argument("--rebuild", help="Ignore cache and rebuild container's image", action="store_true", default=False)
parser.add_argument("--debug", help="Will add extra output and start the container in interactive mode", action="store_true", default=False) parser.add_argument("--debug", help="Will add extra output and start the container in interactive mode", action="store_true", default=False)
parser.add_argument("--cleanup", help="Clear the workspace and remove obsolete Docker images before build", action="store_true", default=False)
parser.add_argument("--override-docker-args", help="Override all 'docker run' arguments. Use '[IMAGE_TAG]', '[PROFILE]' and '[ARCH]' to insert the corresponding values into your command.", default="")
parser.add_argument("--prepare-only", help="Only prepare build environment for the given profile", action="store_true", default=False)
parser.add_argument("--no-lib-mounts", help="Disable Tamarin library volumes mount", action="store_true", default=False)
return parser return parser
def build_image(build_workspace, base_image, profile_name, profile, debug=False, rebuild=False): def download_and_extract_rkt(dest_dir, debug=False):
shutil.copytree(tamarin.get_base_dir(), os.path.join(build_workspace, '.tamarin')) '''Download and extract rkt to the given destination directory'''
rkt_archive_path = tamarin.download_rkt(debug=debug)
system.extract_tar(rkt_archive_path, workspace_tmp, debug=debug)
rkt_archive_dir = tamarin.get_rkt_achive_dest_dir()
shutil.rmtree(local_rkt_dir, ignore_errors=True)
os.rename(rkt_archive_dir, dest_dir)
with open("{:s}/Dockerfile".format(build_workspace), 'w') as dockerfile: def download_and_extract_acbuild(dest_dir, debug=False):
dockerfile.write("FROM {:s}\n".format(base_image)) '''Download and extract acbuild to the given destination directory'''
dockerfile.write("COPY .tamarin /tamarin\n") acbuild_archive_path = tamarin.download_acbuild(debug=debug)
system.extract_tar(acbuild_archive_path, workspace_tmp, debug=debug)
acbuild_archive_dir = tamarin.get_acbuild_achive_dest_dir()
shutil.rmtree(local_acbuild_dir, ignore_errors=True)
os.rename(acbuild_archive_dir, dest_dir)
def get_cached_image_path(profile, debug=False):
'''Compute and return the path for an hypothetic cached image for the given profile'''
containerbuild_hooks = profile['containerbuild']['hooks']
hasher = hashlib.sha1()
hasher.update(base_image.encode())
hasher.update(containerbuild_hooks.encode())
image_hash = hasher.hexdigest()
cache_dir = tamarin.get_workspace_subdir('cache')
return os.path.join(os.sep, cache_dir, '{:s}.aci'.format(image_hash[:12]));
def build_image(build_workspace, aci_file, base_image, profile, debug=False):
acbuild_flags = ["--work-path", build_workspace]
# Find and export base image from rkt' store
name_pattern = base_image.split('/')[-1] + '$'
image = rkt.find_image_by_name(name_pattern, rkt_flags=rkt_flags)
rkt.export_image(image['id'], aci_file, rkt_flags=rkt_flags, debug=debug);
# Build image
tamarin.run_acbuild(acbuild_flags+["begin", aci_file], debug=debug)
tamarin.run_acbuild(acbuild_flags+["set-name", "image_{:d}".format(pid)], debug=debug)
tamarin.run_acbuild(acbuild_flags+["mount", "add", "src", "/src", "--read-only"], debug=debug)
tamarin.run_acbuild(acbuild_flags+["mount", "add", "dist", "/dist"], debug=debug)
tamarin.run_acbuild(acbuild_flags+["mount", "add", "tamarin-hooks", "/tamarin/hooks", "--read-only"], debug=debug)
tamarin.run_acbuild(acbuild_flags+["mount", "add", "tamarin-lib", "/tamarin/lib", "--read-only"], debug=debug)
tamarin.run_acbuild(acbuild_flags+["mount", "add", "tamarin-profiles", "/tamarin/profiles", "--read-only"], debug=debug)
# Configure "containerbuild" hooks environment # Configure "containerbuild" hooks environment
hooks_env = os.environ.copy() hooks_env = os.environ.copy()
hooks_env["PATH"] = os.environ['PATH'] + ':' + tamarin.get_lib_dir() hooks_env["PATH"] = os.environ['PATH'] + ':' + tamarin.get_workspace_subdir('acbuild')
hooks_env["TAMARIN_ACBUILD"] = " ".join([system.which('acbuild', local_acbuild_dir)]+acbuild_flags)
hooks_env["TAMARIN_ACBUILD_ENGINE"] = "chroot" if not system.which('systemctl') else "systemd-nspawn"
# Run hooks # Run hooks
tamarin.run_profile_hooks(profile, 'containerbuild', cwd=build_workspace, env=hooks_env, debug=debug) tamarin.run_profile_hooks(profile, 'containerbuild', cwd=build_workspace, env=hooks_env, debug=debug)
image_tag = "tamarin:{:s}_{:s}_{:d}".format(profile_name, base_image.replace(':', '_').replace('/', '_'), os.getpid()) tamarin.run_acbuild(acbuild_flags+["write", "--overwrite", aci_file], as_root=True, debug=debug)
tamarin.run_acbuild(acbuild_flags+["end"], as_root=True, debug=debug)
build_args = [ "build", "-t", image_tag ] return aci_file
if rebuild: def cleanup(build_workspace, rkt_flags, debug=False):
build_args += [ "--no-cache" ]
tamarin.run_docker(build_args + [build_workspace], debug=debug) # Nettoyage des conteneurs
rkt.run([
"gc",
"--grace-period=0"
] + rkt_flags, as_root=True, debug=debug)
return image_tag # Nettoyage des images obsolètes du store
rkt.run([
def cleanup(build_workspace=None, debug=False): "image",
"gc"
if build_workspace == None: ] + rkt_flags, as_root=True, debug=debug)
build_workspace = tamarin.get_workspace_subdir('tmp')
# Suppression de l'espace de travail de build # Suppression de l'espace de travail de build
shutil.rmtree(build_workspace, ignore_errors=True) shutil.rmtree(build_workspace, ignore_errors=True)
@ -68,8 +106,7 @@ if __name__ == "__main__":
parser = create_args_parser() parser = create_args_parser()
args = parser.parse_args() args = parser.parse_args()
if args.cleanup: validate_args(args)
cleanup(debug=args.debug)
# Verify project directory # Verify project directory
project_dir = os.path.abspath(args.project_directory) project_dir = os.path.abspath(args.project_directory)
@ -79,61 +116,69 @@ if __name__ == "__main__":
profile = tamarin.load_profile(args.profile, debug=args.debug) profile = tamarin.load_profile(args.profile, debug=args.debug)
workspace = tamarin.get_workspace_dir() workspace = tamarin.get_workspace_dir()
workspace_tmp = tamarin.get_workspace_subdir('tmp')
local_rkt_dir = tamarin.get_workspace_subdir('rkt')
if not system.which('rkt', local_rkt_dir):
download_and_extract_rkt(local_rkt_dir)
local_acbuild_dir = tamarin.get_workspace_subdir('acbuild')
if not system.which('acbuild', local_acbuild_dir):
download_and_extract_acbuild(local_acbuild_dir)
pid = os.getpid() pid = os.getpid()
build_workspace = tamarin.get_workspace_subdir('tmp/build_{:d}'.format(pid)) build_workspace = tamarin.get_workspace_subdir('tmp/build_{:d}'.format(pid))
shutil.copytree(tamarin.get_utils_dir(), os.path.join(build_workspace, 'utils')) rkt_store = tamarin.get_workspace_subdir('store')
rkt_flags = ["--dir={:s}".format(rkt_store)]
base_image = args.base if args.base != '' else profile['profile']['default_image'] base_image = args.base if args.base != '' else profile['profile']['default_image']
image_tag = build_image(build_workspace, base_image, args.profile, profile, debug=args.debug, rebuild=args.rebuild) # If the base image is Docker-based, download it
if base_image.startswith('docker://'):
rkt.run([
"fetch",
"--insecure-options=image",
base_image
] + rkt_flags, debug=args.debug)
if args.prepare_only: aci_file = os.path.join(os.sep, build_workspace, 'image.aci')
exit() cached_image_file = get_cached_image_path(profile, debug=args.debug)
kwargs = dict() if not args.rebuild and os.path.exists(cached_image_file):
kwargs['debug'] = args.debug # Copy cached image
shutil.copyfile(cached_image_file, aci_file)
docker_args = []
# Append custom arguments
if args.override_docker_args != "":
docker_args = args.override_docker_args.replace('[IMAGE_TAG]', image_tag)
docker_args = docker_args.replace('[PROFILE]', args.profile)
docker_args = docker_args.replace('[ARCH]', args.architecture)
else: else:
build_image(build_workspace, aci_file, base_image, profile, debug=args.debug)
# Cache image
shutil.copyfile(aci_file, cached_image_file)
docker_args += [ "run", "--rm" ] # rkt run arguments
rkt_args = [
"run",
"--insecure-options=image",
aci_file, "--net=host",
"--volume=src,kind=host,source={:s}".format(project_dir),
"--volume=dist,kind=host,source={:s}".format(output_dir),
"--volume=tamarin-hooks,kind=host,source={:s}".format(tamarin.get_hooks_dir()),
"--volume=tamarin-lib,kind=host,source={:s}".format(tamarin.get_lib_dir()),
"--volume=tamarin-profiles,kind=host,source={:s}".format(tamarin.get_profiles_dir())
]
# volumes definition # Use environment proxy if defined
docker_args += [ for proxy_var in ['HTTP_PROXY', 'HTTPS_PROXY', 'http_proxy', 'https_proxy']:
"-v", "{:s}:/src:ro".format(project_dir), if proxy_var in os.environ:
"-v", "{:s}:/dist".format(output_dir), rkt_args += ["--set-env={:s}={:s}".format(proxy_var, os.environ[proxy_var])]
]
if not args.no_lib_mounts: if args.debug:
docker_args += [ rkt_args += ["--interactive", "--exec", "/bin/bash"]
"-v", "{:s}:/tamarin/hooks:ro".format(tamarin.get_hooks_dir()), helper_cmd = " ".join(["/usr/bin/python3", "/tamarin/lib/build.py", args.profile, args.architecture])
"-v", "{:s}:/tamarin/lib:ro".format(tamarin.get_lib_dir()), print("Executer '{:s}' pour lancer la construction du paquet.".format(helper_cmd))
"-v", "{:s}:/tamarin/profiles:ro".format(tamarin.get_profiles_dir()), else:
"-v", "{:s}:/tamarin/utils:ro".format(tamarin.get_utils_dir()) rkt_args += ["--exec", "/usr/bin/python3", "--", "/tamarin/lib/build.py", args.profile, args.architecture]
]
# Use environment proxy if defined
for proxy_var in ['HTTP_PROXY', 'HTTPS_PROXY', 'http_proxy', 'https_proxy']:
if proxy_var in os.environ:
docker_args += ["-e", "{:s}={:s}".format(proxy_var, os.environ[proxy_var])]
if args.debug:
kwargs['pty'] = True
docker_args += ["-it", image_tag, "/bin/sh"]
helper_cmd = " ".join(["/usr/bin/python3", "/tamarin/lib/build.py", args.profile, args.architecture])
print("Executer '{:s}' pour lancer la construction du paquet.".format(helper_cmd))
else:
docker_args += [image_tag, "/usr/bin/python3", "/tamarin/lib/build.py", args.profile, args.architecture]
# Start container # Start container
tamarin.run_docker(docker_args, **kwargs) rkt.run(rkt_flags+rkt_args, as_root=True, debug=args.debug)
cleanup(build_workspace, debug=args.debug) # Cleanup
cleanup(build_workspace, rkt_flags, debug=args.debug)

View File

@ -1,21 +1,17 @@
# Configuration générale du profil # Configuration générale du profil
[profile] [profile]
# Image Docker par défaut # Image Docker par défaut
default_image=reg.cadoles.com/proxy_cache/library/debian:bookworm default_image=docker://debian:jessie
# Configuration de l'étape de pré-construction du conteneur # Configuration de l'étape de pré-construction du conteneur
[containerbuild] [containerbuild]
hooks= hooks=containerbuild/debian/install-build-essential,
containerbuild/debian/install-build-essential, containerbuild/debian/install-git
containerbuild/debian/install-git,
containerbuild/debian/install-letsencrypt-ca
# Configuration de l'étape de pré-construction du paquet # Configuration de l'étape de pré-construction du paquet
[prebuild] [prebuild]
hooks= hooks=prebuild/debian/load-project-db,
prebuild/debian/copy-sources-to-workspace, prebuild/debian/copy-sources-to-workspace,
prebuild/debian/run-project-hooks,
prebuild/debian/load-project-db,
prebuild/debian/complete-project-db, prebuild/debian/complete-project-db,
prebuild/debian/create-changelog, prebuild/debian/create-changelog,
prebuild/debian/create-dummy-changelog, prebuild/debian/create-dummy-changelog,
@ -28,6 +24,4 @@ hooks=build/debian/build
# Configuration de l'étape de post-construction du paquet # Configuration de l'étape de post-construction du paquet
[postbuild] [postbuild]
hooks= hooks=postbuild/debian/export-dist-postbuild
postbuild/debian/run-project-hooks,
postbuild/debian/export-dist

View File

@ -1,30 +0,0 @@
# Configuration générale du profil
[profile]
# Image Docker par défaut
default_image=ubuntu:bionic
# Configuration de l'étape de pré-construction du conteneur
[containerbuild]
hooks=
containerbuild/debian/install-build-essential,
containerbuild/debian/install-git,
# Configuration de l'étape de pré-construction du paquet
[prebuild]
hooks=
prebuild/debian/copy-sources-to-workspace,
prebuild/debian/run-project-hooks,
prebuild/debian/load-project-db,
prebuild/debian/complete-project-db,
prebuild/eole/create-changelog,
prebuild/debian/install-build-depends
# Configuration de l'étape de construction du paquet
[build]
hooks=build/debian/build
# Configuration de l'étape de post-construction du paquet
[postbuild]
hooks=
postbuild/debian/run-project-hooks,
postbuild/debian/export-dist

View File

@ -1,30 +0,0 @@
# Configuration générale du profil
[profile]
# Image Docker par défaut
default_image=ubuntu:bionic
# Configuration de l'étape de pré-construction du conteneur
[containerbuild]
hooks=
containerbuild/debian/install-build-essential,
containerbuild/debian/install-git,
# Configuration de l'étape de pré-construction du paquet
[prebuild]
hooks=
prebuild/debian/copy-sources-to-workspace,
prebuild/debian/run-project-hooks,
prebuild/debian/load-project-db,
prebuild/debian/complete-project-db,
prebuild/eole/create-changelog,
prebuild/debian/install-build-depends
# Configuration de l'étape de construction du paquet
[build]
hooks=build/debian/build
# Configuration de l'étape de post-construction du paquet
[postbuild]
hooks=
postbuild/debian/run-project-hooks,
postbuild/debian/export-dist

View File

@ -1,30 +0,0 @@
# Configuration générale du profil
[profile]
# Image Docker par défaut
default_image=ubuntu:bionic
# Configuration de l'étape de pré-construction du conteneur
[containerbuild]
hooks=
containerbuild/debian/install-build-essential,
containerbuild/debian/install-git,
# Configuration de l'étape de pré-construction du paquet
[prebuild]
hooks=
prebuild/debian/copy-sources-to-workspace,
prebuild/debian/run-project-hooks,
prebuild/debian/load-project-db,
prebuild/debian/complete-project-db,
prebuild/eole/create-changelog,
prebuild/debian/install-build-depends
# Configuration de l'étape de construction du paquet
[build]
hooks=build/debian/build
# Configuration de l'étape de post-construction du paquet
[postbuild]
hooks=
postbuild/debian/run-project-hooks,
postbuild/debian/export-dist

View File

@ -1,30 +0,0 @@
# Configuration générale du profil
[profile]
# Image Docker par défaut
default_image=ubuntu:focal
# Configuration de l'étape de pré-construction du conteneur
[containerbuild]
hooks=
containerbuild/debian/install-build-essential,
containerbuild/debian/install-git,
# Configuration de l'étape de pré-construction du paquet
[prebuild]
hooks=
prebuild/debian/copy-sources-to-workspace,
prebuild/debian/run-project-hooks,
prebuild/debian/load-project-db,
prebuild/debian/complete-project-db,
prebuild/eole/create-changelog,
prebuild/debian/install-build-depends
# Configuration de l'étape de construction du paquet
[build]
hooks=build/debian/build
# Configuration de l'étape de post-construction du paquet
[postbuild]
hooks=
postbuild/debian/run-project-hooks,
postbuild/debian/export-dist

View File

@ -1,30 +0,0 @@
# Configuration générale du profil
[profile]
# Image Docker par défaut
default_image=ubuntu:focal
# Configuration de l'étape de pré-construction du conteneur
[containerbuild]
hooks=
containerbuild/debian/install-build-essential,
containerbuild/debian/install-git,
# Configuration de l'étape de pré-construction du paquet
[prebuild]
hooks=
prebuild/debian/copy-sources-to-workspace,
prebuild/debian/run-project-hooks,
prebuild/debian/load-project-db,
prebuild/debian/complete-project-db,
prebuild/eole/create-changelog,
prebuild/debian/install-build-depends
# Configuration de l'étape de construction du paquet
[build]
hooks=build/debian/build
# Configuration de l'étape de post-construction du paquet
[postbuild]
hooks=
postbuild/debian/run-project-hooks,
postbuild/debian/export-dist

View File

@ -1,31 +0,0 @@
# Configuration générale du profil
[profile]
# Image Docker par défaut
default_image=ubuntu:jammy
# Configuration de l'étape de pré-construction du conteneur
[containerbuild]
hooks=
containerbuild/debian/install-build-essential,
containerbuild/debian/install-git,
containerbuild/eole-2.9.0/configure-additional-repository,
# Configuration de l'étape de pré-construction du paquet
[prebuild]
hooks=
prebuild/debian/copy-sources-to-workspace,
prebuild/debian/run-project-hooks,
prebuild/debian/load-project-db,
prebuild/debian/complete-project-db,
prebuild/eole/create-changelog,
prebuild/debian/install-build-depends
# Configuration de l'étape de construction du paquet
[build]
hooks=build/debian/build
# Configuration de l'étape de post-construction du paquet
[postbuild]
hooks=
postbuild/debian/run-project-hooks,
postbuild/debian/export-dist

View File

@ -1,2 +0,0 @@
#!/bin/sh
exec ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null "$@"