feat(cleanup): clean up the project with some standards

Philippe Caseiro 2023-05-04 17:06:19 +02:00
parent 53ece61384
commit d84b111049
13 changed files with 151 additions and 0 deletions


@@ -13,6 +13,11 @@ secretGenerator:
  literals:
  - webserver-secret-key=c94b62cffbf4dd1c42747fc65007054432f10c185c5e6160
configMapGenerator:
- name: 'airflow-connections'
  literals:
  - AIRFLOW_CONN_TEST="test://test.do.not.use"
helmCharts:
- name: airflow
  repo: https://airflow.apache.org
@@ -31,6 +36,7 @@ helmCharts:
      value: "$(AIRFLOW_DATABASE_SERVICE_NAME)-rw"
    - name: "DB_SERVICE_PORT"
      value: "5432"
    webserver:
      defaultUser:
        username: admin
@@ -40,3 +46,17 @@ helmCharts:
    dags:
      gitSync:
        enabled: false
    extraEnvFrom: |
      - configMapRef:
          name: '{{ .Release.Name }}-connections'
    scheduler:
      extraInitContainers:
      - name: airflow-create-connections
        image: reg.cadoles.com/cadoles/airflow:latest
        args:
        - bash
        - -c
        - |-
          exec \
            ./scripts/create-connections.sh
        - --
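Note: with --enable-helm, kustomize renders the chart with these values, so the Airflow pods receive AIRFLOW_CONN_TEST through the generated ConfigMap and the scheduler init container runs create-connections.sh against it. A rough way to inspect the rendered manifests locally (assuming a kustomize binary with Helm support and the base directory referenced by skaffold.yaml below) might be:

    $ kustomize build --enable-helm base | grep -B 2 -A 2 'AIRFLOW_CONN_TEST'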

base/secrets/.gitignore vendored Normal file (+3)

@@ -0,0 +1,3 @@
*
!.gitignore
!.gitkeep

base/secrets/.gitkeep Normal file (+0)


@@ -0,0 +1,9 @@
FROM apache/airflow:2.5.3-python3.10
USER root
COPY --chown=airflow:root ./dags/ ${AIRFLOW_HOME}/dags/
COPY --chown=airflow:root ./scripts/ ${AIRFLOW_HOME}/scripts/
RUN chmod +x ./scripts/*
USER airflow
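This image is built by Skaffold with images/airflow as the build context (see skaffold.yaml below). For a quick manual check, assuming Docker is available locally and using the same tag as the scheduler init container, something like:

    $ docker build -t reg.cadoles.com/cadoles/airflow:latest images/airflow
    $ docker run --rm reg.cadoles.com/cadoles/airflow:latest bash -c 'ls -l dags/ scripts/'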


@@ -0,0 +1,42 @@
import json

from airflow.utils.dates import days_ago
from airflow import DAG
from airflow.providers.http.operators.http import SimpleHttpOperator
# Sensors
from airflow.providers.http.sensors.http import HttpSensor
from airflow.models.param import Param

default_dag_args = {
    'start_date': days_ago(2)
}

with DAG(
    dag_id='mse_cmd_over_http',
    default_args=default_dag_args,
    schedule=None,
    params={
        "cmdName": Param("", type="string"),
        "format": Param("", type="string"),
        "env": Param("prod", type="string"),
    }
) as dag:

    is_api_available = HttpSensor(
        task_id='is_api_available',
        http_conn_id='mse_api',
        endpoint='/api/v1/cmds'
    )

    task = SimpleHttpOperator(
        task_id='mse_cmd',
        method="GET",
        http_conn_id='mse_api',
        endpoint='/api/v1/cmds',
        data={
            "cmdName": "{{ dag_run.conf.get('cmdName') }}",
            "format": "{{ dag_run.conf.get('format') }}",
            "env": "{{ dag_run.conf.get('env') }}"
        },
        headers={"Content-Type": "application/json"},
        dag=dag
    )

    is_api_available >> task
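Assuming a connection with id mse_api exists (for instance through an AIRFLOW_CONN_MSE_API environment variable or the provisioning script below), the DAG can be triggered from the CLI; the parameter values here are purely illustrative:

    $ airflow dags trigger mse_cmd_over_http \
        --conf '{"cmdName": "example-cmd", "format": "json", "env": "prod"}'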


@@ -0,0 +1,24 @@
#!/bin/bash
# Provision Airflow connections from every AIRFLOW_CONN_* environment variable.

export SQLALCHEMY_SILENCE_UBER_WARNING=1

# List all environment variables whose name contains AIRFLOW_CONN_.
conns=$(compgen -v -X '!*AIRFLOW_CONN_*')

for conn in ${conns}
do
    echo "====================================="
    # Strip the AIRFLOW_CONN_ prefix to get the connection name.
    name="${conn#"AIRFLOW_CONN_"}"
    # The literals are quoted in the ConfigMap; eval/echo unwraps them.
    value=$(eval "echo -e ${!conn}")
    echo "Creating ${name}: ${value}"
    ex=$(airflow connections add "${name}" --conn-uri "${value}" 2>&1)
    if [ "${?}" -ne 0 ]; then
        echo "${conn}: Bad connection definition"
        echo "= Error =========================="
        echo "${ex}"
        echo "= End error ======================"
    else
        echo "= Ok ================================"
    fi
done
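Since the script only relies on AIRFLOW_CONN_* variables being present in its environment, it can be exercised from any shell where the airflow CLI is configured; the connection name and URI below are placeholders:

    $ export AIRFLOW_CONN_MSE_API="http://mse-api.example.internal"
    $ ./scripts/create-connections.sh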


@@ -0,0 +1,9 @@
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
namespace: airflow-dev
resources:
- ../../base
- resources/namespace.yaml
namePrefix: dev-
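The dev overlay can be rendered on its own to check the dev- prefix and the airflow-dev namespace before letting Skaffold deploy it; this assumes a kustomize binary with Helm support and a configured kubectl context:

    $ kustomize build --enable-helm overlays/dev | kubectl apply --dry-run=client -f -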


@@ -0,0 +1,4 @@
apiVersion: v1
kind: Namespace
metadata:
  name: airflow-dev

skaffold.yaml Normal file (+40)

@@ -0,0 +1,40 @@
apiVersion: skaffold/v3
kind: Config
metadata:
  name: mse

manifests:
  kustomize:
    paths:
    - base

profiles:
- name: dev
  manifests:
    kustomize:
      buildArgs:
      - "--enable-helm"
      paths:
      - overlays/dev
  activation:
  - command: dev

build:
  cluster:
    dockerConfig:
      path: base/secrets/dockerconfig/.dockerconfigjson
    randomDockerConfigSecret: true
    randomPullSecret: true
  tagPolicy:
    sha256: {}
  artifacts:
  - image: reg.cadoles.com/cadoles/airflow
    context: images/airflow
    kaniko:
      dockerfile: Dockerfile
      cache: {}

deploy:
  statusCheckDeadlineSeconds: 600
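With this configuration the usual entry points are the standard Skaffold commands (assuming skaffold is installed and cluster credentials are configured):

    $ skaffold dev        # the activation rule above selects the dev profile automatically
    $ skaffold run -p dev # one-shot build and deploy using the same profile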