From d76e3d0aa85f1e76f059c2be01f2b2bf0501013d Mon Sep 17 00:00:00 2001
From: Valentin Matton
Date: Mon, 29 Apr 2024 12:13:03 +0200
Subject: [PATCH] chore(pipeline): clean up scalingo related files

---
 pipeline/Procfile         |  3 --
 pipeline/entrypoint.sh    | 65 ---------------------------------------
 pipeline/requirements.txt |  1 -
 pipeline/runtime.txt      |  1 -
 4 files changed, 70 deletions(-)
 delete mode 100644 pipeline/Procfile
 delete mode 100755 pipeline/entrypoint.sh
 delete mode 100644 pipeline/requirements.txt
 delete mode 100644 pipeline/runtime.txt

diff --git a/pipeline/Procfile b/pipeline/Procfile
deleted file mode 100644
index 54b98555..00000000
--- a/pipeline/Procfile
+++ /dev/null
@@ -1,3 +0,0 @@
-web: ./entrypoint.sh webserver
-
-scheduler: ./entrypoint.sh scheduler
diff --git a/pipeline/entrypoint.sh b/pipeline/entrypoint.sh
deleted file mode 100755
index 20b76aa7..00000000
--- a/pipeline/entrypoint.sh
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/bin/bash
-
-# Exit immediately if a command exits with a non-zero status.
-set -e
-
-# Trace execution
-[[ "${DEBUG}" ]] && set -x
-
-if [[ $# -eq 0 ]]; then
-    echo "No service parameter provided.";
-    exit 1;
-fi
-
-COMMAND=$1
-
-# The `DATABASE_URL` env var is automatically set by Scalingo and uses the deprecated
-# scheme `postgres://`. Therefore it is replaced.
-export AIRFLOW__DATABASE__SQL_ALCHEMY_CONN="${DATABASE_URL/postgres\:\/\//postgresql\:\/\/}"
-
-export AIRFLOW_HOME="${HOME}/airflow"
-export AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=False
-export AIRFLOW__CORE__LOAD_EXAMPLES=False
-export AIRFLOW__CORE__EXECUTOR=LocalExecutor
-export AIRFLOW__CORE__DEFAULT_TIMEZONE=Europe/Paris
-export AIRFLOW__CORE__FERNET_KEY="${SECRET_KEY}"
-export AIRFLOW__CORE__DAGS_FOLDER="${HOME}/dags"
-
-export AIRFLOW__LOGGING__REMOTE_LOGGING=True
-export AIRFLOW__LOGGING__REMOTE_BASE_LOG_FOLDER=s3://data-inclusion-lake/logs
-export AIRFLOW__LOGGING__REMOTE_LOG_CONN_ID=s3_logs
-export AIRFLOW__LOGGING__DELETE_LOCAL_LOGS=True
-
-if [[ "${_AIRFLOW_DB_MIGRATE}" = "true" ]]; then
-    airflow db migrate
-fi
-
-if [[ "${COMMAND}" = "webserver" ]]; then
-    airflow webserver --port "${PORT}"
-fi
-
-if [[ "${COMMAND}" = "scheduler" ]]; then
-    # Create additional virtualenvs for isolated task executions
-    VIRTUAL_ENV="${AIRFLOW_HOME}/venvs/python/venv"
-    python -m venv "${VIRTUAL_ENV}"
-    "${VIRTUAL_ENV}/bin/python" -m pip install -U pip setuptools wheel
-    "${VIRTUAL_ENV}/bin/python" -m pip install -r requirements/tasks/python/requirements.txt
-    "${VIRTUAL_ENV}/bin/python" -m pip install .
-
-    # Create additional virtualenvs for isolated task executions
-    VIRTUAL_ENV="${AIRFLOW_HOME}/venvs/pipx/venv"
-    python -m venv "${VIRTUAL_ENV}"
-    "${VIRTUAL_ENV}/bin/python" -m pip install -U pip setuptools wheel
-    "${VIRTUAL_ENV}/bin/python" -m pip install -r requirements/tasks/pipx/requirements.txt
-
-    # Create additional virtualenvs for isolated task executions
-    VIRTUAL_ENV="${AIRFLOW_HOME}/venvs/dbt/venv"
-    python -m venv "${VIRTUAL_ENV}"
-    "${VIRTUAL_ENV}/bin/python" -m pip install -U pip setuptools wheel
-    "${VIRTUAL_ENV}/bin/python" -m pip install -r requirements/tasks/dbt/requirements.txt
-
-    # Install dbt packages (not python packages)
-    "${VIRTUAL_ENV}/bin/dbt" deps --project-dir "${AIRFLOW_VAR_DBT_PROJECT_DIR}"
-
-    airflow scheduler
-fi
diff --git a/pipeline/requirements.txt b/pipeline/requirements.txt
deleted file mode 100644
index e39d31f8..00000000
--- a/pipeline/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
--r requirements/airflow/requirements.txt
diff --git a/pipeline/runtime.txt b/pipeline/runtime.txt
deleted file mode 100644
index 76b6e496..00000000
--- a/pipeline/runtime.txt
+++ /dev/null
@@ -1 +0,0 @@
-python-3.11.6