fix(scalingo): remove multiline commands
vmttn committed Oct 23, 2023
1 parent 596797f commit 89b4795
Showing 2 changed files with 60 additions and 42 deletions.
24 changes: 5 additions & 19 deletions pipeline/Procfile
@@ -1,19 +1,5 @@
-# The `DATABASE_URL` env var is automatically set by Scalingo and uses the deprecated
-# scheme `postgres://`. Therefore it is replaced.
-web: env \
-  AIRFLOW__API__AUTH_BACKENDS=airflow.api.auth.backend.basic_auth \
-  AIRFLOW__DATABASE__SQL_ALCHEMY_CONN="${DATABASE_URL/postgres\:\/\//postgresql\:\/\/}" \
-  airflow webserver --port "${PORT}"
-postdeploy: env \
-  AIRFLOW__DATABASE__SQL_ALCHEMY_CONN="${DATABASE_URL/postgres\:\/\//postgresql\:\/\/}" \
-  airflow db migrate
-scheduler: env \
-  AIRFLOW_HOME=./airflow \
-  AIRFLOW__DATABASE__SQL_ALCHEMY_CONN="${DATABASE_URL/postgres\:\/\//postgresql\:\/\/}" \
-  AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=False \
-  AIRFLOW__CORE__LOAD_EXAMPLES=False \
-  AIRFLOW__CORE__EXECUTOR=LocalExecutor \
-  AIRFLOW__CORE__DEFAULT_TIMEZONE=Europe/Paris \
-  AIRFLOW__CORE__FERNET_KEY="${SECRET_KEY}" \
-  AIRFLOW__CORE__DAGS_FOLDER=./dags \
-  ./entrypoint.sh
+web: ./entrypoint.sh webserver
+
+postdeploy: ./entrypoint.sh migrate
+
+scheduler: ./entrypoint.sh scheduler
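
Each Scalingo process type now runs the same entrypoint with a single argument, which keeps every Procfile entry on one line. A minimal local smoke test of that dispatch could look as follows; the connection string, secret and port are made-up placeholder values, not taken from the deployment:

export DATABASE_URL="postgres://user:pass@localhost:5432/airflow"  # placeholder; Scalingo sets the real value
export SECRET_KEY="placeholder-fernet-key"                         # placeholder secret
export PORT=8080
./entrypoint.sh migrate     # apply the database migrations once
./entrypoint.sh webserver   # then serve the UI on ${PORT}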
78 changes: 55 additions & 23 deletions pipeline/entrypoint.sh
@@ -6,26 +6,58 @@ set -e
# Trace execution
[[ "${DEBUG}" ]] && set -x

-# Create additional virtualenvs for isolated task executions
-VIRTUAL_ENV="${AIRFLOW_HOME}/venvs/python/venv"
-python -m venv "${VIRTUAL_ENV}"
-"${VIRTUAL_ENV}/bin/python" -m pip install -U pip setuptools wheel
-"${VIRTUAL_ENV}/bin/python" -m pip install -r requirements/tasks/python/requirements.txt
-"${VIRTUAL_ENV}/bin/python" -m pip install .
-
-# Create additional virtualenvs for isolated task executions
-VIRTUAL_ENV="${AIRFLOW_HOME}/venvs/pipx/venv"
-python -m venv "${VIRTUAL_ENV}"
-"${VIRTUAL_ENV}/bin/python" -m pip install -U pip setuptools wheel
-"${VIRTUAL_ENV}/bin/python" -m pip install -r requirements/tasks/pipx/requirements.txt
-
-# Create additional virtualenvs for isolated task executions
-VIRTUAL_ENV="${AIRFLOW_HOME}/venvs/dbt/venv"
-python -m venv "${VIRTUAL_ENV}"
-"${VIRTUAL_ENV}/bin/python" -m pip install -U pip setuptools wheel
-"${VIRTUAL_ENV}/bin/python" -m pip install -r requirements/tasks/dbt/requirements.txt
-
-# Install dbt packages (not python packages)
-"${VIRTUAL_ENV}/bin/dbt" deps --project-dir "${AIRFLOW_VAR_DBT_PROJECT_DIR}"
-
-airflow scheduler
+if [[ $# -eq 0 ]]; then
+    echo "No service parameter provided.";
+    exit 1;
+fi
+
+COMMAND=$1
+
+# The `DATABASE_URL` env var is automatically set by Scalingo and uses the deprecated
+# scheme `postgres://`. Therefore it is replaced.
+export AIRFLOW__DATABASE__SQL_ALCHEMY_CONN="${DATABASE_URL/postgres\:\/\//postgresql\:\/\/}"
+
+if [[ "${COMMAND}" = "webserver" ]]; then
+    export AIRFLOW__API__AUTH_BACKENDS=airflow.api.auth.backend.basic_auth
+
+    airflow webserver --port "${PORT}"
+fi
+
+if [[ "${COMMAND}" = "migrate" ]]; then
+    airflow db migrate
+fi
+
+if [[ "${COMMAND}" = "scheduler" ]]; then
+    # Create additional virtualenvs for isolated task executions
+    VIRTUAL_ENV="${AIRFLOW_HOME}/venvs/python/venv"
+    python -m venv "${VIRTUAL_ENV}"
+    "${VIRTUAL_ENV}/bin/python" -m pip install -U pip setuptools wheel
+    "${VIRTUAL_ENV}/bin/python" -m pip install -r requirements/tasks/python/requirements.txt
+    "${VIRTUAL_ENV}/bin/python" -m pip install .
+
+    # Create additional virtualenvs for isolated task executions
+    VIRTUAL_ENV="${AIRFLOW_HOME}/venvs/pipx/venv"
+    python -m venv "${VIRTUAL_ENV}"
+    "${VIRTUAL_ENV}/bin/python" -m pip install -U pip setuptools wheel
+    "${VIRTUAL_ENV}/bin/python" -m pip install -r requirements/tasks/pipx/requirements.txt
+
+    # Create additional virtualenvs for isolated task executions
+    VIRTUAL_ENV="${AIRFLOW_HOME}/venvs/dbt/venv"
+    python -m venv "${VIRTUAL_ENV}"
+    "${VIRTUAL_ENV}/bin/python" -m pip install -U pip setuptools wheel
+    "${VIRTUAL_ENV}/bin/python" -m pip install -r requirements/tasks/dbt/requirements.txt
+
+    # Install dbt packages (not python packages)
+    "${VIRTUAL_ENV}/bin/dbt" deps --project-dir "${AIRFLOW_VAR_DBT_PROJECT_DIR}"
+
+    export AIRFLOW_HOME=./airflow
+    export AIRFLOW__DATABASE__SQL_ALCHEMY_CONN="${DATABASE_URL/postgres\:\/\//postgresql\:\/\/}"
+    export AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=False
+    export AIRFLOW__CORE__LOAD_EXAMPLES=False
+    export AIRFLOW__CORE__EXECUTOR=LocalExecutor
+    export AIRFLOW__CORE__DEFAULT_TIMEZONE=Europe/Paris
+    export AIRFLOW__CORE__FERNET_KEY="${SECRET_KEY}"
+    export AIRFLOW__CORE__DAGS_FOLDER=./dags
+
+    airflow scheduler
+fi
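
The scheme rewrite used above relies on bash's `${variable/pattern/replacement}` substitution. A standalone sketch of the same expansion, with a made-up connection string, shows the intended effect:

DATABASE_URL="postgres://user:pass@db.example.com:5432/airflow"   # made-up value; Scalingo provides the real one
echo "${DATABASE_URL/postgres\:\/\//postgresql\:\/\/}"
# prints: postgresql://user:pass@db.example.com:5432/airflow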
