feat(deployment): provision scaleway with tf #21

Workflow file for this run

name: deployment
on:
  push:
    branches:
      - "main"
  pull_request:
    branches:
      - "main"
jobs:
  provision:
    runs-on: ubuntu-20.04
    environment: staging
    defaults:
      run:
        working-directory: deployment
    outputs:
      encrypted_tf_outputs: ${{ steps.tf-output.outputs.encrypted_tf_outputs }}
    container:
      image: hashicorp/terraform:1.4.0
      env:
        TF_IN_AUTOMATION: true
        AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
        AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
        # `TF_VAR_*` environment variables are case sensitive and must match the case of the corresponding Terraform variables
        TF_VAR_datawarehouse_admin_password: ${{ secrets.TF_VAR_DATAWAREHOUSE_ADMIN_PASSWORD }}
        TF_VAR_datawarehouse_admin_username: ${{ vars.TF_VAR_DATAWAREHOUSE_ADMIN_USERNAME }}
        TF_VAR_datawarehouse_di_database: ${{ vars.TF_VAR_DATAWAREHOUSE_DI_DATABASE }}
        TF_VAR_datawarehouse_di_password: ${{ secrets.TF_VAR_DATAWAREHOUSE_DI_PASSWORD }}
        TF_VAR_datawarehouse_di_username: ${{ vars.TF_VAR_DATAWAREHOUSE_DI_USERNAME }}
        TF_VAR_scaleway_access_key: ${{ secrets.TF_VAR_SCALEWAY_ACCESS_KEY }}
        TF_VAR_scaleway_project_id: ${{ vars.TF_VAR_SCALEWAY_PROJECT_ID }}
        TF_VAR_scaleway_secret_key: ${{ secrets.TF_VAR_SCALEWAY_SECRET_KEY }}
        TF_VAR_environment_name: ${{ vars.TF_VAR_ENVIRONMENT_NAME }}
        TF_VAR_ssh_public_key: ${{ vars.SSH_PUBLIC_KEY }}
        ENV: ${{ vars.TF_VAR_ENVIRONMENT_NAME }}
      volumes:
        - .:/deployment
      options: --workdir /deployment
    steps:
      - uses: actions/checkout@v3
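      # Partial backend configuration: the state bucket, key, region and S3-compatible
      # endpoint (Scaleway Object Storage in fr-par) are passed at init time rather than
      # hard-coded in the Terraform sources; the AWS_* credentials above are presumably
      # what the S3 backend uses to reach that bucket.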
      - name: tf init
        run: |
          terraform -chdir="environments/${ENV}" init \
            -backend-config "bucket=data-inclusion-terraform" \
            -backend-config "key=stack_data/${ENV}" \
            -backend-config "region=fr-par" \
            -backend-config "endpoint=https://s3.fr-par.scw.cloud"
      - name: tf validate
        run: |
          terraform -chdir="environments/${ENV}" validate
      - name: tf plan
        run: |
          terraform -chdir="environments/${ENV}" plan
      - name: tf apply
        run: |
          terraform -chdir="environments/${ENV}" apply -auto-approve
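      # The terraform outputs include secret values (connection strings), and GitHub
      # Actions skips job outputs in which it detects a secret, so they are gpg-encrypted
      # here (presumably to work around that) and decrypted in the deploy job with the
      # same TMP_ENCRYPTION_PASSWORD.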
      - id: tf-output
        name: tf output
        env:
          TMP_ENCRYPTION_PASSWORD: ${{ secrets.TMP_ENCRYPTION_PASSWORD }}
        run: |
          apk --no-cache add gpg
          TF_OUTPUTS=$(terraform -chdir="environments/${ENV}" output -json)
          ENCRYPTED_TF_OUTPUTS=$(echo "${TF_OUTPUTS}" | gpg --symmetric --cipher-algo AES256 --batch --passphrase "${TMP_ENCRYPTION_PASSWORD}" --no-symkey-cache | base64 -w0)
          echo "encrypted_tf_outputs=${ENCRYPTED_TF_OUTPUTS}" >> "${GITHUB_OUTPUT}"
  deploy:
    runs-on: ubuntu-20.04
    environment: staging
    needs: provision
    defaults:
      run:
        working-directory: deployment/docker
    steps:
      - uses: actions/checkout@v3
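      # Reverse of the provision job's "tf output" step: decrypt the terraform outputs
      # and promote the values needed for deployment to environment variables for the
      # following steps via GITHUB_ENV.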
      - id: set-outputs
        name: set outputs
        env:
          ENCRYPTED_TF_OUTPUTS: ${{ needs.provision.outputs.encrypted_tf_outputs }}
          TMP_ENCRYPTION_PASSWORD: ${{ secrets.TMP_ENCRYPTION_PASSWORD }}
        run: |
          TF_OUTPUTS=$(echo "${ENCRYPTED_TF_OUTPUTS}" | base64 -d | gpg --batch --decrypt --passphrase "${TMP_ENCRYPTION_PASSWORD}")
          export TF_OUTPUTS
          python -c "$(cat << EOF
          import os
          import json
          TF_OUTPUTS = json.loads(os.environ.get("TF_OUTPUTS"))
          airflow_conn_s3 = TF_OUTPUTS["airflow_conn_s3"]["value"]
          airflow_conn_pg = TF_OUTPUTS["airflow_conn_pg"]["value"]
          public_ip = TF_OUTPUTS["public_ip"]["value"]
          docker_host = f"ssh://root@{public_ip}"
          print(f"AIRFLOW_CONN_S3={airflow_conn_s3}")
          print(f"AIRFLOW_CONN_PG={airflow_conn_pg}")
print(f"DOCKER_HOST_={docker_host}")
EOF
)" >> "${GITHUB_ENV}"
      # - name: start services
      #   env:
      #     AIRFLOW_CONN_S3: ${{ env.AIRFLOW_CONN_S3 }}
      #     AIRFLOW_CONN_PG: ${{ env.AIRFLOW_CONN_PG }}
      #     BAN_API_URL: ${{ vars.BAN_API_URL }}
      #     DORA_API_URL: ${{ vars.DORA_API_URL }}
      #     INSEE_FIRSTNAME_FILE_URL: ${{ vars.INSEE_FIRSTNAME_FILE_URL }}
      #     INSEE_COG_DATASET_URL: ${{ vars.INSEE_COG_DATASET_URL }}
      #     SIRENE_STOCK_ETAB_GEOCODE_FILE_URL: ${{ vars.SIRENE_STOCK_ETAB_GEOCODE_FILE_URL }}
      #     SIRENE_STOCK_ETAB_HIST_FILE_URL: ${{ vars.SIRENE_STOCK_ETAB_HIST_FILE_URL }}
      #     SIRENE_STOCK_ETAB_LIENS_SUCCESSION_URL: ${{ vars.SIRENE_STOCK_ETAB_LIENS_SUCCESSION_URL }}
      #     SIRENE_STOCK_UNITE_LEGALE_FILE_URL: ${{ vars.SIRENE_STOCK_UNITE_LEGALE_FILE_URL }}
      #     AIRFLOW_WWW_USER_PASSWORD: ${{ secrets.AIRFLOW_WWW_USER_PASSWORD }}
      #     DOCKER_HOST_: ${{ env.DOCKER_HOST_ }}
      #   run: |
      #     DOCKER_HOST="${DOCKER_HOST_}" docker compose up -d
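  # Tears the freshly provisioned stack back down. It runs in a new terraform
  # container, so it re-initializes the same remote backend before destroying.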
  destroy:
    runs-on: ubuntu-20.04
    environment: staging
    needs: provision
    defaults:
      run:
        working-directory: deployment
    container:
      image: hashicorp/terraform:1.4.0
      env:
        TF_IN_AUTOMATION: true
        AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
        AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
        # `TF_VAR_*` environment variables are case sensitive and must match the case of the corresponding Terraform variables
        TF_VAR_datawarehouse_admin_password: ${{ secrets.TF_VAR_DATAWAREHOUSE_ADMIN_PASSWORD }}
        TF_VAR_datawarehouse_admin_username: ${{ vars.TF_VAR_DATAWAREHOUSE_ADMIN_USERNAME }}
        TF_VAR_datawarehouse_di_database: ${{ vars.TF_VAR_DATAWAREHOUSE_DI_DATABASE }}
        TF_VAR_datawarehouse_di_password: ${{ secrets.TF_VAR_DATAWAREHOUSE_DI_PASSWORD }}
        TF_VAR_datawarehouse_di_username: ${{ vars.TF_VAR_DATAWAREHOUSE_DI_USERNAME }}
        TF_VAR_scaleway_access_key: ${{ secrets.TF_VAR_SCALEWAY_ACCESS_KEY }}
        TF_VAR_scaleway_project_id: ${{ vars.TF_VAR_SCALEWAY_PROJECT_ID }}
        TF_VAR_scaleway_secret_key: ${{ secrets.TF_VAR_SCALEWAY_SECRET_KEY }}
        TF_VAR_environment_name: ${{ vars.TF_VAR_ENVIRONMENT_NAME }}
        TF_VAR_ssh_public_key: ${{ vars.SSH_PUBLIC_KEY }}
        ENV: ${{ vars.TF_VAR_ENVIRONMENT_NAME }}
      volumes:
        - .:/deployment
      options: --workdir /deployment
    steps:
      - uses: actions/checkout@v3
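      # Same partial backend configuration as in the provision job, so destroy operates
      # on the state that provision just wrote.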
      - name: tf init
        run: |
          terraform -chdir="environments/${ENV}" init \
            -backend-config "bucket=data-inclusion-terraform" \
            -backend-config "key=stack_data/${ENV}" \
            -backend-config "region=fr-par" \
            -backend-config "endpoint=https://s3.fr-par.scw.cloud"
      - name: tf destroy
        run: |
          terraform -chdir="environments/${ENV}" destroy -auto-approve