# deployment.yml
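#
# On every push and pull request targeting "main": the `provision` job
# applies the Terraform stack for the staging environment, then the
# `deploy` job starts the services on the provisioned server with
# docker compose over SSH.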
name: deployment

on:
  push:
    branches:
      - "main"
  pull_request:
    branches:
      - "main"

jobs:
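  # Runs Terraform inside the official hashicorp/terraform container to
  # create or update the staging infrastructure on Scaleway.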
  provision:
    runs-on: ubuntu-20.04
    environment: staging
    defaults:
      run:
        working-directory: deployment
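    # Job outputs are not treated as secrets by GitHub, so the Terraform
    # outputs (which contain credentials) are passed to the deploy job
    # symmetrically encrypted; see the `tf output` step below.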
    outputs:
      encrypted_tf_outputs: ${{ steps.tf-output.outputs.encrypted_tf_outputs }}
    container:
      image: hashicorp/terraform:1.4.0
      env:
        TF_IN_AUTOMATION: true
        AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
        AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
        # `TF_VAR_*` names are case sensitive: the suffix must exactly match
        # the name of the corresponding Terraform variable.
        TF_VAR_datawarehouse_admin_password: ${{ secrets.TF_VAR_DATAWAREHOUSE_ADMIN_PASSWORD }}
        TF_VAR_datawarehouse_admin_username: ${{ vars.TF_VAR_DATAWAREHOUSE_ADMIN_USERNAME }}
        TF_VAR_datawarehouse_di_database: ${{ vars.TF_VAR_DATAWAREHOUSE_DI_DATABASE }}
        TF_VAR_datawarehouse_di_password: ${{ secrets.TF_VAR_DATAWAREHOUSE_DI_PASSWORD }}
        TF_VAR_datawarehouse_di_username: ${{ vars.TF_VAR_DATAWAREHOUSE_DI_USERNAME }}
        TF_VAR_scaleway_access_key: ${{ secrets.TF_VAR_SCALEWAY_ACCESS_KEY }}
        TF_VAR_scaleway_project_id: ${{ vars.TF_VAR_SCALEWAY_PROJECT_ID }}
        TF_VAR_scaleway_secret_key: ${{ secrets.TF_VAR_SCALEWAY_SECRET_KEY }}
        TF_VAR_environment_name: ${{ vars.TF_VAR_ENVIRONMENT_NAME }}
        ENV: ${{ vars.TF_VAR_ENVIRONMENT_NAME }}
      volumes:
        - .:/deployment
      options: --workdir /deployment
    steps:
      - uses: actions/checkout@v3
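      # The Terraform state lives in a Scaleway Object Storage bucket
      # (S3-compatible), hence the AWS_* credentials above.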
      - name: tf init
        run: |
          terraform -chdir="environments/${ENV}" init \
            -backend-config "bucket=data-inclusion-terraform" \
            -backend-config "key=stack_data/${ENV}" \
            -backend-config "region=fr-par" \
            -backend-config "endpoint=https://s3.fr-par.scw.cloud"
      - name: tf validate
        run: |
          terraform -chdir="environments/${ENV}" validate
      - name: tf plan
        run: |
          terraform -chdir="environments/${ENV}" plan
      - name: tf apply
        run: |
          terraform -chdir="environments/${ENV}" apply -auto-approve
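      # Encrypt the Terraform outputs with a throwaway symmetric passphrase
      # so they can safely cross the job boundary; the deploy job decrypts
      # them with the same TMP_ENCRYPTION_PASSWORD. `base64 -w0` keeps the
      # ciphertext on a single line, as required for a step output.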
      - id: tf-output
        name: tf output
        env:
          TMP_ENCRYPTION_PASSWORD: ${{ secrets.TMP_ENCRYPTION_PASSWORD }}
        run: |
          apk --no-cache add gpg
          TF_OUTPUTS=$(terraform -chdir="environments/${ENV}" output -json)
          ENCRYPTED_TF_OUTPUTS=$(echo "${TF_OUTPUTS}" | gpg --symmetric --cipher-algo AES256 --batch --passphrase "${TMP_ENCRYPTION_PASSWORD}" --no-symkey-cache | base64 -w0)
          echo "encrypted_tf_outputs=${ENCRYPTED_TF_OUTPUTS}" >> "${GITHUB_OUTPUT}"
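  # Decrypts the Terraform outputs, derives the connection strings and the
  # server address from them, then starts the docker compose stack on the
  # provisioned server over SSH.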
  deploy:
    runs-on: ubuntu-20.04
    environment: staging
    needs: provision
    defaults:
      run:
        working-directory: deployment/docker
    steps:
      - uses: actions/checkout@v3
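      # Decrypt the outputs from the provision job and expose the values this
      # job needs as step outputs, masking the connection strings so they do
      # not leak into the logs.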
      - id: set-outputs
        name: set outputs
        env:
          ENCRYPTED_TF_OUTPUTS: ${{ needs.provision.outputs.encrypted_tf_outputs }}
          TMP_ENCRYPTION_PASSWORD: ${{ secrets.TMP_ENCRYPTION_PASSWORD }}
        run: |
          TF_OUTPUTS=$(echo "${ENCRYPTED_TF_OUTPUTS}" | base64 -d | gpg --batch --decrypt --passphrase "${TMP_ENCRYPTION_PASSWORD}")
          # `jq -r` strips the JSON quoting so the raw values are stored
          AIRFLOW_CONN_S3=$(echo "${TF_OUTPUTS}" | jq -r '.airflow_conn_s3.value')
          AIRFLOW_CONN_PG=$(echo "${TF_OUTPUTS}" | jq -r '.airflow_conn_pg.value')
          SERVER_PUBLIC_IP=$(echo "${TF_OUTPUTS}" | jq -r '.public_ip.value')
          echo "::add-mask::${AIRFLOW_CONN_S3}"
          echo "::add-mask::${AIRFLOW_CONN_PG}"
          echo "airflow_conn_s3=${AIRFLOW_CONN_S3}" >> "${GITHUB_OUTPUT}"
          echo "airflow_conn_pg=${AIRFLOW_CONN_PG}" >> "${GITHUB_OUTPUT}"
          echo "server_public_ip=${SERVER_PUBLIC_IP}" >> "${GITHUB_OUTPUT}"
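      # Make the provisioned server reachable under the `staging` SSH alias.
      # Host key checking is disabled because the server was just created and
      # its key is not yet known.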
      - name: set up ssh
        env:
          SERVER_PUBLIC_IP: ${{ steps.set-outputs.outputs.server_public_ip }}
          SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }}
        run: |
          mkdir -p ~/.ssh
          echo "${SSH_PRIVATE_KEY}" > ~/.ssh/key
          chmod 600 ~/.ssh/key
          cat >> ~/.ssh/config << EOF
          Host staging
            HostName ${SERVER_PUBLIC_IP}
            User root
            IdentityFile ~/.ssh/key
            StrictHostKeyChecking no
          EOF
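      # Pointing DOCKER_HOST at the SSH alias makes docker compose run the
      # stack on the remote server rather than on the CI runner.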
      - name: start services
        env:
          AIRFLOW_CONN_S3: ${{ steps.set-outputs.outputs.airflow_conn_s3 }}
          AIRFLOW_CONN_PG: ${{ steps.set-outputs.outputs.airflow_conn_pg }}
          API_SECRET_KEY: ${{ secrets.API_SECRET_KEY }}
          BAN_API_URL: ${{ vars.BAN_API_URL }}
          DORA_API_URL: ${{ vars.DORA_API_URL }}
          INSEE_FIRSTNAME_FILE_URL: ${{ vars.INSEE_FIRSTNAME_FILE_URL }}
          INSEE_COG_DATASET_URL: ${{ vars.INSEE_COG_DATASET_URL }}
          SIRENE_STOCK_ETAB_GEOCODE_FILE_URL: ${{ vars.SIRENE_STOCK_ETAB_GEOCODE_FILE_URL }}
          SIRENE_STOCK_ETAB_HIST_FILE_URL: ${{ vars.SIRENE_STOCK_ETAB_HIST_FILE_URL }}
          SIRENE_STOCK_ETAB_LIENS_SUCCESSION_URL: ${{ vars.SIRENE_STOCK_ETAB_LIENS_SUCCESSION_URL }}
          SIRENE_STOCK_UNITE_LEGALE_FILE_URL: ${{ vars.SIRENE_STOCK_UNITE_LEGALE_FILE_URL }}
          AIRFLOW_WWW_USER_PASSWORD: ${{ secrets.AIRFLOW_WWW_USER_PASSWORD }}
        run: |
          DOCKER_HOST="ssh://staging" docker compose up -d