help: ## Print the help documentation
	@grep -E '^[/a-zA-Z0-9_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
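# Usage note: `make help` (also what bare `make` runs, since help is the first
# target in this file) prints every target below next to its "##" description.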
LOCALSTACK_SERVICES := s3
LOCALSTACK_DATA_DIR := /tmp/localstack/data
services-start: export SERVICES=$(LOCALSTACK_SERVICES)
services-start: export DATA_DIR=$(LOCALSTACK_DATA_DIR)
services-start: ## Start Django app's supporting services
	mkdir -p /tmp/localstack
	docker-compose -f docker-compose-services.yml up -d --remove-orphans
services-stop: ## Stop Django app's supporting services
	docker-compose -f docker-compose-services.yml down
services-logs: ## Show logs for Django app's supporting services
	docker-compose -f docker-compose-services.yml logs -f
services-setup: ## Create dependency files for supporting services
	bash scripts/gen-redis-certs.sh
services-clean: ## Clean up dependencies
	rm -f certs/redis*
redis-cli: ## Connect to Redis service with redis-cli
	redis-cli --tls --cacert certs/redisCA.crt --cert certs/redis-client.crt --key certs/redis-client.key
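# Note: the Redis client certs referenced above are generated by `make services-setup`
# (scripts/gen-redis-certs.sh); run that first if the certs/ directory is empty.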
ifeq (, $(shell which docker))
mysql-cli: ## Connect to the MySQL server
	mysql -h rds -u user -psecret -D unemployment
else
mysql-cli: ## Connect to the MySQL server
	mysql -h 127.0.0.1 -u user -psecret -D unemployment
endif
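# The conditional above is a heuristic: when the docker CLI is not on PATH, the
# target is assumed to be running inside the compose network (MySQL host "rds");
# otherwise it connects to the port published on the host at 127.0.0.1.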
mysql-reset: ## Reset the local database
	mysql -h 127.0.0.1 -u root -psecretpassword -e "DROP DATABASE unemployment"
	mysql -h 127.0.0.1 -u root -psecretpassword -e "CREATE DATABASE IF NOT EXISTS unemployment"
clean-claims: ## Delete all claims and events in the local environment
	mysql -h 127.0.0.1 -u user -psecret -D unemployment -e "DELETE from claims"
	mysql -h 127.0.0.1 -u user -psecret -D unemployment -e "DELETE from events"
schema: ## Dump the MySQL schema to docs/schema.sql (requires mysqldump command)
	mysqldump --no-data --no-tablespaces -h 127.0.0.1 -u user -psecret unemployment > docs/schema.sql
erd: ## Create ERD of the app data models (requires graphviz installed locally with "dot" command)
	docker exec -it $(DOCKER_CONTAINER_ID) python manage.py graph_models -E --exclude-models TimeStampedModel --dot -o schema-erd.out core -a
	dot -Tpng schema-erd.out -o docs/schema-erd.png
	rm schema-erd.out
DOCKER_IMG="dolui:claimants"
DOCKER_NAME="dolui-claimants"
ifeq (, $(shell which docker))
DOCKER_CONTAINER_ID := docker-is-not-installed
else
DOCKER_CONTAINER_ID := $(shell docker ps --filter ancestor=$(DOCKER_IMG) --format "{{.ID}}" -a)
endif
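# Note: DOCKER_CONTAINER_ID is resolved once, when make parses this file, by asking
# `docker ps -a` for a container based on $(DOCKER_IMG). Targets that use it (erd,
# container-stop, container-rm, container-attach, container-setup-react-tests)
# assume such a container already exists, e.g. one started via `make login` or
# `make container-run`.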
REACT_APP = claimant
CI_ENV_FILE=core/.env-ci
CI_SERVICES=-f docker-compose-services.yml
CI_DOCKER_COMPOSE_OPTS=--env-file=$(CI_ENV_FILE) -f docker-compose-ci.yml
CI_OPTS=$(CI_SERVICES) $(CI_DOCKER_COMPOSE_OPTS)
ci-build: ## Build the docker images for CI
	docker-compose $(CI_OPTS) build
ci-start: services-setup ## Start Django app's supporting services (in CI)
	docker-compose $(CI_OPTS) up -d --no-recreate
ci-stop: ## Stop Django app's supporting services (in CI)
	docker-compose $(CI_OPTS) down
ci-tests: ## Run Django app tests in Docker
	docker-compose $(CI_OPTS) logs --tail="all"
	docker exec web ./run-ci-tests.sh
ci-test: ci-tests ## Alias for ci-tests
ci-clean: ## Remove all the CI service images (including those in docker-compose-services)
	docker-compose $(CI_OPTS) down --rmi all
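# A typical local CI pass might look like this (a sketch, not a required sequence):
#   make ci-build && make ci-start && make ci-tests && make ci-stop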
lint-check: ## Run lint check
	pre-commit run --all-files
lint-fix: ## Fix lint-checking issues
	black .
	cd $(REACT_APP) && make lint-fix
lint: lint-check lint-fix ## Lint the code
dockerlint-run: ## Run redcoolbeans/dockerlint
	docker run --rm -v "$(PWD)/Dockerfile":/Dockerfile:ro redcoolbeans/dockerlint:0.3.1
migrate: ## Run Django data model migrations (inside container)
	python manage.py migrate
migrations: ## Generate Django migrations from models (inside container)
	python manage.py makemigrations
migrations-check: ## Check for Django model changes not reflected in migrations (inside container)
	python manage.py makemigrations --check --no-input
# This runs one worker node named w1 with 2 child prefork processes (-c 2); without
# -c, the default is the number of cores on the machine. See
# http://docs.celeryq.org/en/latest/getting-started/next-steps.html#starting-the-worker
# By default logs are written to /var/log/celery, but we tail them via start-server.sh.
CELERY_OPTS = w1 -c 2 -A core -l info --verbose
CELERY_LOGDIR = /var/log/celery
# log names are directly tied to OPTS so if you change OPTS, change LOGS
CELERY_LOGS = w1 w1-1 w1-2
celery-touch-logs: ## Make sure all the celery log files exist (inside container)
	for logfile in $(CELERY_LOGS); do touch "$(CELERY_LOGDIR)/$$logfile.log"; done
celery-watch-logs: ## Tail all the celery log files (inside container)
	tail -F -q $(CELERY_LOGDIR)/*
celery-start: ## Run the celery queue manager (inside container)
	celery multi start $(CELERY_OPTS)
celery-restart: ## Restart the celery queue manager (inside container)
	celery multi restart $(CELERY_OPTS)
celery-stop: ## Stop the celery queue manager (inside container)
	celery multi stopwait $(CELERY_OPTS)
celery-status: ## Display status of celery worker(s) (inside the container)
	celery -A core status
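# Example in-container sequence (a sketch; per the comment above, start-server.sh
# normally tails these logs during startup):
#   make celery-touch-logs && make celery-start && make celery-watch-logs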
dev-deps: ## Install local development environment dependencies
	pip install pre-commit black bandit safety jsonschema git+https://github.com/pkarman/jsonref.git@590c416#egg=jsonref
dev-env-files: ## Reset local env files based on .env-example files
	cp ./core/.env-example ./core/.env
	cp ./claimant/.env-example ./claimant/.env
dev-ld-config: ## Reset ld-config.json based on ld-config-test.json
	cp ./core/ld-config-test.json ./core/ld-config.json
container-build: ## Build the Django app container image (local development)
	docker build --platform linux/amd64 -f Dockerfile -t $(DOCKER_IMG) --build-arg ENV_NAME=devlocal --target djangobase-devlocal .
acr-login: ## Log into the Azure Container Registry
	docker login ddphub.azurecr.io
PYTHON_BASE_IMAGE_NAME=3.9.11-slim-bullseye
container-build-wcms: ## Build the Django app container image (to test image configuration for deployed environment)
	docker build -f Dockerfile -t $(DOCKER_IMG) --build-arg ENV_NAME=wcms --build-arg BASE_PYTHON_IMAGE_REGISTRY=ddphub.azurecr.io/dol-official --build-arg BASE_PYTHON_IMAGE_VERSION=$(PYTHON_BASE_IMAGE_NAME) .
container-run: ## Run the Django app in Docker
	docker run --network arpaui_app-tier --rm -it -p 8004:8000 $(DOCKER_IMG)
container-run-with-env-file: ## Run the Django app in Docker using core/.env env-file (useful in combination with container-build-wcms)
	docker run --network arpaui_app-tier --rm -it -p 8004:8000 --env-file=core/.env $(DOCKER_IMG)
container-stop: ## Stop the Django app container with DOCKER_CONTAINER_ID
	docker stop $(DOCKER_CONTAINER_ID)
container-rm: ## Remove the Django app container with DOCKER_CONTAINER_ID
	docker rm $(DOCKER_CONTAINER_ID)
container-clean: ## Remove the Django app container image
	docker image rm $(DOCKER_IMG)
container-build-clean: ## Build ignoring all Docker layers (--no-cache)
	docker build --no-cache -f Dockerfile -t $(DOCKER_IMG) --build-arg ENV_NAME=devlocal --target djangobase-devlocal .
container-setup-react-tests: ## Create local artifacts required for running Cypress tests
	docker exec -it $(DOCKER_CONTAINER_ID) ./setup-cypress-tests.sh
container-updates: ## Create list of upgradeable apt packages for the current container
	docker run --rm -it python:$(PYTHON_BASE_IMAGE_NAME) bash -c "apt-get update && apt list --upgradeable"
container: container-clean container-build ## Alias for container-clean container-build
SECRET_LENGTH := 32
secret: ## Generate string for SECRET_KEY or REDIS_SECRET_KEY env variable
	@python -c "import secrets; import base64; print(base64.urlsafe_b64encode(secrets.token_bytes($(SECRET_LENGTH))).decode('utf-8'))"
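# Examples (output is random; length assumes the default SECRET_LENGTH=32):
#   make secret                   # prints a 44-character URL-safe base64 string
#   make secret SECRET_LENGTH=64  # longer key material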
x509-certs: ## Generate x509 public/private certs for registering with Identity Provider
	scripts/gen-x509-certs.sh
ec-keys: ## Generate ECDSA public/private key pair
	scripts/gen-ec-keys.sh $(PREFIX)
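# Example (PREFIX is forwarded as-is to scripts/gen-ec-keys.sh; the value here is
# purely illustrative):
#   make ec-keys PREFIX=my-swa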
add-swa-key: ## Import a public .pem file into a SWA record. Requires SWA=code and PEM=path/file.pem arguments.
ifeq ($(ROTATE),)
	python manage.py import_swa_public_key $(SWA) $(PEM)
else
	python manage.py import_swa_public_key $(SWA) $(PEM) --rotate
endif
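# Examples (the SWA code and paths are illustrative):
#   make add-swa-key SWA=XX PEM=path/to/public.pem
#   make add-swa-key SWA=XX PEM=path/to/new-public.pem ROTATE=true  # any non-empty ROTATE adds --rotate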
FEATURESET=1
create-swa: ## Create a SWA model record. Requires SWA=code, URL=url, and NAME=name values.
	python manage.py create_swa $(SWA) $(NAME) $(URL) --featureset=$(FEATURESET)
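# Example (all values are illustrative):
#   make create-swa SWA=XX NAME=ExampleState URL=https://swa.example.gov FEATURESET=2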
activate-swa: ## Set SWA record status=Active
	python manage.py activate_swa $(SWA)
deactivate-swa: ## Set SWA record status=Inactive
	python manage.py deactivate_swa $(SWA)
bucket: ## Create S3 bucket in localstack service (run inside container)
	python manage.py create_bucket
dol-bucket: ## Create DOL S3 bucket in localstack service (run inside container)
	python manage.py create_bucket --dol
rotate-claim-secrets: ## Rotate the symmetrical encryption Claim keys. Requires OLD_KEY=str NEW_KEY=str (run inside container)
	python manage.py rotate_claim_keys $(OLD_KEY) $(NEW_KEY)
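# Example (both keys are placeholders, not real values):
#   make rotate-claim-secrets OLD_KEY=<current-key> NEW_KEY=<replacement-key>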
prepackage-claim: ## Encrypt/store a plaintext .json claim and create its related metadata. Requires SWA, CLAIMANT, IDP, JSON, SCHEMA name vars. (run inside container)
	python manage.py prepackage_claim $(SWA) $(CLAIMANT) $(IDP) $(JSON) $(SCHEMA)
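# Example (every value is illustrative; consult the prepackage_claim management
# command for the accepted formats):
#   make prepackage-claim SWA=XX CLAIMANT=claimant-123 IDP=logindotgov JSON=path/to/claim.json SCHEMA=claim-v1.0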
# This env var is set just so that settings.py can determine how it was invoked.
build-static: export BUILD_STATIC=true
build-static: ## Build the static assets (intended for use during container-build (inside the container))
	rm -rf static/
	rm -f home/static/*.md
	mkdir static
	python manage.py collectstatic
	cp home/templates/favicon.ico static/
	cp home/templates/dol-logo512.png static/logo512.png
	cp home/templates/dol-logo192.png static/logo192.png
	cp claimant/build/manifest.json static/manifest.json
	cd static && ln -s ../schemas schemas
build-translations: ## Compile .po (translation) files into binary .mo files
	python manage.py compilemessages
update-translations: ## Update the .po files (run manually inside the container)
	python manage.py makemessages --no-wrap --locale=en --locale=es --ignore=claimant/*
build-cleanup: ## Common final tasks for the various Dockerfile targets (intended for use during container-build (inside the container))
	rm -f requirements*.txt
	apt-get purge -y --auto-remove gcc git
	apt-get remove -y linux-libc-dev
	chown -R doluiapp:doluiapp /app
# Use /run/celery in these commands rather than /var/run/celery
# due to differences in how the docker engine and kaniko handle
# the /var/run directory during the docker image build. In the
# docker image, /var/run is symlinked to /run.
	mkdir -p /run/celery
	chown -R doluiapp:doluiapp /run/celery
	mkdir -p /var/log/celery
	chown -R doluiapp:doluiapp /var/log/celery
# The --mount options ignore the local build dirs in favor of what is on the image.
login: ## Log into the Django app docker container
	docker run --rm -it \
		--network arpaui_app-tier \
		--name $(DOCKER_NAME) \
		--mount type=volume,dst=/app/claimant/build \
		--mount type=volume,dst=/app/home/static \
		--add-host=host.docker.internal:host-gateway \
		-v $(PWD):/app \
		-p 8004:8000 \
		$(DOCKER_IMG) /bin/bash
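# A common development loop (a sketch, not prescriptive): `make login`, then inside
# the container run `make migrate` and `make dev-run`; the app is then reachable on
# host port 8004 per the -p mapping above.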
container-attach: ## Attach to a running container and open a shell (like login, but for a running container)
	docker exec -it $(DOCKER_CONTAINER_ID) /bin/bash
dev-run: ## Run the Django app, tracking changes
	python manage.py runserver 0:8000
run: ## Run the Django app, without tracking changes
	python manage.py runserver 0:8000 --noreload
shell: ## Open interactive Django shell (run inside container)
	python manage.py shell
# important! this env var must be set to trigger the correct key/config generation.
test-django: export LOGIN_DOT_GOV_ENV=test
test-django: ## Run Django app tests
	coverage run manage.py test -v 2 --pattern="*tests*py"
	coverage report -m --skip-covered --fail-under 90
	coverage xml --fail-under 90
ci-setup-react-tests: ## Create test data required for React (Cypress) tests
	docker exec web ./setup-cypress-tests.sh
ci-test-react: ## Run React tests in CI
	cd $(REACT_APP) && make ci-tests
test: test-django ## Run tests (must be run within Django app docker container)
test-wcms: test-django-wcms ## Run tests in WCMS environment (must be run within Django app docker container)
list-outdated: ## List outdated dependencies
	pip list --outdated
	cd $(REACT_APP) && make list-outdated
# https://github.com/suyashkumar/ssl-proxy
dev-ssl-proxy: ## Run ssl-proxy
	ssl-proxy -from 0.0.0.0:4430 -to 127.0.0.1:8004
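# Assumes the ssl-proxy binary from the repository linked above is installed locally;
# it terminates TLS on port 4430 and forwards to the app on 127.0.0.1:8004 (the host
# port used by container-run/login).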
smtp-server: ## Start the debugging SMTP server
	python -m smtpd -n -c DebuggingServer 0.0.0.0:1025
security: ## Run all security scans
	bandit -x ./.venv,./$(REACT_APP) -r .
	safety check
	cd $(REACT_APP) && make security
diff-test: ## Fail if there are any local changes, using git diff
	@changed_files=`git diff --name-only`; if [ "$$changed_files" != "" ]; then echo "Local changes exist:\n$$changed_files" && exit 1; fi
schema-check: ## Validate the example JSON instances against the relevant JSON Schemas
	python scripts/check-json-schema.py schemas/claim-v1.0.json schemas/claim-v1.0-example.json
	python scripts/check-json-schema.py schemas/identity-v1.0.json schemas/identity-v1.0-example-ial1.json
	python scripts/check-json-schema.py schemas/identity-v1.0.json schemas/identity-v1.0-example-ial2.json
soc: ## Build the SOC codes from the BLS site
	curl https://www.bls.gov/soc/2018/major_groups.htm > major_groups.htm
	python scripts/parse-soc-2018-webpage.py > soc-entries.json
	curl https://www.onetcenter.org/dl_files/database/db_26_1_text/Occupation%20Data.txt > onet-occupation.txt
	python scripts/parse-onet-occupations.py onet-occupation.txt > onet-occupation.json
	python scripts/merge-onet-soc-2018.py onet-occupation.json soc-entries.json > claimant/src/fixtures/soc_entries_2018.json
soc-clean: ## Clean up the SOC code temp files
	rm -f major_groups.htm onet-occupation.txt onet-occupation.json soc-entries.json
hourly-tasks: ## Run the named tasks intended to be called on an hourly schedule
	python manage.py delete_expired_partial_claims
	python manage.py complete_expired_identity_claims
swa_xid: ## Generate a swa_xid based on the current timestamp
	printf "%s-%s-%s-%s\n" `date +%Y%m%d` `date +%H%M%S` "1234567" "123456789";
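# Example output (the timestamp portion is illustrative): 20240814-123456-1234567-123456789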
default: help
.PHONY: services-start services-stop services-logs ci-start ci-stop