diff --git a/.env b/.env index 87dc6688..6a9709c2 100644 --- a/.env +++ b/.env @@ -4,7 +4,6 @@ DATABASE_USER=workflow DATABASE_PASS=workflow DATABASE_PORT=5432 DATABASE_HOST=db -SETTINGS_DIR=/var/www/workflow/app TIME_ZONE=America/New_York APP_SECRET='-0zoc$fl2fa&rmzeo#uh-qz-k+4^1)_9p1qwby1djzybqtl_nn' @@ -17,3 +16,5 @@ WORKER_PASS=worker AMQ_BROKER=[["activemq", 61613]] AMQ_QUEUE=["/topic/SNS.COMMON.STATUS.WORKFLOW.0", "/topic/SNS.COMMON.STATUS.AUTOREDUCE.0", "/topic/SNS.*.APP.DASMON", "/topic/SNS.*.STATUS.DASMON", "/topic/SNS.*.SIGNAL.DASMON", "/topic/SNS.*.APP.SMS", "/topic/SNS.*.STATUS.SMS", "/topic/SNS.*.STATUS.POSTPROCESS", "/topic/SNS.COMMON.STATUS.ACK", "/topic/SNS.*.STATUS.PVSD", "/topic/HFIR.*.APP.DASMON", "/topic/HFIR.*.STATUS.DASMON", "/topic/HFIR.*.SIGNAL.DASMON", "/topic/HFIR.*.APP.SMS", "/topic/HFIR.*.STATUS.SMS", "/topic/HFIR.*.STATUS.POSTPROCESS", "/topic/HFIR.COMMON.STATUS.ACK", "/topic/HFIR.*.STATUS.PVSD"] + +LIVE_DATA_SERVER_DOMAIN=172.16.238.222 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7237bdbd..033db532 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -13,8 +13,8 @@ jobs: run: shell: bash -l {0} steps: - - uses: actions/checkout@v2 - - uses: conda-incubator/setup-miniconda@v2 + - uses: actions/checkout@v4 + - uses: conda-incubator/setup-miniconda@v3 with: auto-update-conda: true - name: Setup libmamba solver diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 93eb7f3e..7411f189 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -3,6 +3,7 @@ name: Wheel + Docker publish on: push: branches: + - next - qa - main @@ -13,17 +14,17 @@ env: jobs: build: - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest defaults: run: shell: bash -l {0} steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: conda-incubator/setup-miniconda@v2 + - uses: conda-incubator/setup-miniconda@v3 with: auto-update-conda: true - name: Setup libmamba solver @@ -40,29 +41,28 @@ jobs: run: | conda activate webmon make wheel/all - - name: Get version - id: version - run: | - conda activate webmon - echo "::set-output name=version::$(versioningit src/webmon_app)" - name: Create tag version id: tag run: | conda activate webmon - echo "::set-output name=tag::$(versioningit src/webmon_app).b$(date +'%Y%m%d%H%M')" + echo "tag=$(versioningit src/webmon_app)" >> $GITHUB_OUTPUT - name: Create latest tag version id: latest_tag run: | case ${{ github.ref }} in + refs/heads/next) + echo "latest_tag=latest-dev" >> $GITHUB_OUTPUT + ;; + refs/heads/qa) - echo "::set-output name=latest_tag::latest-test" + echo "latest_tag=latest-test" >> $GITHUB_OUTPUT ;; refs/heads/main) - echo "::set-output name=latest_tag::latest-prod" + echo "latest_tag=latest-prod" >> $GITHUB_OUTPUT ;; *) @@ -72,10 +72,10 @@ jobs: esac - name: Upload wheels - uses: softprops/action-gh-release@v1 - if: ${{ github.ref != 'refs/heads/next' }} + uses: softprops/action-gh-release@v2 + if: ${{ github.ref == 'refs/heads/main' }} with: - tag_name: ${{ steps.version.outputs.version }} + tag_name: ${{ steps.tag.outputs.tag }} files: src/*/dist/*.whl - name: Check tag names @@ -87,78 +87,122 @@ jobs: run: make SNSdata.tar.gz - name: Log into registry ${{ env.REGISTRY }} - uses: docker/login-action@v1 + uses: docker/login-action@v3 with: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Build and push amq_test_gen - uses: 
docker/build-push-action@v2 + uses: docker/build-push-action@v6 with: context: . file: Dockerfile.amq_test_gen tags: | ${{ env.REGISTRY }}/${{ github.repository }}/amq_test_gen:${{ steps.latest_tag.outputs.latest_tag }} + push: true + + - name: Push amq_test_gen with version tag only for main branch + if: github.ref == 'refs/heads/main' + uses: docker/build-push-action@v6 + with: + context: . + file: Dockerfile.amq_test_gen + tags: | ${{ env.REGISTRY }}/${{ github.repository }}/amq_test_gen:${{ steps.tag.outputs.tag }} push: true - name: Build and push Autoreducer - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v6 with: context: . file: Dockerfile.autoreducer tags: | ${{ env.REGISTRY }}/${{ github.repository }}/autoreducer:${{ steps.latest_tag.outputs.latest_tag }} - ${{ env.REGISTRY }}/${{ github.repository }}/autoreducer:${{ steps.tag.outputs.tag }} push: true - - name: Build and push Catalog - uses: docker/build-push-action@v2 + - name: Push Autoreducer with version tag only for main branch + if: github.ref == 'refs/heads/main' + uses: docker/build-push-action@v6 with: context: . - file: Dockerfile.catalog + file: Dockerfile.autoreducer tags: | - ${{ env.REGISTRY }}/${{ github.repository }}/catalog:${{ steps.latest_tag.outputs.latest_tag }} - ${{ env.REGISTRY }}/${{ github.repository }}/catalog:${{ steps.tag.outputs.tag }} + ${{ env.REGISTRY }}/${{ github.repository }}/autoreducer:${{ steps.tag.outputs.tag }} push: true - name: Build and push Dasmon - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v6 with: context: . file: Dockerfile.dasmon tags: | ${{ env.REGISTRY }}/${{ github.repository }}/dasmon:${{ steps.latest_tag.outputs.latest_tag }} + push: true + + - name: Push Dasmon with version tag only for main branch + if: github.ref == 'refs/heads/main' + uses: docker/build-push-action@v6 + with: + context: . + file: Dockerfile.dasmon + tags: | ${{ env.REGISTRY }}/${{ github.repository }}/dasmon:${{ steps.tag.outputs.tag }} push: true - name: Build and push pv_test_gen - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v6 with: context: . file: Dockerfile.pv_test_gen tags: | ${{ env.REGISTRY }}/${{ github.repository }}/pv_test_gen:${{ steps.latest_tag.outputs.latest_tag }} + push: true + + - name: Push pv_test_gen with version tag only for main branch + if: github.ref == 'refs/heads/main' + uses: docker/build-push-action@v6 + with: + context: . + file: Dockerfile.pv_test_gen + tags: | ${{ env.REGISTRY }}/${{ github.repository }}/pv_test_gen:${{ steps.tag.outputs.tag }} push: true - name: Build and push Webmon - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v6 with: context: . file: Dockerfile.webmon tags: | ${{ env.REGISTRY }}/${{ github.repository }}/webmon:${{ steps.latest_tag.outputs.latest_tag }} + push: true + + - name: Push Webmon with version tag only for main branch + if: github.ref == 'refs/heads/main' + uses: docker/build-push-action@v6 + with: + context: . + file: Dockerfile.webmon + tags: | ${{ env.REGISTRY }}/${{ github.repository }}/webmon:${{ steps.tag.outputs.tag }} push: true - name: Build and push Workflow - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v6 with: context: . file: Dockerfile.workflow tags: | ${{ env.REGISTRY }}/${{ github.repository }}/workflow:${{ steps.latest_tag.outputs.latest_tag }} + push: true + + - name: Push Workflow with version tag only for main branch + if: github.ref == 'refs/heads/main' + uses: docker/build-push-action@v6 + with: + context: . 
+ file: Dockerfile.workflow + tags: | ${{ env.REGISTRY }}/${{ github.repository }}/workflow:${{ steps.tag.outputs.tag }} push: true diff --git a/.github/workflows/systemtests.yml b/.github/workflows/systemtests.yml index 51c6506a..ac2a3030 100644 --- a/.github/workflows/systemtests.yml +++ b/.github/workflows/systemtests.yml @@ -13,8 +13,8 @@ jobs: run: shell: bash -l {0} steps: - - uses: actions/checkout@v2 - - uses: conda-incubator/setup-miniconda@v2 + - uses: actions/checkout@v4 + - uses: conda-incubator/setup-miniconda@v3 with: auto-update-conda: true - name: Setup libmamba solver @@ -27,12 +27,10 @@ jobs: conda env create --file conda_environment.yml --quiet conda activate webmon conda env update --file conda_development.yml --quiet - - name: Build wheels + - name: Build requirements run: | conda activate webmon - make wheel/dasmon wheel/webmon wheel/workflow - - name: Build test data for autoreducer - run: make SNSdata.tar.gz + make all - name: Stand up docker containers run: docker-compose up --build -d env: diff --git a/Makefile b/Makefile index a43fccc3..c57f0396 100644 --- a/Makefile +++ b/Makefile @@ -17,7 +17,7 @@ help: # this nifty perl one-liner collects all comments headed by the double "#" symbols next to each target and recycles them as comments @perl -nle'print $& if m{^[/a-zA-Z_-]+:.*?## .*$$}' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-25s\033[0m %s\n", $$1, $$2}' -all: wheel/dasmon wheel/webmon wheel/workflow SNSdata.tar.gz +all: wheel/dasmon wheel/webmon wheel/workflow SNSdata.tar.gz ssl create/conda: ## create conda environment "webmon" with file conda_environment.yml conda env create --name webmon --file conda_environment.yml @@ -121,6 +121,11 @@ SNSdata.tar.gz: ## install SNS data for testing and limited info display # it needs to be removed when the directory changes tar czf SNSdata.tar.gz -C tests/data/ . +ssl: nginx/nginx.crt nginx/nginx.key ## self-signed ssl certificates for livedata server + +nginx/nginx.crt nginx/nginx.key: + openssl req -x509 -out nginx/nginx.crt -keyout nginx/nginx.key -newkey rsa:2048 -nodes -sha256 --config nginx/san.cnf + localdev/up: ## create images and start containers for local development. Doesn't update python wheels, though. docker-compose --file docker-compose.yml up --build diff --git a/config/livedata_local_settings.py b/config/livedata_local_settings.py new file mode 100644 index 00000000..62045a32 --- /dev/null +++ b/config/livedata_local_settings.py @@ -0,0 +1 @@ +ALLOWED_HOSTS = ["*"] diff --git a/docker-compose.yml b/docker-compose.yml index 07944063..faede670 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -6,24 +6,30 @@ services: image: nginx:1.21.1 ports: - "80:80" + - "443:443" volumes: - web-static:/var/www/workflow/static + - web-static-livedata:/var/www/livedata/static - ./nginx/django.conf:/etc/nginx/conf.d/default.conf + - ./nginx/nginx.crt:/etc/nginx/nginx.crt + - ./nginx/nginx.key:/etc/nginx/nginx.key healthcheck: test: ["CMD", "service", "nginx", "status"] depends_on: webmon: condition: service_healthy + networks: + default: + ipv4_address: 172.16.238.222 webmon: restart: always build: context: . 
dockerfile: Dockerfile.webmon - # build using the host's network, required in certain environments when accessing the debian repositories - network: host volumes: - web-static:/var/www/workflow/static/ + - ./nginx/nginx.crt:/nginx.crt env_file: - .env - .env.ci @@ -36,6 +42,7 @@ services: - CATALOG_ID=${CATALOG_ID} - CATALOG_SECRET=${CATALOG_SECRET} - GUNICORN_CMD_ARGS=--reload --workers=8 + - HTTPLIB2_CA_CERTS=/nginx.crt healthcheck: test: wget --no-verbose --tries=1 --spider http://localhost:8000/ht || exit 1 interval: 60s @@ -50,7 +57,6 @@ services: build: context: . dockerfile: Dockerfile.dasmon - network: host env_file: - .env command: /usr/bin/docker-entrypoint.sh @@ -66,7 +72,6 @@ services: build: context: . dockerfile: Dockerfile.workflow - network: host env_file: - .env command: /usr/bin/docker-entrypoint.sh @@ -114,7 +119,6 @@ services: build: context: . dockerfile: Dockerfile.autoreducer - network: host hostname: autoreducer healthcheck: test: ["CMD", "pgrep", "python"] @@ -126,7 +130,6 @@ services: build: context: . dockerfile: Dockerfile.autoreducer.himem - network: host hostname: autoreducer.himem healthcheck: test: ["CMD", "pgrep", "python"] @@ -139,7 +142,6 @@ services: build: context: . dockerfile: Dockerfile.amq_test_gen - network: host depends_on: activemq: condition: service_healthy @@ -149,7 +151,6 @@ services: build: context: . dockerfile: Dockerfile.pv_test_gen - network: host env_file: - .env depends_on: @@ -158,6 +159,29 @@ services: webmon: condition: service_healthy + livedata: + restart: always + image: ghcr.io/neutrons/live_data_server/live_data_server:latest-dev + env_file: + - .env + environment: + DJANGO_SUPERUSER_USERNAME: ${DATABASE_USER} + DJANGO_SUPERUSER_PASSWORD: ${DATABASE_PASS} + volumes: + - web-static-livedata:/var/www/livedata/static + - ./config/livedata_local_settings.py:/var/www/livedata/app/local_settings.py + healthcheck: + test: wget --no-verbose --tries=1 --spider http://localhost:8000/admin || exit 1 + interval: 60s + retries: 5 + start_period: 20s + timeout: 10s + depends_on: + db: + condition: service_healthy + webmon: + condition: service_healthy + autoheal: restart: always image: willfarrell/autoheal @@ -168,4 +192,12 @@ services: volumes: web-static: + web-static-livedata: pgdata: + + +networks: + default: + ipam: + config: + - subnet: 172.16.238.0/24 diff --git a/docs/developer/instruction/build.rst b/docs/developer/instruction/build.rst index 159f2751..ca261dd8 100644 --- a/docs/developer/instruction/build.rst +++ b/docs/developer/instruction/build.rst @@ -42,12 +42,12 @@ If the environment already exists, ``conda_environment.yml`` can be used to upda Running system test ------------------- -The system test are run via `.github/workflow/system.yml `_ . +The system test are run via `.github/workflow/systemtests.yml `_ . .. code-block:: shell make all # wheels and test data - LDAP_SERVER_URI=. LDAP_DOMAIN_COMPONENT=. docker-compose up --build + LDAP_SERVER_URI=. LDAP_DOMAIN_COMPONENT=. DJANGO_SETTINGS_MODULE=reporting.reporting_app.settings.envtest docker-compose up --build Wait for a time for everything to get up and running. This is normally noted by seeing a collection of worker threads starting. @@ -55,8 +55,7 @@ Once started tests can be run via .. code-block:: shell - DJANGO_SETTINGS_MODULE=reporting.reporting_app.settings.envtest \ - python -m pytest tests + LDAP_SERVER_URI=. LDAP_DOMAIN_COMPONENT=. 
DJANGO_SETTINGS_MODULE=reporting.reporting_app.settings.envtest python -m pytest tests Setup and Deployment for Development ------------------------------------ diff --git a/nginx/django.conf b/nginx/django.conf index d7a9abb3..8b1e7680 100644 --- a/nginx/django.conf +++ b/nginx/django.conf @@ -2,6 +2,10 @@ upstream django { server webmon:8000; } +upstream livedata { + server livedata:8000; +} + server { listen 80 default_server; @@ -46,3 +50,47 @@ server { stub_status on; } } + + +server { + + listen 443 ssl; + listen [::]:443 ssl ipv6only=on; + + server_name 172.16.238.222; + ssl_certificate nginx.crt; + ssl_certificate_key nginx.key; + + client_max_body_size 0; + + location / { + proxy_pass http://livedata; + proxy_set_header Host $http_host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header X-Forwarded-Host $server_name; + proxy_read_timeout 900; + } + + location /static/ { + autoindex on; + alias /var/www/livedata/static/; + } + + error_page 404 /404.html; + location = /40x.html { + root /usr/share/nginx/html; + internal; + } + + error_page 500 502 503 504 /50x.html; + location = /50x.html { + root /usr/share/nginx/html; + internal; + } + + location /stats/nginx { + stub_status on; + } +} diff --git a/nginx/san.cnf b/nginx/san.cnf new file mode 100644 index 00000000..0d8a9443 --- /dev/null +++ b/nginx/san.cnf @@ -0,0 +1,18 @@ +[req] +default_bits = 2048 +distinguished_name = req_distinguished_name +req_extensions = req_ext +x509_extensions = v3_req +prompt = no +[req_distinguished_name] +countryName = XX +stateOrProvinceName = N/A +localityName = N/A +organizationName = Self-signed certificate +commonName = 120.0.0.1: Self-signed certificate +[req_ext] +subjectAltName = @alt_names +[v3_req] +subjectAltName = @alt_names +[alt_names] +IP.1 = 172.16.238.222 diff --git a/tests/configuration/post_process_consumer.conf b/tests/configuration/post_process_consumer.conf index 7875163d..7d171dc7 100644 --- a/tests/configuration/post_process_consumer.conf +++ b/tests/configuration/post_process_consumer.conf @@ -24,5 +24,8 @@ "oncat_reduced_processor.ONCatProcessor", "create_reduction_script_processor.CreateReductionScriptProcessor", "reduction_processor.ReductionProcessor" - ] + ], + "publish_url_template": "https://172.16.238.222/plots/$instrument/$run_number/upload_plot_data/", + "publisher_username": "workflow", + "publisher_password": "workflow" } diff --git a/tests/data/ARCS/shared/autoreduce/reduce_ARCS.py b/tests/data/ARCS/shared/autoreduce/reduce_ARCS.py index 8f4e4cf0..9ab0915f 100644 --- a/tests/data/ARCS/shared/autoreduce/reduce_ARCS.py +++ b/tests/data/ARCS/shared/autoreduce/reduce_ARCS.py @@ -1,6 +1,20 @@ #!/usr/bin/env python +import os import sys +import socket from datetime import datetime +from postprocessing.publish_plot import publish_plot if __name__ == "__main__": - print("Running reduction for " + sys.argv[1] + " at " + datetime.isoformat(datetime.now())) + time = datetime.isoformat(datetime.now()) + filename = sys.argv[1] + print("Running reduction for " + filename + " at " + time) + + publish_plot( + "ARCS", + os.path.basename(filename).split(".")[0].split("_")[-1], + files={ + "file": f"
<div>Example Plot Data</div><div>Filename: {filename}</div>" + f"<div>Time: {time}</div><div>Hostname: {socket.gethostname()}</div>
" + }, + ) diff --git a/tests/test_livedata.py b/tests/test_livedata.py new file mode 100644 index 00000000..abdcb953 --- /dev/null +++ b/tests/test_livedata.py @@ -0,0 +1,86 @@ +import time +import os + +import psycopg2 +import requests + +LIVEDATA_TEST_URL = "https://172.16.238.222" +WEBMON_TEST_URL = "http://localhost" + + +class TestLiveDataServer: + instrument = "arcs" + IPTS = "IPTS-27800" + run_number = 214583 + + @classmethod + def setup_class(cls): + """Clean the database before running tests""" + conn = psycopg2.connect( + database=os.environ.get("DATABASE_NAME", "workflow"), + user=os.environ.get("DATABASE_USER", "workflow"), + password=os.environ.get("DATABASE_PASS", "workflow"), + port=os.environ.get("DATABASE_PORT", 5432), + host="localhost", + ) + cur = conn.cursor() + cur.execute("DELETE FROM plots_plotdata") + cur.execute("DELETE FROM plots_datarun") + cur.execute("DELETE FROM plots_instrument") + conn.commit() + conn.close() + + def get_session(self): + URL = WEBMON_TEST_URL + "/users/login" + client = requests.session() + + # Retrieve the CSRF token first + client.get(URL) # sets the cookie + csrftoken = client.cookies["csrftoken"] + + login_data = dict(username="workflow", password="workflow", csrfmiddlewaretoken=csrftoken) + response = client.post(URL, data=login_data) + assert response.status_code == 200 + return client + + def send_request(self, task, run_number, requestType): + client = self.get_session() + data = dict( + csrfmiddlewaretoken=client.cookies["csrftoken"], + instrument=self.instrument, + experiment=self.IPTS, + run_list=self.run_number, + create_as_needed="on", + task=task, + button_choice=requestType, + ) + response = client.post(WEBMON_TEST_URL + "/report/processing", data=data) + assert response.status_code == 200 + time.sleep(1) + return response.text + + def test_reduction_request_livedata(self): + ssl_crt_filename = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../nginx/nginx.crt") + print(ssl_crt_filename) + # first check that the there isn't an existing plot, should 404 + response = requests.get( + f"{LIVEDATA_TEST_URL}/plots/{self.instrument}/{self.run_number}/update/html/", verify=ssl_crt_filename + ) + assert response.status_code == 404 + + # send data ready request, which should trigger autoreduction and therefore publish a plot to livedata + self.send_request("POSTPROCESS.DATA_READY", 123456, requestType="submit") + + # the data should now be on livedata + response = requests.get( + f"{LIVEDATA_TEST_URL}/plots/{self.instrument}/{self.run_number}/update/html/", verify=ssl_crt_filename + ) + assert response.status_code == 200 + assert "Example Plot Data" in response.text + assert "Filename: /SNS/ARCS/IPTS-27800/nexus/ARCS_214583.nxs.h5" in response.text + assert "Hostname: autoreducer" in response.text + + # now verify that the run report page is templated correctly + client = self.get_session() + page = client.get(f"{WEBMON_TEST_URL}/report/{self.instrument}/{self.run_number}/") + assert "https://172.16.238.222:443/plots/arcs/214583/update/html/" in page.text