feat: granule links via event subscription #149

Workflow file for this run

name: CI
on:
  pull_request:
    branches: [main]
defaults:
  run:
    shell: bash
jobs:
  linting:
    runs-on: ubuntu-20.04
    env:
      PIPENV_NO_INHERIT: TRUE
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.11
        uses: actions/setup-python@v4
        with:
          python-version: 3.11
      - name: Install Pipenv
        run: curl https://raw.githubusercontent.com/pypa/pipenv/master/get-pipenv.py | python
      - name: Install tox
        run: |
          pip install "tox>=3.18.0"
      - name: Get pipenv venv hashes
        id: hashes
        run: |
          echo "root=$(python -c 'import sys; import base64; import hashlib; print(base64.urlsafe_b64encode(hashlib.sha256(sys.argv[-1].encode()).digest()[:6]).decode()[:8])' $(pwd)/Pipfile)" >> $GITHUB_OUTPUT
          echo "alembic=$(python -c 'import sys; import base64; import hashlib; print(base64.urlsafe_b64encode(hashlib.sha256(sys.argv[-1].encode()).digest()[:6]).decode()[:8])' $(pwd)/alembic_migration/Pipfile)" >> $GITHUB_OUTPUT
          echo "dategenerator=$(python -c 'import sys; import base64; import hashlib; print(base64.urlsafe_b64encode(hashlib.sha256(sys.argv[-1].encode()).digest()[:6]).decode()[:8])' $(pwd)/lambdas/date_generator/Pipfile)" >> $GITHUB_OUTPUT
          echo "linkfetcher=$(python -c 'import sys; import base64; import hashlib; print(base64.urlsafe_b64encode(hashlib.sha256(sys.argv[-1].encode()).digest()[:6]).decode()[:8])' $(pwd)/lambdas/link_fetcher/Pipfile)" >> $GITHUB_OUTPUT
          echo "downloader=$(python -c 'import sys; import base64; import hashlib; print(base64.urlsafe_b64encode(hashlib.sha256(sys.argv[-1].encode()).digest()[:6]).decode()[:8])' $(pwd)/lambdas/downloader/Pipfile)" >> $GITHUB_OUTPUT
          echo "mockscihubproductapi=$(python -c 'import sys; import base64; import hashlib; print(base64.urlsafe_b64encode(hashlib.sha256(sys.argv[-1].encode()).digest()[:6]).decode()[:8])' $(pwd)/lambdas/mock_scihub_product_api/Pipfile)" >> $GITHUB_OUTPUT
          echo "mockscihubsearchapi=$(python -c 'import sys; import base64; import hashlib; print(base64.urlsafe_b64encode(hashlib.sha256(sys.argv[-1].encode()).digest()[:6]).decode()[:8])' $(pwd)/lambdas/mock_scihub_search_api/Pipfile)" >> $GITHUB_OUTPUT
          echo "requeuer=$(python -c 'import sys; import base64; import hashlib; print(base64.urlsafe_b64encode(hashlib.sha256(sys.argv[-1].encode()).digest()[:6]).decode()[:8])' $(pwd)/lambdas/requeuer/Pipfile)" >> $GITHUB_OUTPUT
          # echo "db=$(python -c 'import sys; import base64; import hashlib; print(base64.urlsafe_b64encode(hashlib.sha256(sys.argv[-1].encode()).digest()[:6]).decode()[:8])' $(pwd)/layers/db/Pipfile)" >> $GITHUB_OUTPUT
      - name: Setup root cache
        uses: actions/cache@v3
        id: root-cache
        with:
          path: /home/runner/.local/share/virtualenvs/hls-sentinel2-downloader-serverless-${{ steps.hashes.outputs.root }}
          key: ${{ hashFiles('/home/runner/work/hls-sentinel2-downloader-serverless/hls-sentinel2-downloader-serverless/Pipfile.lock') }}-2
      - name: Setup alembic_migration cache
        uses: actions/cache@v3
        id: alembic-cache
        with:
          path: /home/runner/.local/share/virtualenvs/alembic_migration-${{ steps.hashes.outputs.alembic }}
          key: ${{ hashFiles('/home/runner/work/hls-sentinel2-downloader-serverless/hls-sentinel2-downloader-serverless/alembic_migration/Pipfile.lock') }}
      - name: Setup date_generator cache
        uses: actions/cache@v3
        id: date-generator-cache
        with:
          path: /home/runner/.local/share/virtualenvs/date_generator-${{ steps.hashes.outputs.dategenerator }}
          key: ${{ hashFiles('/home/runner/work/hls-sentinel2-downloader-serverless/hls-sentinel2-downloader-serverless/lambdas/date_generator/Pipfile.lock') }}
      - name: Setup link_fetcher cache
        uses: actions/cache@v3
        id: link-fetcher-cache
        with:
          path: /home/runner/.local/share/virtualenvs/link_fetcher-${{ steps.hashes.outputs.linkfetcher }}
          key: ${{ hashFiles('/home/runner/work/hls-sentinel2-downloader-serverless/hls-sentinel2-downloader-serverless/lambdas/link_fetcher/Pipfile.lock') }}
      - name: Setup downloader cache
        uses: actions/cache@v3
        id: downloader-cache
        with:
          path: /home/runner/.local/share/virtualenvs/downloader-${{ steps.hashes.outputs.downloader }}
          key: ${{ hashFiles('/home/runner/work/hls-sentinel2-downloader-serverless/hls-sentinel2-downloader-serverless/lambdas/downloader/Pipfile.lock') }}
      - name: Setup mock_scihub_product_api cache
        uses: actions/cache@v3
        id: mock-scihub-product-api-cache
        with:
          path: /home/runner/.local/share/virtualenvs/mock_scihub_product_api-${{ steps.hashes.outputs.mockscihubproductapi }}
          key: ${{ hashFiles('/home/runner/work/hls-sentinel2-downloader-serverless/hls-sentinel2-downloader-serverless/lambdas/mock_scihub_product_api/Pipfile.lock') }}
      - name: Setup mock_scihub_search_api cache
        uses: actions/cache@v3
        id: mock-scihub-search-api-cache
        with:
          path: /home/runner/.local/share/virtualenvs/mock_scihub_search_api-${{ steps.hashes.outputs.mockscihubsearchapi }}
          key: ${{ hashFiles('/home/runner/work/hls-sentinel2-downloader-serverless/hls-sentinel2-downloader-serverless/lambdas/mock_scihub_search_api/Pipfile.lock') }}
      - name: Setup requeuer cache
        uses: actions/cache@v3
        id: requeuer-cache
        with:
          path: /home/runner/.local/share/virtualenvs/requeuer-${{ steps.hashes.outputs.requeuer }}
          key: ${{ hashFiles('/home/runner/work/hls-sentinel2-downloader-serverless/hls-sentinel2-downloader-serverless/lambdas/requeuer/Pipfile.lock') }}
      # - name: Setup db cache
      #   uses: actions/cache@v3
      #   id: db-cache
      #   with:
      #     path: /home/runner/.local/share/virtualenvs/db-${{ steps.hashes.outputs.db }}
      #     key: ${{ hashFiles('/home/runner/work/hls-sentinel2-downloader-serverless/hls-sentinel2-downloader-serverless/layers/db/Pipfile.lock') }}
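      # Each install below is skipped when its cache hit; the root and db installs always
      # run because their cache-hit guards are commented out.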
      - name: Install root dependencies
        # if: steps.root-cache.outputs.cache-hit != 'true'
        run: |
          pipenv install --dev
      - name: Install alembic dependencies
        if: steps.alembic-cache.outputs.cache-hit != 'true'
        run: |
          make -C alembic_migration install
      - name: Install date_generator dependencies
        if: steps.date-generator-cache.outputs.cache-hit != 'true'
        run: |
          make -C lambdas/date_generator install
      - name: Install link_fetcher dependencies
        if: steps.link-fetcher-cache.outputs.cache-hit != 'true'
        run: |
          make -C lambdas/link_fetcher install
      - name: Install downloader dependencies
        if: steps.downloader-cache.outputs.cache-hit != 'true'
        run: |
          make -C lambdas/downloader install
      - name: Install mock_scihub_product_api dependencies
        if: steps.mock-scihub-product-api-cache.outputs.cache-hit != 'true'
        run: |
          make -C lambdas/mock_scihub_product_api install
      - name: Install mock_scihub_search_api dependencies
        if: steps.mock-scihub-search-api-cache.outputs.cache-hit != 'true'
        run: |
          make -C lambdas/mock_scihub_search_api install
      - name: Install requeuer dependencies
        if: steps.requeuer-cache.outputs.cache-hit != 'true'
        run: |
          make -C lambdas/requeuer install
      - name: Install db dependencies
        # if: steps.db-cache.outputs.cache-hit != 'true'
        run: |
          make -C layers/db install
      - name: Run linting
        run: |
          make lint
  unit-tests:
    runs-on: ubuntu-20.04
    env:
      PIPENV_NO_INHERIT: TRUE
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.11
        uses: actions/setup-python@v4
        with:
          python-version: 3.11
      - name: Install Pipenv
        run: curl https://raw.githubusercontent.com/pypa/pipenv/master/get-pipenv.py | python
      - name: Install tox
        run: |
          pip install "tox>=3.18.0"
      - name: Get pipenv venv hashes
        id: hashes
        run: |
          echo "root=$(python -c 'import sys; import base64; import hashlib; print(base64.urlsafe_b64encode(hashlib.sha256(sys.argv[-1].encode()).digest()[:6]).decode()[:8])' $(pwd)/Pipfile)" >> $GITHUB_OUTPUT
          echo "alembic=$(python -c 'import sys; import base64; import hashlib; print(base64.urlsafe_b64encode(hashlib.sha256(sys.argv[-1].encode()).digest()[:6]).decode()[:8])' $(pwd)/alembic_migration/Pipfile)" >> $GITHUB_OUTPUT
          echo "dategenerator=$(python -c 'import sys; import base64; import hashlib; print(base64.urlsafe_b64encode(hashlib.sha256(sys.argv[-1].encode()).digest()[:6]).decode()[:8])' $(pwd)/lambdas/date_generator/Pipfile)" >> $GITHUB_OUTPUT
          echo "linkfetcher=$(python -c 'import sys; import base64; import hashlib; print(base64.urlsafe_b64encode(hashlib.sha256(sys.argv[-1].encode()).digest()[:6]).decode()[:8])' $(pwd)/lambdas/link_fetcher/Pipfile)" >> $GITHUB_OUTPUT
          echo "downloader=$(python -c 'import sys; import base64; import hashlib; print(base64.urlsafe_b64encode(hashlib.sha256(sys.argv[-1].encode()).digest()[:6]).decode()[:8])' $(pwd)/lambdas/downloader/Pipfile)" >> $GITHUB_OUTPUT
          echo "mockscihubproductapi=$(python -c 'import sys; import base64; import hashlib; print(base64.urlsafe_b64encode(hashlib.sha256(sys.argv[-1].encode()).digest()[:6]).decode()[:8])' $(pwd)/lambdas/mock_scihub_product_api/Pipfile)" >> $GITHUB_OUTPUT
          echo "mockscihubsearchapi=$(python -c 'import sys; import base64; import hashlib; print(base64.urlsafe_b64encode(hashlib.sha256(sys.argv[-1].encode()).digest()[:6]).decode()[:8])' $(pwd)/lambdas/mock_scihub_search_api/Pipfile)" >> $GITHUB_OUTPUT
          echo "requeuer=$(python -c 'import sys; import base64; import hashlib; print(base64.urlsafe_b64encode(hashlib.sha256(sys.argv[-1].encode()).digest()[:6]).decode()[:8])' $(pwd)/lambdas/requeuer/Pipfile)" >> $GITHUB_OUTPUT
          # echo "db=$(python -c 'import sys; import base64; import hashlib; print(base64.urlsafe_b64encode(hashlib.sha256(sys.argv[-1].encode()).digest()[:6]).decode()[:8])' $(pwd)/layers/db/Pipfile)" >> $GITHUB_OUTPUT
      - name: Setup root cache
        uses: actions/cache@v3
        id: root-cache
        with:
          path: /home/runner/.local/share/virtualenvs/hls-sentinel2-downloader-serverless-${{ steps.hashes.outputs.root }}
          key: ${{ hashFiles('/home/runner/work/hls-sentinel2-downloader-serverless/hls-sentinel2-downloader-serverless/Pipfile.lock') }}-2
      - name: Setup alembic_migration cache
        uses: actions/cache@v3
        id: alembic-cache
        with:
          path: /home/runner/.local/share/virtualenvs/alembic_migration-${{ steps.hashes.outputs.alembic }}
          key: ${{ hashFiles('/home/runner/work/hls-sentinel2-downloader-serverless/hls-sentinel2-downloader-serverless/alembic_migration/Pipfile.lock') }}
      - name: Setup date_generator cache
        uses: actions/cache@v3
        id: date-generator-cache
        with:
          path: /home/runner/.local/share/virtualenvs/date_generator-${{ steps.hashes.outputs.dategenerator }}
          key: ${{ hashFiles('/home/runner/work/hls-sentinel2-downloader-serverless/hls-sentinel2-downloader-serverless/lambdas/date_generator/Pipfile.lock') }}
      - name: Setup link_fetcher cache
        uses: actions/cache@v3
        id: link-fetcher-cache
        with:
          path: /home/runner/.local/share/virtualenvs/link_fetcher-${{ steps.hashes.outputs.linkfetcher }}
          key: ${{ hashFiles('/home/runner/work/hls-sentinel2-downloader-serverless/hls-sentinel2-downloader-serverless/lambdas/link_fetcher/Pipfile.lock') }}
      - name: Setup downloader cache
        uses: actions/cache@v3
        id: downloader-cache
        with:
          path: /home/runner/.local/share/virtualenvs/downloader-${{ steps.hashes.outputs.downloader }}
          key: ${{ hashFiles('/home/runner/work/hls-sentinel2-downloader-serverless/hls-sentinel2-downloader-serverless/lambdas/downloader/Pipfile.lock') }}
      - name: Setup mock_scihub_product_api cache
        uses: actions/cache@v3
        id: mock-scihub-product-api-cache
        with:
          path: /home/runner/.local/share/virtualenvs/mock_scihub_product_api-${{ steps.hashes.outputs.mockscihubproductapi }}
          key: ${{ hashFiles('/home/runner/work/hls-sentinel2-downloader-serverless/hls-sentinel2-downloader-serverless/lambdas/mock_scihub_product_api/Pipfile.lock') }}
      - name: Setup mock_scihub_search_api cache
        uses: actions/cache@v3
        id: mock-scihub-search-api-cache
        with:
          path: /home/runner/.local/share/virtualenvs/mock_scihub_search_api-${{ steps.hashes.outputs.mockscihubsearchapi }}
          key: ${{ hashFiles('/home/runner/work/hls-sentinel2-downloader-serverless/hls-sentinel2-downloader-serverless/lambdas/mock_scihub_search_api/Pipfile.lock') }}
      - name: Setup requeuer cache
        uses: actions/cache@v3
        id: requeuer-cache
        with:
          path: /home/runner/.local/share/virtualenvs/requeuer-${{ steps.hashes.outputs.requeuer }}
          key: ${{ hashFiles('/home/runner/work/hls-sentinel2-downloader-serverless/hls-sentinel2-downloader-serverless/lambdas/requeuer/Pipfile.lock') }}
      # - name: Setup db cache
      #   uses: actions/cache@v3
      #   id: db-cache
      #   with:
      #     path: /home/runner/.local/share/virtualenvs/db-${{ steps.hashes.outputs.db }}
      #     key: ${{ hashFiles('/home/runner/work/hls-sentinel2-downloader-serverless/hls-sentinel2-downloader-serverless/layers/db/Pipfile.lock') }}
      - name: Install root dependencies
        # if: steps.root-cache.outputs.cache-hit != 'true'
        run: |
          pipenv install --dev
      - name: Install alembic dependencies
        if: steps.alembic-cache.outputs.cache-hit != 'true'
        run: |
          make -C alembic_migration install
      - name: Install date_generator dependencies
        if: steps.date-generator-cache.outputs.cache-hit != 'true'
        run: |
          make -C lambdas/date_generator install
      - name: Install link_fetcher dependencies
        if: steps.link-fetcher-cache.outputs.cache-hit != 'true'
        run: |
          make -C lambdas/link_fetcher install
      - name: Install downloader dependencies
        if: steps.downloader-cache.outputs.cache-hit != 'true'
        run: |
          make -C lambdas/downloader install
      - name: Install mock_scihub_product_api dependencies
        if: steps.mock-scihub-product-api-cache.outputs.cache-hit != 'true'
        run: |
          make -C lambdas/mock_scihub_product_api install
      - name: Install mock_scihub_search_api dependencies
        if: steps.mock-scihub-search-api-cache.outputs.cache-hit != 'true'
        run: |
          make -C lambdas/mock_scihub_search_api install
      - name: Install requeuer dependencies
        if: steps.requeuer-cache.outputs.cache-hit != 'true'
        run: |
          make -C lambdas/requeuer install
      - name: Install db dependencies
        # if: steps.db-cache.outputs.cache-hit != 'true'
        run: |
          make -C layers/db install
          cd layers/db
          pipenv run pip list
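      # Dummy Postgres credentials written to per-package .env files; nothing here is a real
      # secret (assumption: the unit tests provision their own throwaway database and read
      # these values from the .env files).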
      - name: Create .env files for tests
        run: |
          cat <<EOF >> lambdas/link_fetcher/.env
          PG_PASSWORD="test-pass"
          PG_USER="test-user"
          PG_DB="test-db"
          EOF
          cat <<EOF >> lambdas/downloader/.env
          PG_PASSWORD="test-pass"
          PG_USER="test-user"
          PG_DB="test-db"
          AWS_DEFAULT_REGION="us-east-1"
          EOF
          cat <<EOF >> lambdas/requeuer/.env
          PG_PASSWORD="test-pass"
          PG_USER="test-user"
          PG_DB="test-db"
          AWS_DEFAULT_REGION="us-east-1"
          EOF
          cat <<EOF >> layers/db/.env
          PG_PASSWORD="test-pass"
          PG_USER="test-user"
          PG_DB="test-db"
          EOF
          cat <<EOF >> alembic_migration/.env
          PG_PASSWORD="test-pass"
          PG_USER="test-user"
          PG_DB="test-db"
          EOF
      - name: Run unit tests
        run: |
          make unit-tests
  integration-tests:
    runs-on: ubuntu-20.04
    needs: [unit-tests, linting]
    environment:
      name: dev
    steps:
      - uses: actions/checkout@v4
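      # Derive a branch + short-SHA identifier so the deployed stack is namespaced per run.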
      - name: Setup git vars
        id: vars
        run: |
          echo "branch=$(echo ${GITHUB_REF#refs/heads/})" >> $GITHUB_OUTPUT
          echo "commit=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_OUTPUT
      - name: Create .env file
        run: |
          touch .env
          echo OWNER="ci-developmentseed" >> .env
          echo IDENTIFIER=${{ steps.vars.outputs.branch }}-${{ steps.vars.outputs.commit }} >> .env
          echo PERMISSIONS_BOUNDARY_ARN=${{ vars.PERMISSIONS_BOUNDARY_ARN }} >> .env
      - name: Set up Python 3.11
        uses: actions/setup-python@v4
        with:
          python-version: 3.11
      - name: Set up Node
        uses: actions/setup-node@v4
        with:
          node-version-file: ".nvmrc"
      - name: Install AWS CDK
        run: |
          npm install
      - name: Install Pipenv
        run: curl https://raw.githubusercontent.com/pypa/pipenv/master/get-pipenv.py | python
        shell: bash
      - name: Install tox
        run: |
          pip install "tox>=3.18.0"
      - name: Get pipenv venv hashes
        id: hashes
        run: |
          echo "root=$(python -c 'import sys; import base64; import hashlib; print(base64.urlsafe_b64encode(hashlib.sha256(sys.argv[-1].encode()).digest()[:6]).decode()[:8])' $(pwd)/Pipfile)" >> $GITHUB_OUTPUT
      - name: Setup root cache
        uses: actions/cache@v3
        id: root-cache
        with:
          path: /home/runner/.local/share/virtualenvs/hls-sentinel2-downloader-serverless-${{ steps.hashes.outputs.root }}
          key: ${{ hashFiles('/home/runner/work/hls-sentinel2-downloader-serverless/hls-sentinel2-downloader-serverless/Pipfile.lock') }}-2
      - name: Install root dependencies
        # if: steps.root-cache.outputs.cache-hit != 'true'
        run: |
          pipenv install --dev
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-west-2
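      # Deploy an ephemeral stack for this branch/commit, run the integration tests against
      # it, then always tear it down, even when the tests fail.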
      - name: Deploy hls-s2-downloader-serverless integration
        run: |
          make deploy-integration
      - name: Run integration tests
        run: |
          make integration-tests
      - name: Destroy hls-s2-downloader-serverless integration
        if: always()
        run: |
          make destroy-integration