add log level option for indexing tool (#601) #3801
# yamllint disable-file
name: Run Code Checks

on:
  pull_request:
  push:
    # Only run on push to `develop` branch to prevent double execution of
    # all the code checks.
    # Everything should be merged through PRs anyway.
    branches:
      - develop
      - test-ci-*
      - pypi/publish
    paths:
      - '**'
      - '!notebooks/**'
      - '!docs/**'
      - '!old/**'
      - '!README.md'
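# The jobs below hand artifacts to each other through actions/cache rather than
# upload-artifact: built wheels are cached under the commit SHA, and the conda
# test environment is cached under a hash of tests/test-env.yml.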
jobs:
  build-wheels:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Setup Python
        uses: actions/setup-python@v1
        with:
          python-version: 3.9
      - uses: actions/cache@v3
        id: wheels_cache
        with:
          path: ./wheels
          key: wheels-${{ github.sha }}
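      # The wheels cache above is keyed on the commit SHA; the `test-wheels` and
      # `publish-pypi` jobs restore the same key to pick up the packages built here.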
      - name: Install Dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install --upgrade setuptools
          python -m pip install --upgrade \
            toml \
            wheel \
            packaging \
            twine
          python -m pip freeze
      - name: Build Clean Packages
        run: |
          mkdir -p ./wheels/clean
          ./scripts/build-wheels.sh ./wheels/clean
          find ./wheels/clean -type f
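      # Rewrite `__version__ =` in every package before the second build, presumably
      # so that the dev wheels carry a run-specific version (scripts/patch_version.py
      # is passed the workflow run number).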
      - name: Patch Package Versions
        run: |
          grep -R --files-with-matches --include '*.py' '__version__ =' \
            | xargs python ./scripts/patch_version.py ${GITHUB_RUN_NUMBER:-0}
      - name: Build Dev Packages
        run: |
          mkdir -p ./wheels/dev
          ./scripts/build-wheels.sh ./wheels/dev
          find ./wheels/dev -type f
  build-test-env-base:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/cache@v3
        id: conda_cache
        with:
          path: |
            tests/env
          key: ${{ runner.os }}-test-env-${{ hashFiles('tests/test-env.yml') }}
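      # Everything below only runs when the cache misses; on a hit the
      # pre-built environment under tests/env is reused as-is.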
      - uses: conda-incubator/setup-miniconda@v2
        if: steps.conda_cache.outputs.cache-hit != 'true'
        with:
          channels: conda-forge,defaults
          channel-priority: true
          activate-environment: ""
          # mamba-version: "*"
          use-mamba: true
          miniforge-variant: Mambaforge
      - name: Dump Conda Environment Info
        shell: bash -l {0}
        if: steps.conda_cache.outputs.cache-hit != 'true'
        run: |
          conda info
          conda list
          mamba -V
          conda config --show-sources
          conda config --show
          printenv | sort
      - name: Build Python Environment for Testing
        shell: bash -l {0}
        if: steps.conda_cache.outputs.cache-hit != 'true'
        run: |
          mamba env create -f tests/test-env.yml -p tests/env
      - name: Check Python Env
        shell: bash -l {0}
        if: steps.conda_cache.outputs.cache-hit != 'true'
        run: |
          mamba env export -p tests/env
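  # Runs the test suite against an editable (in-place) install of the checkout
  # and reports coverage; the conda environment built above is restored from cache.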
  test-with-coverage:
    runs-on: ubuntu-latest
    needs:
      - build-test-env-base
    steps:
      - uses: actions/checkout@v2
      - name: Get Conda Environment from Cache
        uses: actions/cache@v3
        id: conda_cache
        with:
          path: |
            tests/env
          key: ${{ runner.os }}-test-env-${{ hashFiles('tests/test-env.yml') }}
      - name: Update PATH
        shell: bash
        run: |
          echo "$(pwd)/tests/env/bin" >> $GITHUB_PATH
          export PATH="$(pwd)/tests/env/bin:${PATH}"
      - name: Install in Edit mode
        shell: bash
        run: |
          which python
          which createdb
          which datacube
          ./scripts/dev-install.sh --no-deps
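      # Spin up a throwaway PostgreSQL instance inside the workspace and point
      # datacube at it via DATACUBE_DB_URL.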
      - name: Start Test DB
        shell: bash
        run: |
          echo "Launching test db"
          pgdata=$(pwd)/.dbdata
          initdb -D ${pgdata} --auth-host=md5 --encoding=UTF8
          pg_ctl -D ${pgdata} -l "${pgdata}/pg.log" start
          createdb datacube
          datacube system init
          pip list --format=freeze
        env:
          DATACUBE_DB_URL: postgresql:///datacube
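      # Add the eo3_sentinel_ard metadata type (fetched from the dea-config repo),
      # presumably required by some of the tests, then run pytest with coverage
      # over libs/ and apps/.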
      - name: Run Tests
        shell: bash
        run: |
          datacube system check
          datacube metadata add "https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/product_metadata/eo3_sentinel_ard.odc-type.yaml"
          echo "Running Tests"
          pytest --cov=. \
            --cov-report=html \
            --cov-report=xml:coverage.xml \
            --timeout=30 \
            libs apps
        env:
          AWS_DEFAULT_REGION: us-west-2
          DASK_TEMPORARY_DIRECTORY: /tmp/dask
          DATACUBE_DB_URL: postgresql:///datacube
      - name: Upload Coverage
        if: |
          github.repository == 'opendatacube/odc-tools'
        uses: codecov/codecov-action@v1
        with:
          fail_ci_if_error: false
          verbose: false
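  # Same test run as above, but against the freshly built wheels instead of an
  # editable checkout, to confirm the packaged artifacts work before publishing.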
  test-wheels:
    runs-on: ubuntu-latest
    needs:
      - build-test-env-base
      - build-wheels
    steps:
      - uses: actions/checkout@v2
      - name: Get Wheels from Cache
        uses: actions/cache@v3
        id: wheels_cache
        with:
          path: ./wheels
          key: wheels-${{ github.sha }}
      - name: Get Conda Environment from Cache
        uses: actions/cache@v3
        id: conda_cache
        with:
          path: |
            tests/env
          key: ${{ runner.os }}-test-env-${{ hashFiles('tests/test-env.yml') }}
      - name: Update PATH
        shell: bash
        run: |
          echo "$(pwd)/tests/env/bin" >> $GITHUB_PATH
      - name: Install wheels for testing
        shell: bash
        run: |
          which python
          which createdb
          which datacube
          ls -lh wheels/clean
          python -m pip install --no-deps wheels/clean/*whl
          python -m pip check || true
      - name: Start Test DB
        shell: bash
        run: |
          echo "Launching test db"
          pgdata=$(pwd)/.dbdata
          initdb -D ${pgdata} --auth-host=md5 --encoding=UTF8
          pg_ctl -D ${pgdata} -l "${pgdata}/pg.log" start
          createdb datacube
          datacube system init
        env:
          DATACUBE_DB_URL: postgresql:///datacube
      - name: Run Tests
        shell: bash
        run: |
          datacube system check
          datacube metadata add "https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/product_metadata/eo3_sentinel_ard.odc-type.yaml"
          echo "Running Tests"
          pytest --timeout=30 libs apps
        env:
          AWS_DEFAULT_REGION: us-west-2
          DASK_TEMPORARY_DIRECTORY: /tmp/dask
          DATACUBE_DB_URL: postgresql:///datacube
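  # Publishes the clean (non-dev) wheels to PyPI, one matrix job per package.
  # Only runs for pushes to `stable` or `pypi/publish` on the upstream
  # opendatacube/odc-tools repository.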
  publish-pypi:
    if: |
      github.event_name == 'push'
      && github.repository == 'opendatacube/odc-tools'
      && (github.ref == 'refs/heads/stable' || github.ref == 'refs/heads/pypi/publish')
    strategy:
      matrix:
        pkg:
          - odc-cloud
          - odc-io
          - odc-ui
          - odc-apps-cloud
          - odc-apps-dc-tools
    needs:
      - build-wheels
      - test-wheels
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Config
        if: |
          github.event_name == 'push'
          && github.repository == 'opendatacube/odc-tools'
          && (github.ref == 'refs/heads/stable' || github.ref == 'refs/heads/pypi/publish')
        id: cfg
        env:
          PKG: ${{ matrix.pkg }}
        run: |
          tk="pypi_token_${PKG//-/_}"
          echo "tk=${tk}" >> $GITHUB_OUTPUT
          echo "publish=yes" >> $GITHUB_OUTPUT
      - name: Setup Python
        if: steps.cfg.outputs.publish == 'yes'
        uses: actions/setup-python@v1
        with:
          python-version: 3.8
      - name: Install Twine
        if: steps.cfg.outputs.publish == 'yes'
        run: |
          python -m pip install --upgrade pip
          python -m pip install --upgrade setuptools
          python -m pip install --upgrade \
            toml \
            wheel \
            twine
          python -m pip freeze
      - uses: actions/cache@v3
        id: wheels_cache
        if: steps.cfg.outputs.publish == 'yes'
        with:
          path: ./wheels
          key: wheels-${{ github.sha }}
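      # mk-pip-tree.sh presumably sorts the cached clean wheels into one directory
      # per package under ./pips, so each matrix job can upload only its own
      # package's files.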
      - name: Prepare for upload
        if: steps.cfg.outputs.publish == 'yes'
        run: |
          mkdir -p ./pips
          ./scripts/mk-pip-tree.sh ./wheels/clean ./pips
          find ./pips -type f
      - name: Upload to PyPI
        if: steps.cfg.outputs.publish == 'yes'
        env:
          TWINE_PASSWORD: ${{ secrets[ steps.cfg.outputs.tk ] }}
          TWINE_USERNAME: __token__
          PKG: ${{ matrix.pkg }}
        run: |
          ls pips/${PKG}
          twine upload --non-interactive --skip-existing pips/${PKG}/*