add categories for dfpl #442
Workflow file for this run
name: Galaxy Tool Linting and Tests for push and PR
on:
  pull_request:
    paths-ignore:
      - 'deprecated/**'
      - 'docs/**'
      - '*'
  push:
    branches:
      - main
      - master
    paths-ignore:
      - 'deprecated/**'
      - 'docs/**'
      - '*'
env:
  GALAXY_FORK: galaxyproject
  GALAXY_BRANCH: release_24.1
  MAX_CHUNKS: 4
  MAX_FILE_SIZE: 1M
concurrency:
  # Group runs by PR, but keep runs on the default branch separate
  # because we do not want to cancel ToolShed uploads
  group: pr-${{ (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main') && github.run_number || github.ref }}
  cancel-in-progress: true
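# For a pull request the group evaluates to something like `pr-refs/pull/<n>/merge`,
# so a newer push to the same PR cancels the run in progress; on master/main it
# evaluates to `pr-<run_number>`, which is unique per run, so default-branch runs
# (and their ToolShed uploads) are never cancelled.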
jobs:
  # the setup job does two things:
  # 1. cache the pip cache and .planemo
  # 2. determine the list of changed repositories
  # it produces one artifact which contains
  # - a file with the latest SHA from the chosen branch of the Galaxy repo
  # - a file containing the list of changed repositories
  # which are needed in subsequent steps.
  setup:
    name: Setup cache and determine changed repositories
    runs-on: ubuntu-latest
    outputs:
      galaxy-head-sha: ${{ steps.get-galaxy-sha.outputs.galaxy-head-sha }}
      repository-list: ${{ steps.discover.outputs.repository-list }}
      tool-list: ${{ steps.discover.outputs.tool-list }}
      chunk-count: ${{ steps.discover.outputs.chunk-count }}
      chunk-list: ${{ steps.discover.outputs.chunk-list }}
      commit-range: ${{ steps.discover.outputs.commit-range }}
    strategy:
      matrix:
        python-version: ['3.10']
    steps:
      - name: Print github context properties
        run: |
          echo 'event: ${{ github.event_name }}'
          echo 'sha: ${{ github.sha }}'
          echo 'ref: ${{ github.ref }}'
          echo 'head_ref: ${{ github.head_ref }}'
          echo 'base_ref: ${{ github.base_ref }}'
          echo 'event.before: ${{ github.event.before }}'
          echo 'event.after: ${{ github.event.after }}'
          echo 'repository_owner: ${{ github.repository_owner }}'
      - name: Determine latest commit in the Galaxy repo
        id: get-galaxy-sha
        run: echo "galaxy-head-sha=$(git ls-remote https://github.com/${{ env.GALAXY_FORK }}/galaxy refs/heads/${{ env.GALAXY_BRANCH }} | cut -f1)" >> $GITHUB_OUTPUT
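      # `git ls-remote <repo> <ref>` prints the commit SHA, a tab, and the ref name;
      # `cut -f1` keeps only the SHA. That SHA goes into the pip/.planemo cache keys
      # below, so both caches are rebuilt whenever the tracked Galaxy branch moves.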
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Cache .cache/pip
        uses: actions/cache@v4
        id: cache-pip
        with:
          path: ~/.cache/pip
          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ steps.get-galaxy-sha.outputs.galaxy-head-sha }}
      - name: Cache .planemo
        uses: actions/cache@v4
        id: cache-planemo
        with:
          path: ~/.planemo
          key: planemo_cache_py_${{ matrix.python-version }}_gxy_${{ steps.get-galaxy-sha.outputs.galaxy-head-sha }}
      # Install the `wheel` package so that when installing other packages which
      # are not available as wheels, pip will build a wheel for them, which can be cached.
      - name: Install wheel
        run: pip install wheel
      - name: Install flake8
        run: pip install flake8 flake8-import-order
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Fake a Planemo run to update cache and determine commit range, repositories, and chunks
        uses: galaxyproject/planemo-ci-action@v1
        id: discover
        with:
          create-cache: ${{ steps.cache-pip.outputs.cache-hit != 'true' || steps.cache-planemo.outputs.cache-hit != 'true' }}
          galaxy-fork: ${{ env.GALAXY_FORK }}
          galaxy-branch: ${{ env.GALAXY_BRANCH }}
          max-chunks: ${{ env.MAX_CHUNKS }}
          python-version: ${{ matrix.python-version }}
      - name: Show commit range
        run: echo '${{ steps.discover.outputs.commit-range }}'
      - name: Show repository list
        run: echo '${{ steps.discover.outputs.repository-list }}'
      - name: Show tool list
        run: echo '${{ steps.discover.outputs.tool-list }}'
      - name: Show chunks
        run: |
          echo 'Using ${{ steps.discover.outputs.chunk-count }} chunks (${{ steps.discover.outputs.chunk-list }})'
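      # The discover outputs drive everything downstream: repository-list and tool-list
      # gate the lint, flake8, lintr, file_sizes and test jobs, and chunk-list is parsed
      # with fromJson() into the matrix of the test job, giving one parallel job per chunk.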
  # Planemo lint the changed repositories
  lint:
    name: Lint tool-list
    needs: setup
    if: ${{ needs.setup.outputs.repository-list != '' || needs.setup.outputs.tool-list != '' }}
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: ['3.10']
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Cache .cache/pip
        uses: actions/cache@v4
        id: cache-pip
        with:
          path: ~/.cache/pip
          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
      - name: Set fail level for pull request
        if: ${{ github.event_name == 'pull_request' }}
        run: echo "FAIL_LEVEL=warn" >> "$GITHUB_ENV"
      - name: Set fail level for merge
        if: ${{ github.event_name != 'pull_request' }}
        run: echo "FAIL_LEVEL=error" >> "$GITHUB_ENV"
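      # fail-level sets the lint level that is treated as fatal: on pull requests any
      # lint warning fails the job, while on pushes to the default branch (which go on
      # to deploy) only lint errors do.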
      - name: Planemo lint
        uses: galaxyproject/planemo-ci-action@v1
        id: lint
        with:
          mode: lint
          fail-level: ${{ env.FAIL_LEVEL }}
          repository-list: ${{ needs.setup.outputs.repository-list }}
          tool-list: ${{ needs.setup.outputs.tool-list }}
      - uses: actions/upload-artifact@v4
        if: ${{ failure() }}
        with:
          name: 'Tool linting output'
          path: lint_report.txt
  # flake8 of Python scripts in the changed repositories
  flake8:
    name: Lint Python scripts
    needs: setup
    if: ${{ github.event_name == 'pull_request' && needs.setup.outputs.repository-list != '' }}
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: ['3.10']
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Cache .cache/pip
        uses: actions/cache@v4
        id: cache-pip
        with:
          path: ~/.cache/pip
          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
      - name: Install flake8
        run: pip install flake8 flake8-import-order
      - name: Flake8
        run: echo '${{ needs.setup.outputs.repository-list }}' | xargs -d '\n' flake8 --output-file pylint_report.txt --tee
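      # repository-list is a newline-separated list of changed repository directories;
      # xargs -d '\n' hands them to flake8 as directory arguments, and --tee writes the
      # report both to stdout and to pylint_report.txt for the artifact below.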
      - uses: actions/upload-artifact@v4
        if: ${{ failure() }}
        with:
          name: 'Python linting output'
          path: pylint_report.txt
  lintr:
    name: Lint R scripts
    needs: setup
    if: ${{ needs.setup.outputs.repository-list != '' }}
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-20.04]
        r-version: ['release']
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
      - uses: r-lib/actions/setup-r@v2
        with:
          r-version: ${{ matrix.r-version }}
      - name: Cache R packages
        uses: actions/cache@v4
        with:
          path: ${{ env.R_LIBS_USER }}
          key: r_cache_${{ matrix.os }}_${{ matrix.r-version }}
      - name: Install packages
        uses: r-lib/actions/setup-r-dependencies@v2
        with:
          packages: |
            any::argparse
            any::styler
      - name: lintr
        run: |
          set -eo pipefail
          echo '${{ needs.setup.outputs.repository-list }}' | xargs -d '\n' -n 1 ./.github/styler.R --dry off
          git status
          git diff --exit-code | tee rlint_report.txt
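      # styler.R restyles the R scripts in place (--dry off); if any file changes,
      # `git diff --exit-code` returns non-zero and fails the step, and the diff is
      # captured in rlint_report.txt as the lint report.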
      - uses: actions/upload-artifact@v4
        if: ${{ failure() }}
        with:
          name: 'R linting output'
          path: rlint_report.txt
  file_sizes:
    name: Check file sizes
    needs: setup
    if: ${{ github.event_name == 'pull_request' && needs.setup.outputs.repository-list != '' }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Check file sizes
        run: |
          touch file_size_report.txt
          git diff --diff-filter=d --name-only ${{ needs.setup.outputs.commit-range }} > git.diff
          while read line; do
            find "$line" -type f -size +${{ env.MAX_FILE_SIZE }} >> file_size_report.txt
          done < git.diff
          if [[ -s file_size_report.txt ]]; then
            echo "Files larger than ${{ env.MAX_FILE_SIZE }} found"
            cat file_size_report.txt
            exit 1
          fi
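      # `git diff --name-only` over the PR's commit range lists the added/modified paths
      # (--diff-filter=d drops deletions); `find <path> -size +${MAX_FILE_SIZE}` prints a
      # path only if the file exceeds the limit, so a non-empty report means an oversized
      # file was committed and the step fails.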
      - uses: actions/upload-artifact@v4
        if: ${{ failure() }}
        with:
          name: 'File size report'
          path: file_size_report.txt
  # Planemo test the changed repositories, each chunk creates an artifact
  # containing HTML and JSON reports for the executed tests
  test:
    name: Test tools
    needs: setup
    if: ${{ needs.setup.outputs.repository-list != '' }}
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        chunk: ${{ fromJson(needs.setup.outputs.chunk-list) }}
        python-version: ['3.10']
    services:
      postgres:
        image: postgres:11
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: postgres
        ports:
          - 5432:5432
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Cache .cache/pip
        uses: actions/cache@v4
        id: cache-pip
        with:
          path: ~/.cache/pip
          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
      - name: Cache .planemo
        uses: actions/cache@v4
        id: cache-planemo
        with:
          path: ~/.planemo
          key: planemo_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
      - name: Get number of CPU cores
        uses: SimenB/github-actions-cpu-cores@v2
        id: cpu-cores
      - name: Clean dotnet folder for space
        run: rm -Rf /usr/share/dotnet
      # the following 2 steps are needed for testing the scripting tool
      - name: Install Apptainer's singularity
        uses: eWaterCycle/setup-apptainer@v2
      - name: Symlink singularity
        run: ln -s "$(which apptainer)" "$(dirname "$(which apptainer)")/singularity"
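      # Galaxy/Planemo invoke the container runtime as `singularity`, while
      # setup-apptainer only installs an `apptainer` binary, hence the compatibility
      # symlink placed next to it on the PATH.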
      - name: Install containers
        run: |
          echo "${{ needs.setup.outputs.repository-list }}" > repository_list.txt
          if grep -q scripting repository_list.txt; then
            singularity pull --dir /tmp docker://continuumio/anaconda3
            singularity pull --dir /tmp docker://rocker/tidyverse
          fi
      # Start OMERO
      - name: Start OMERO
        uses: sudo-bot/action-docker-compose@latest
        with:
          cli-args: "-f .github/omero-docker-compose.yml up -d"
      # Setup environment
      - name: Set up Python environment for omero-py
        uses: actions/setup-python@v4
        with:
          python-version: '3.11.0'
      # Upload a dummy dataset in OMERO
      - name: Install dependencies and upload an OMERO dummy dataset
        run: |
          echo "Waiting for OMERO to be ready..."
          sleep 60
          pip install https://github.com/glencoesoftware/zeroc-ice-py-linux-x86_64/releases/download/20240202/zeroc_ice-3.6.5-cp311-cp311-manylinux_2_28_x86_64.whl
          pip install omero-py==5.19.4
          omero login -s localhost -u root -w omero -p 6064
          PID=$(omero obj new Project name='test_prj')
          DID=$(omero obj new Dataset name='test_dts')
          omero obj new ProjectDatasetLink parent=$PID child=$DID
          omero import -d $DID .github/dummy-dts-omero
          omero tag create --name test_tag --desc 'description of my_tag'
          omero tag link Image:1 1
          echo "Created the dummy dataset in OMERO"
          DID_HCS=$(omero obj new Dataset name='test_hcs_dts')
          omero import -d $DID_HCS .github/dummy-hcs-omero
          echo "Created the HCS dummy dataset in OMERO"
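      # The project, datasets and tag created here give the OMERO-related tool tests
      # fixed objects to query; the `Image:1` used for the tag link assumes the import
      # runs against a fresh OMERO instance where the first imported image gets ID 1.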
      # download or create large test data via script
      - name: Create test data
        run: |
          set -x
          pip install planemo > /dev/null
          echo '${{ needs.setup.outputs.repository-list }}' > repository_list.txt
          mapfile -t REPO_ARRAY < repository_list.txt
          planemo ci_find_tools --chunk_count "${{ needs.setup.outputs.chunk-count }}" --chunk "${{ matrix.chunk }}" --output tool_list_chunk.txt "${REPO_ARRAY[@]}"
          cat tool_list_chunk.txt | sed 's@/[^/]*$@@' | sort -u > repository_list_chunk.txt
          while read -r repo; do
            if [ -x "$repo/test-data.sh" ]; then
              echo "executing test-data.sh for $repo"
              # run in a subshell so the working directory is restored for the next repo
              (cd "$repo" && ./test-data.sh)
            fi
          done < repository_list_chunk.txt
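      # ci_find_tools restricts this step to the tools of the current chunk; stripping
      # the tool file name (sed 's@/[^/]*$@@') turns the tool paths back into repository
      # directories, and any repository shipping an executable test-data.sh gets it run
      # to fetch or generate large test data before Planemo starts.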
      - name: Planemo test
        uses: galaxyproject/planemo-ci-action@v1
        id: test
        with:
          mode: test
          repository-list: ${{ needs.setup.outputs.repository-list }}
          galaxy-fork: ${{ env.GALAXY_FORK }}
          galaxy-branch: ${{ env.GALAXY_BRANCH }}
          additional-planemo-options: --docker_run_extra_arguments '--add-host=host.docker.internal:host-gateway'
          chunk: ${{ matrix.chunk }}
          chunk-count: ${{ needs.setup.outputs.chunk-count }}
          galaxy-slots: ${{ steps.cpu-cores.outputs.count }}
          # Limit each test to 30 minutes
          test_timeout: 1800
      - uses: actions/upload-artifact@v4
        with:
          name: 'Tool test output ${{ matrix.chunk }}'
          path: upload
  # - combine the results of the test chunks (which will never fail due
  #   to `|| true`) and create a global test report as json and html which
  #   is provided as artifact
  # - check if any tool test actually failed (by lookup in the combined json)
  #   and fail this step if this is the case
  combine_outputs:
    name: Combine chunked test results
    needs: [setup, test]
    if: ${{ always() && needs.setup.outputs.repository-list != '' }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.10']
    steps:
      - uses: actions/download-artifact@v4
        with:
          path: artifacts
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Cache .cache/pip
        uses: actions/cache@v4
        id: cache-pip
        with:
          path: ~/.cache/pip
          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
      - name: Combine outputs
        uses: galaxyproject/planemo-ci-action@v1
        id: combine
        with:
          mode: combine
          html-report: true
          markdown-report: true
      - uses: actions/upload-artifact@v4
        with:
          name: 'All tool test results'
          path: upload
      - run: cat upload/tool_test_output.md >> $GITHUB_STEP_SUMMARY
      - name: Check outputs
        uses: galaxyproject/planemo-ci-action@v1
        id: check
        with:
          mode: check
      - name: Check if all test chunks succeeded
        run: |
          NFILES=$(ls artifacts/ | grep "Tool test output" | wc -l)
          if [[ "${{ needs.setup.outputs.chunk-count }}" != "$NFILES" ]]; then
            exit 1
          fi
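      # Every test chunk uploads exactly one artifact named "Tool test output <chunk>",
      # so comparing the artifact count with chunk-count catches chunks that crashed
      # before they could upload any results.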
  # deploy the tools to the toolsheds (first TTS for testing)
  deploy:
    name: Deploy
    needs: [setup, lint, combine_outputs]
    if: ${{ (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main') && github.repository_owner == 'Helmholtz-UFZ' }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.10']
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Cache .cache/pip
        uses: actions/cache@v4
        id: cache-pip
        with:
          path: ~/.cache/pip
          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
      - name: Deploy on testtoolshed
        uses: galaxyproject/planemo-ci-action@v1
        with:
          mode: deploy
          repository-list: ${{ needs.setup.outputs.repository-list }}
          shed-target: testtoolshed
          shed-key: ${{ secrets.TTS_API_KEY }}
        continue-on-error: true
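      # continue-on-error: a failed upload to the Test ToolShed is tolerated so that it
      # does not block the deployment to the main ToolShed below.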
      - name: Deploy on toolshed
        uses: galaxyproject/planemo-ci-action@v1
        with:
          mode: deploy
          repository-list: ${{ needs.setup.outputs.repository-list }}
          shed-target: toolshed
          shed-key: ${{ secrets.TS_API_KEY }}
  deploy-report:
    name: Report deploy status
    needs: [deploy]
    if: ${{ always() && needs.deploy.result != 'success' && (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main') && github.repository_owner == 'Helmholtz-UFZ' }}
    runs-on: ubuntu-latest
    steps:
      # report to the PR if deployment failed
      - name: Get PR object
        uses: 8BitJonny/[email protected]
        id: getpr
        with:
          sha: ${{ github.event.after }}
      - name: Create comment
        uses: peter-evans/create-or-update-comment@v4
        with:
          token: ${{ secrets.PAT }}
          issue-number: ${{ steps.getpr.outputs.number }}
          body: |
            Attention: deployment ${{ needs.deploy.result }}!
            https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
  determine-success:
    name: Check workflow success
    needs: [setup, lint, flake8, lintr, file_sizes, combine_outputs]
    if: ${{ always() && github.ref != 'refs/heads/master' && github.ref != 'refs/heads/main' }}
    runs-on: ubuntu-latest
    steps:
      - name: Check tool lint status
        if: ${{ needs.lint.result != 'success' && needs.lint.result != 'skipped' }}
        run: exit 1
      - name: Indicate Python script lint status
        if: ${{ needs.flake8.result != 'success' && needs.flake8.result != 'skipped' }}
        run: exit 1
      - name: Indicate R script lint status
        if: ${{ needs.lintr.result != 'success' && needs.lintr.result != 'skipped' }}
        run: exit 1
      - name: Indicate file size check status
        if: ${{ needs.file_sizes.result != 'success' && needs.file_sizes.result != 'skipped' }}
        run: exit 1
      - name: Check tool test status
        if: ${{ needs.combine_outputs.result != 'success' && needs.combine_outputs.result != 'skipped' }}
        run: exit 1
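      # determine-success only runs on non-default branches and fails whenever one of the
      # linting, file-size or test jobs failed (skipped jobs are tolerated), presumably so
      # that a single required status check can summarize the whole workflow on pull requests.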