# pr.yaml
name: Galaxy Tool Linting and Tests for push and PR
on:
pull_request:
paths-ignore:
- 'deprecated/**'
- 'docs/**'
- '*'
push:
branches:
- main
- master
paths-ignore:
- 'deprecated/**'
- 'docs/**'
- '*'
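# GALAXY_FORK and GALAXY_BRANCH select the Galaxy version used for linting and testing,
# MAX_CHUNKS caps the number of parallel test chunks, and MAX_FILE_SIZE is the largest
# file size accepted by the file size check below.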
env:
GALAXY_FORK: galaxyproject
GALAXY_BRANCH: release_24.1
MAX_CHUNKS: 4
MAX_FILE_SIZE: 1M
concurrency:
# Group runs by PR, but keep runs on the default branch separate
# because we do not want to cancel ToolShed uploads
group: pr-${{ (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main') && github.run_number || github.ref }}
cancel-in-progress: true
jobs:
  # the setup job does two things:
  # 1. cache the pip cache and .planemo
  # 2. determine the list of changed repositories and tools
  # it exposes these as job outputs, together with the latest SHA of the chosen
  # Galaxy branch, the chunk list and count, and the commit range, all of which
  # are needed by subsequent jobs.
setup:
name: Setup cache and determine changed repositories
runs-on: ubuntu-latest
outputs:
galaxy-head-sha: ${{ steps.get-galaxy-sha.outputs.galaxy-head-sha }}
repository-list: ${{ steps.discover.outputs.repository-list }}
tool-list: ${{ steps.discover.outputs.tool-list }}
chunk-count: ${{ steps.discover.outputs.chunk-count }}
chunk-list: ${{ steps.discover.outputs.chunk-list }}
commit-range: ${{ steps.discover.outputs.commit-range }}
strategy:
matrix:
python-version: ['3.10']
steps:
- name: Print github context properties
run: |
echo 'event: ${{ github.event_name }}'
echo 'sha: ${{ github.sha }}'
echo 'ref: ${{ github.ref }}'
echo 'head_ref: ${{ github.head_ref }}'
echo 'base_ref: ${{ github.base_ref }}'
echo 'event.before: ${{ github.event.before }}'
echo 'event.after: ${{ github.event.after }}'
echo 'repository_owner: ${{ github.repository_owner }}'
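      # the Galaxy SHA is baked into the pip and planemo cache keys below, so the
      # caches are refreshed whenever the chosen Galaxy branch moves forward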
- name: Determine latest commit in the Galaxy repo
id: get-galaxy-sha
run: echo "galaxy-head-sha=$(git ls-remote https://github.com/${{ env.GALAXY_FORK }}/galaxy refs/heads/${{ env.GALAXY_BRANCH }} | cut -f1)" >> $GITHUB_OUTPUT
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Cache .cache/pip
uses: actions/cache@v4
id: cache-pip
with:
path: ~/.cache/pip
key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ steps.get-galaxy-sha.outputs.galaxy-head-sha }}
- name: Cache .planemo
uses: actions/cache@v4
id: cache-planemo
with:
path: ~/.planemo
key: planemo_cache_py_${{ matrix.python-version }}_gxy_${{ steps.get-galaxy-sha.outputs.galaxy-head-sha }}
# Install the `wheel` package so that when installing other packages which
# are not available as wheels, pip will build a wheel for them, which can be cached.
- name: Install wheel
run: pip install wheel
- name: Install flake8
run: pip install flake8 flake8-import-order
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Fake a Planemo run to update cache and determine commit range, repositories, and chunks
uses: galaxyproject/planemo-ci-action@v1
id: discover
with:
create-cache: ${{ steps.cache-pip.outputs.cache-hit != 'true' || steps.cache-planemo.outputs.cache-hit != 'true' }}
galaxy-fork: ${{ env.GALAXY_FORK }}
galaxy-branch: ${{ env.GALAXY_BRANCH }}
max-chunks: ${{ env.MAX_CHUNKS }}
python-version: ${{ matrix.python-version }}
- name: Show commit range
run: echo '${{ steps.discover.outputs.commit-range }}'
- name: Show repository list
run: echo '${{ steps.discover.outputs.repository-list }}'
- name: Show tool list
run: echo '${{ steps.discover.outputs.tool-list }}'
- name: Show chunks
run: |
echo 'Using ${{ steps.discover.outputs.chunk-count }} chunks (${{ steps.discover.outputs.chunk-list }})'
# Planemo lint the changed repositories
lint:
name: Lint tool-list
needs: setup
if: ${{ needs.setup.outputs.repository-list != '' || needs.setup.outputs.tool-list != '' }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ['3.10']
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 1
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Cache .cache/pip
uses: actions/cache@v4
id: cache-pip
with:
path: ~/.cache/pip
key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
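      # planemo lint messages at or above FAIL_LEVEL fail the job: warnings already
      # fail pull requests, while pushes to the default branch only fail on errors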
- name: Set fail level for pull request
if: ${{ github.event_name == 'pull_request' }}
run:
echo "FAIL_LEVEL=warn" >> "$GITHUB_ENV"
- name: Set fail level for merge
if: ${{ github.event_name != 'pull_request' }}
run:
echo "FAIL_LEVEL=error" >> "$GITHUB_ENV"
- name: Planemo lint
uses: galaxyproject/planemo-ci-action@v1
id: lint
with:
mode: lint
fail-level: ${{ env.FAIL_LEVEL }}
repository-list: ${{ needs.setup.outputs.repository-list }}
tool-list: ${{ needs.setup.outputs.tool-list }}
- uses: actions/upload-artifact@v4
if: ${{ failure() }}
with:
name: 'Tool linting output'
path: lint_report.txt
# flake8 of Python scripts in the changed repositories
flake8:
name: Lint Python scripts
needs: setup
if: ${{ github.event_name == 'pull_request' && needs.setup.outputs.repository-list != '' }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ['3.10']
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 1
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Cache .cache/pip
uses: actions/cache@v4
id: cache-pip
with:
path: ~/.cache/pip
key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
- name: Install flake8
run: pip install flake8 flake8-import-order
- name: Flake8
run: echo '${{ needs.setup.outputs.repository-list }}' | xargs -d '\n' flake8 --output-file pylint_report.txt --tee
- uses: actions/upload-artifact@v4
if: ${{ failure() }}
with:
name: 'Python linting output'
path: pylint_report.txt
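  # check the formatting of R scripts in the changed repositories with styler;
  # the job fails if styler would modify any file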
lintr:
name: Lint R scripts
needs: setup
if: ${{ needs.setup.outputs.repository-list != '' }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-20.04]
r-version: ['release']
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 1
- uses: r-lib/actions/setup-r@v2
with:
r-version: ${{ matrix.r-version }}
- name: Cache R packages
uses: actions/cache@v4
with:
path: ${{ env.R_LIBS_USER }}
key: r_cache_${{ matrix.os }}_${{ matrix.r-version }}
- name: Install packages
uses: r-lib/actions/setup-r-dependencies@v2
with:
packages: |
any::argparse
any::styler
- name: lintr
run: |
set -eo pipefail
echo '${{ needs.setup.outputs.repository-list }}' | xargs -d '\n' -n 1 ./.github/styler.R --dry off
git status
git diff --exit-code | tee rlint_report.txt
- uses: actions/upload-artifact@v4
if: ${{ failure() }}
with:
name: 'R linting output'
path: rlint_report.txt
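  # check that no file touched in the commit range exceeds MAX_FILE_SIZE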
file_sizes:
name: Check file sizes
needs: setup
if: ${{ github.event_name == 'pull_request' && needs.setup.outputs.repository-list != '' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Check file sizes
run: |
touch file_size_report.txt
git diff --diff-filter=d --name-only ${{ needs.setup.outputs.commit-range }} > git.diff
          while read -r line; do
find "$line" -type f -size +${{ env.MAX_FILE_SIZE }} >> file_size_report.txt
done < git.diff
if [[ -s file_size_report.txt ]]; then
echo "Files larger than ${{ env.MAX_FILE_SIZE }} found"
cat file_size_report.txt
exit 1
fi
- uses: actions/upload-artifact@v4
if: ${{ failure() }}
with:
name: 'File size report'
path: file_size_report.txt
# Planemo test the changed repositories, each chunk creates an artifact
# containing HTML and JSON reports for the executed tests
test:
name: Test tools
needs: setup
if: ${{ needs.setup.outputs.repository-list != '' }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
chunk: ${{ fromJson(needs.setup.outputs.chunk-list) }}
python-version: ['3.10']
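    # PostgreSQL service, used as the database backend for the Galaxy instance that
    # planemo starts for the tool tests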
services:
postgres:
image: postgres:11
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: postgres
ports:
- 5432:5432
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 1
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Cache .cache/pip
uses: actions/cache@v4
id: cache-pip
with:
path: ~/.cache/pip
key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
- name: Cache .planemo
uses: actions/cache@v4
id: cache-planemo
with:
path: ~/.planemo
key: planemo_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
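      # the detected core count is handed to Galaxy as the number of job slots below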
- name: Get number of CPU cores
uses: SimenB/github-actions-cpu-cores@v2
id: cpu-cores
- name: Clean dotnet folder for space
run: rm -Rf /usr/share/dotnet
# the following 2 steps are needed for testing the scripting tool
- name: Install Apptainer's singularity
uses: eWaterCycle/setup-apptainer@v2
      - name: Symlink singularity
        # apptainer is installed system-wide, hence sudo is needed to place the symlink next to it
        run: sudo ln -s "$(which apptainer)" "$(dirname "$(which apptainer)")/singularity"
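      # pre-pull the container images required by the scripting repository so that
      # singularity can use them during the tests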
- name: Install containers
run: |
echo "${{ needs.setup.outputs.repository-list }}" > repository_list.txt
if grep -q scripting repository_list.txt; then
singularity pull --dir /tmp docker://continuumio/anaconda3
singularity pull --dir /tmp docker://rocker/tidyverse
fi
# Start OMERO
- name: Start OMERO
uses: sudo-bot/action-docker-compose@latest
with:
cli-args: "-f .github/omero-docker-compose.yml up -d"
# Setup environment
- name: Set up Python environment for omero-py
uses: actions/setup-python@v4
with:
python-version: '3.11.0'
# Upload a dummy dataset in OMERO
      - name: Install dependencies and upload an OMERO dummy dataset
run: |
echo "Waiting for OMERO to be ready..."
sleep 60
pip install https://github.com/glencoesoftware/zeroc-ice-py-linux-x86_64/releases/download/20240202/zeroc_ice-3.6.5-cp311-cp311-manylinux_2_28_x86_64.whl
pip install omero-py==5.19.4
omero login -s localhost -u root -w omero -p 6064
PID=$(omero obj new Project name='test_prj')
DID=$(omero obj new Dataset name='test_dts')
omero obj new ProjectDatasetLink parent=$PID child=$DID
omero import -d $DID .github/dummy-dts-omero
omero tag create --name test_tag --desc 'description of my_tag'
omero tag link Image:1 1
          echo "Created the dummy dataset in OMERO"
# download or create large test data via script
      - name: Create test data
        run: |
          set -x
          pip install planemo > /dev/null
          echo '${{ needs.setup.outputs.repository-list }}' > repository_list.txt
          mapfile -t REPO_ARRAY < repository_list.txt
          planemo ci_find_tools --chunk_count "${{ needs.setup.outputs.chunk-count }}" --chunk "${{ matrix.chunk }}" --output tool_list_chunk.txt "${REPO_ARRAY[@]}"
          # map the tool list back to a unique list of repositories
          cat tool_list_chunk.txt | sed 's@/[^/]*$@@' | sort -u > repository_list_chunk.txt
          while read -r repo
          do
            if [ -x "$repo/test-data.sh" ]; then
              echo "executing test-data.sh for $repo"
              # run in a subshell so the working directory is restored for the next repository
              ( cd "$repo" && ./test-data.sh )
            fi
          done < repository_list_chunk.txt
- name: Planemo test
uses: galaxyproject/planemo-ci-action@v1
id: test
with:
mode: test
repository-list: ${{ needs.setup.outputs.repository-list }}
galaxy-fork: ${{ env.GALAXY_FORK }}
galaxy-branch: ${{ env.GALAXY_BRANCH }}
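          # --add-host lets tools running in Docker containers resolve host.docker.internal
          # to the runner host, e.g. to reach the OMERO instance started above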
additional-planemo-options: --docker_run_extra_arguments '--add-host=host.docker.internal:host-gateway'
chunk: ${{ matrix.chunk }}
chunk-count: ${{ needs.setup.outputs.chunk-count }}
galaxy-slots: ${{ steps.cpu-cores.outputs.count }}
          # Limit each test to 30 minutes
test_timeout: 1800
- uses: actions/upload-artifact@v4
with:
name: 'Tool test output ${{ matrix.chunk }}'
path: upload
  # - combine the results of the test chunks (which never fail on their own due
  #   to `|| true`) and create a global test report as JSON, HTML, and Markdown,
  #   which is provided as an artifact
  # - check whether any tool test actually failed (by looking it up in the
  #   combined JSON) and fail this step if that is the case
combine_outputs:
name: Combine chunked test results
needs: [setup, test]
if: ${{ always() && needs.setup.outputs.repository-list != '' }}
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ['3.10']
steps:
- uses: actions/download-artifact@v4
with:
path: artifacts
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Cache .cache/pip
uses: actions/cache@v4
id: cache-pip
with:
path: ~/.cache/pip
key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
- name: Combine outputs
uses: galaxyproject/planemo-ci-action@v1
id: combine
with:
mode: combine
html-report: true
markdown-report: true
- uses: actions/upload-artifact@v4
with:
name: 'All tool test results'
path: upload
- run: cat upload/tool_test_output.md >> $GITHUB_STEP_SUMMARY
- name: Check outputs
uses: galaxyproject/planemo-ci-action@v1
id: check
with:
mode: check
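      # every chunk is expected to have uploaded a 'Tool test output' artifact; a
      # mismatch means at least one test job died before producing results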
- name: Check if all test chunks succeeded
run: |
NFILES=$(ls artifacts/ | grep "Tool test output" | wc -l)
if [[ "${{ needs.setup.outputs.chunk-count }}" != "$NFILES" ]]; then
exit 1
fi
# deploy the tools to the toolsheds (first TTS for testing)
deploy:
name: Deploy
needs: [setup, lint, combine_outputs]
if: ${{ (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' ) && github.repository_owner == 'Helmholtz-UFZ' }}
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ['3.10']
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 1
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Cache .cache/pip
uses: actions/cache@v4
id: cache-pip
with:
path: ~/.cache/pip
key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
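      # upload to the TestToolShed first; a failure there is tolerated and does not
      # block the ToolShed upload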
- name: Deploy on testtoolshed
uses: galaxyproject/planemo-ci-action@v1
with:
mode: deploy
repository-list: ${{ needs.setup.outputs.repository-list }}
shed-target: testtoolshed
shed-key: ${{ secrets.TTS_API_KEY }}
continue-on-error: true
- name: Deploy on toolshed
uses: galaxyproject/planemo-ci-action@v1
with:
mode: deploy
repository-list: ${{ needs.setup.outputs.repository-list }}
shed-target: toolshed
shed-key: ${{ secrets.TS_API_KEY }}
deploy-report:
name: Report deploy status
needs: [deploy]
if: ${{ always() && needs.deploy.result != 'success' && (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' ) && github.repository_owner == 'Helmholtz-UFZ' }}
runs-on: ubuntu-latest
steps:
# report to the PR if deployment failed
- name: Get PR object
        # the exact version tag of gh-get-current-pr is assumed here
        uses: 8BitJonny/gh-get-current-pr@v2
id: getpr
with:
sha: ${{ github.event.after }}
- name: Create comment
uses: peter-evans/create-or-update-comment@v4
with:
token: ${{ secrets.PAT }}
issue-number: ${{ steps.getpr.outputs.number }}
body: |
Attention: deployment ${{ needs.deploy.result }}!
https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
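  # single pass/fail summary for pull requests: fails if any of the lint, flake8,
  # lintr, file size, or combined test jobs failed; skipped jobs are ignored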
determine-success:
name: Check workflow success
needs: [setup, lint, flake8, lintr, file_sizes, combine_outputs]
if: ${{ always() && github.ref != 'refs/heads/master' && github.ref != 'refs/heads/main' }}
runs-on: ubuntu-latest
steps:
- name: Check tool lint status
        if: ${{ needs.lint.result != 'success' && needs.lint.result != 'skipped' }}
run: exit 1
- name: Indicate Python script lint status
if: ${{ needs.flake8.result != 'success' && needs.flake8.result != 'skipped' }}
run: exit 1
- name: Indicate R script lint status
if: ${{ needs.lintr.result != 'success' && needs.lintr.result != 'skipped' }}
run: exit 1
- name: Indicate file size check status
if: ${{ needs.file_sizes.result != 'success' && needs.file_sizes.result != 'skipped' }}
run: exit 1
- name: Check tool test status
if: ${{ needs.combine_outputs.result != 'success' && needs.combine_outputs.result != 'skipped' }}
run: exit 1