-
Notifications
You must be signed in to change notification settings - Fork 2
137 lines (119 loc) · 4.02 KB
/
test_and_deploy.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
# CI workflow: lint, check the package manifest, run the test matrix,
# sanity-check asv benchmarks, and (on tag pushes only) build and publish
# distributions.
name: tests
on:
  push:
    branches:
      - "main"
    tags:
      - "*"  # tag pushes gate the build/publish jobs below
  pull_request:
  workflow_dispatch:
# NOTE(review): REGISTRY/IMAGE_NAME are not referenced by any job visible in
# this file — presumably kept for container publishing; confirm before removing.
env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}
jobs:
  # Static analysis via the shared neuroinformatics-unit lint action.
  linting:
    name: Linting
    runs-on: ubuntu-latest
    steps:
      - uses: neuroinformatics-unit/actions/lint@v2
manifest:
name: Check manifest
runs-on: ubuntu-latest
steps:
- uses: neuroinformatics-unit/actions/check_manifest@v2
test:
needs: [linting, manifest]
name: ${{ matrix.os }} py${{ matrix.python-version }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
# Run tests on ubuntu across all supported versions
python-version: ["3.9", "3.10"]
os: [ubuntu-latest]
# Include a Windows test and old/new Mac runs
include:
- os: macos-13
python-version: "3.10"
- os: macos-latest
python-version: "3.10"
- os: windows-latest
python-version: "3.10"
steps:
# Cache atlases
- name: Cache brainglobe directory
uses: actions/cache@v3
with:
path: | # ensure we don't cache any interrupted atlas download and extraction, if e.g. we cancel the workflow manually
~/.brainglobe
!~/.brainglobe/atlas.tar.gz
key: atlases-models
fail-on-cache-miss: true
enableCrossOsArchive: true
# install additional Macos dependencies
- name: install HDF5 libraries (needed on M1 Macs only)
if: matrix.os == 'macos-latest'
run: |
brew install hdf5
# Cache cellfinder workflow data
- name: Cache data for cellfinder workflow tests
uses: actions/cache@v3
with:
path: "~/.brainglobe-tests"
key: cellfinder-test-data
fail-on-cache-miss: true
enableCrossOsArchive: true
# Run tests
- uses: neuroinformatics-unit/actions/test@v2
with:
python-version: ${{ matrix.python-version }}
secret-codecov-token: ${{ secrets.CODECOV_TOKEN }}
benchmarks:
name: Check benchmarks
runs-on: ubuntu-latest
# Set shell in login mode as global setting for the job
defaults:
run:
shell: bash -l {0}
strategy:
matrix:
python-version: ["3.10"]
steps:
- name: Checkout brainglobe-workflows repository
uses: actions/checkout@v4
- name: Create and activate conda environment # we need conda for asv management of environments
uses: conda-incubator/[email protected] # see https://github.com/conda-incubator/setup-miniconda/issues/261
with:
miniconda-version: py310_24.1.2-0 # we need conda<24.3, see https://github.com/airspeed-velocity/asv/pull/1397
python-version: ${{ matrix.python-version }}
activate-environment: asv-only
- name: Install asv
run: |
pip install --upgrade pip
pip install asv
- name: Run asv check with pip dependencies
working-directory: ${{ github.workspace }}/benchmarks
run: |
# check benchmarks with pip dependencies
asv check -v --config $GITHUB_WORKSPACE/benchmarks/asv.pip.conf.json
- name: Run asv check with latest-github dependencies
working-directory: ${{ github.workspace }}/benchmarks
run: |
# check benchmarks with latest-github dependencies
asv check -v --config $GITHUB_WORKSPACE/benchmarks/asv.latest-github.conf.json
build_sdist_wheels:
name: Build source distribution
needs: [test]
if: github.event_name == 'push' && github.ref_type == 'tag'
runs-on: ubuntu-latest
steps:
- uses: neuroinformatics-unit/actions/build_sdist_wheels@v2
upload_all:
name: Publish build distributions
needs: [build_sdist_wheels]
runs-on: ubuntu-latest
if: github.event_name == 'push' && github.ref_type == 'tag'
steps:
- uses: neuroinformatics-unit/actions/upload_pypi@v2
with:
secret-pypi-key: ${{ secrets.TWINE_API_KEY }}