diff --git a/.circleci/config.yml b/.circleci/config.yml
index 6d842d2389..302a7d3112 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -1,524 +1,14 @@
-# reusable anchors
-_machine_defaults: &machine_defaults
-  environment:
-    TZ: "/usr/share/zoneinfo/America/Los_Angeles"
-    SCRATCH: "/scratch"
-  machine:
-    image: ubuntu-2204:current
-    docker_layer_caching: true
-  working_directory: /tmp/src/sdcflows
-  resource_class: large
-
-_python_defaults: &python_defaults
-  docker:
-    - image: cimg/python:3.10.9
-  working_directory: /tmp/src/sdcflows
-
-_docker_auth: &docker_auth
-  name: Docker authentication
-  command: |
-    if [[ -n $DOCKER_PAT ]]; then
-      echo "$DOCKER_PAT" | docker login -u $DOCKER_USER --password-stdin
-    fi
-
-_setup_docker_registry: &setup_docker_registry
-  name: Set up Docker registry
-  command: |
-    if [[ -f /tmp/images/registry.tar.gz ]]; then
-      echo "Loading saved registry image"
-      docker load < /tmp/images/registry.tar.gz
-    else
-      echo "Pulling registry image from DockerHub"
-      docker pull registry:2
-    fi
-    docker run -d -p 5000:5000 --restart=always --name=registry \
-        -v /tmp/docker:/var/lib/registry registry:2
-
-_pull_from_registry: &pull_from_registry
-  name: Pull and tag image from local registry
-  command: |
-    docker pull localhost:5000/sdcflows
-    docker tag localhost:5000/sdcflows nipreps/sdcflows:latest
-
 version: 2.1
 orbs:
-  docker: circleci/docker@2.2.0
-  codecov: codecov/codecov@3.2.4
 jobs:
-  cache_test_data:
-    docker: # executor type
-      - image: nipreps/miniconda:py39_2209.01
-        auth:
-          username: $DOCKER_USER
-          password: $DOCKER_PAT
-
-    working_directory: /tmp/data
-    environment:
-      - TEMPLATEFLOW_HOME: /tmp/templateflow
-    steps:
-      - checkout:
-          path: /tmp/src/sdcflows
-
-      - run:
-          name: Configure git (pacify datalad)
-          command: |
-            git config --global user.name "First Last"
-            git config --global user.email "email@domain.com"
-
-      - restore_cache:
-          keys:
-            - data-v6-{{ .Branch }}-{{ .Revision }}
-            - data-v6--{{ .Revision }}
-            - data-v6-{{ .Branch }}-
-            - data-v6-main-
-            - data-v6-
-
-      - run:
-          name: Ensure some templates are cached
-          command: |
-            python -c "from templateflow import api as tfapi; \
-                       tfapi.get('MNI152NLin2009cAsym', resolution=2, desc='brain', suffix='mask'); \
-                       tfapi.get('MNI152NLin2009cAsym', resolution=2, desc='fMRIPrep', suffix='boldref');"
-
-      - run:
-          name: Install ds001600
-          command: |
-            datalad install -r https://github.com/nipreps-data/ds001600.git
-            datalad update -r --merge -d ds001600/
-            datalad get -r -d ds001600/ ds001600/sub-1/
-
-      - run:
-          name: Install HCP/sub-101006
-          command: |
-            datalad install -r https://github.com/nipreps-data/HCP101006.git
-            datalad update -r --merge -d HCP101006/
-            datalad get -r -d HCP101006
-
-      - run:
-          name: Install ds001771
-          command: |
-            datalad install -r https://github.com/nipreps-data/ds001771.git
-            datalad update -r --merge -d ds001771/
-            datalad get -r -d ds001771/ ds001771/sub-36/*
-            datalad get -r -d ds001771/derivatives ds001771/derivatives/openneuro/sub-36/*
-
-      - run:
-          name: Install ds000206
-          command: |
-            datalad install -r https://github.com/nipreps-data/ds000206.git
-            datalad update -r --merge -d ds000206/
-            datalad get -r -d ds000206/ ds000206/sub-05/*
-
-      - run:
-          name: Install ds000054
-          command: |
-            datalad install -r https://github.com/nipreps-data/ds000054.git
-            datalad update -r --merge -d ds000054/
-            datalad get -r -J 2 -d ds000054/ ds000054/* ds000054/derivatives/*
-
-      - run:
-          name: Install Brain extraction tests
-          command: |
-            datalad install -r https://gin.g-node.org/nipreps-data/brain-extraction-tests
-            datalad update --merge -d brain-extraction-tests/
-            datalad get -r -J 2 -d brain-extraction-tests
-
-      - run:
-          name: Install HCPh fieldmaps
-          command: |
-            datalad install -r https://github.com/nipreps-data/hcph-pilot_fieldmaps.git
-            datalad update -r --merge -d hcph-pilot_fieldmaps/
-            datalad get -r -J 2 -d hcph-pilot_fieldmaps/ hcph-pilot_fieldmaps/*
-
-      - save_cache:
-          key: data-v6-{{ .Branch }}-{{ .Revision }}
-          paths:
-            - /tmp/data
-            - /tmp/templateflow
-
-      - restore_cache:
-          keys:
-            - freesurfer-v0-{{ .BuildNum }}
-            - freesurfer-v0-
-      - run:
-          name: Pull FreeSurfer down
-          command: |
-            if [[ ! -d /tmp/freesurfer ]]; then
-              curl -sSL https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/6.0.1/freesurfer-Linux-centos6_x86_64-stable-pub-v6.0.1.tar.gz | tar zxv --no-same-owner -C /tmp \
-                  --exclude='freesurfer/diffusion' \
-                  --exclude='freesurfer/docs' \
-                  --exclude='freesurfer/fsfast' \
-                  --exclude='freesurfer/lib/cuda' \
-                  --exclude='freesurfer/lib/qt' \
-                  --exclude='freesurfer/matlab' \
-                  --exclude='freesurfer/mni/share/man' \
-                  --exclude='freesurfer/subjects/fsaverage_sym' \
-                  --exclude='freesurfer/subjects/fsaverage3' \
-                  --exclude='freesurfer/subjects/fsaverage4' \
-                  --exclude='freesurfer/subjects/cvs_avg35' \
-                  --exclude='freesurfer/subjects/cvs_avg35_inMNI152' \
-                  --exclude='freesurfer/subjects/bert' \
-                  --exclude='freesurfer/subjects/lh.EC_average' \
-                  --exclude='freesurfer/subjects/rh.EC_average' \
-                  --exclude='freesurfer/subjects/sample-*.mgz' \
-                  --exclude='freesurfer/subjects/V1_average' \
-                  --exclude='freesurfer/trctrain'
-              echo "b2VzdGViYW5Ac3RhbmZvcmQuZWR1CjMwNzU2CiAqQ1MzYkJ5VXMxdTVNCiBGU2kvUGJsejJxR1V3Cg==" | base64 -d > /tmp/freesurfer/license.txt
-            else
-              echo "FreeSurfer was cached."
-              circleci step halt
-            fi
-      - save_cache:
-          key: freesurfer-v0-{{ .BuildNum }}
-          paths:
-            - /tmp/freesurfer
-
-  build_n_pytest:
-    <<: *machine_defaults
-    working_directory: /tmp/tests
+  empty:
+    machine:
+      image: ubuntu-2204:current
     steps:
-      - restore_cache:
-          keys:
-            - build-v2-{{ .Branch }}-{{ epoch }}
-            - build-v2-{{ .Branch }}-
-            - build-v2-master-
-            - build-v2-
-          paths:
-            - /tmp/docker
-      - docker/install-docker-credential-helper
-      - run: *docker_auth
-      - run: *setup_docker_registry
-      - run:
-          name: Pull Ubuntu/jammy image
-          command: |
-            set +e
-            docker pull localhost:5000/ubuntu
-            success=$?
-            set -e
-            if [[ "$success" = "0" ]]; then
-              echo "Pulling from local registry"
-              docker tag localhost:5000/ubuntu ubuntu:jammy
-            else
-              echo "Pulling from Docker Hub"
-              docker pull ubuntu:jammy
-              docker tag ubuntu:jammy localhost:5000/ubuntu
-              docker push localhost:5000/ubuntu
-            fi
-      - run:
-          name: Pull SDCFlows Docker image
-          command: |
-            set +e
-            docker pull localhost:5000/sdcflows
-            success=$?
-            set -e
-            if [[ "$success" = "0" ]]; then
-              echo "Pulling from local registry"
-              docker tag localhost:5000/sdcflows nipreps/sdcflows:latest
-              docker tag localhost:5000/sdcflows nipreps/sdcflows
-            else
-              echo "Pulling from Docker Hub"
-              docker pull nipreps/sdcflows:latest
-            fi
-      - checkout:
-          path: /tmp/src/sdcflows
-      - run:
-          name: Build Docker image
-          working_directory: /tmp/src/sdcflows
-          no_output_timeout: 60m
-          command: |
-            export PY3=$( pyenv versions | awk '/^\* 3/ { print $2 }' )
-            pyenv local $PY3
-            python3 -m pip install -U build hatch hatchling pip twine docutils
-
-            # Get version, update files.
-            THISVERSION=$( python3 -m hatch version | tail -n1 | xargs )
-            if [[ ${THISVERSION:0:1} == "0" ]] ; then
-              echo "WARNING: latest git tag could not be found"
-              echo "Please, make sure you fetch all tags from upstream with"
-              echo "the command ``git fetch --tags --verbose`` and push"
-              echo "them to your fork with ``git push origin --tags``"
-            fi
-            # Build docker image
-            docker build --rm \
-                --cache-from=nipreps/sdcflows \
-                -t nipreps/sdcflows:latest \
-                --build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` \
-                --build-arg VCS_REF=`git rev-parse --short HEAD` \
-                --build-arg VERSION="${CIRCLE_TAG:-$THISVERSION}" . \
-                | tee build-output.log
-            echo "${CIRCLE_TAG:-$THISVERSION}" >> /tmp/.local-version.txt
-      - run:
-          name: Check Docker image
-          working_directory: /tmp/src/sdcflows
-          command: |
-            export PY3=$( pyenv versions | awk '/^\* 3/ { print $2 }' )
-            pyenv local $PY3
-            # Get version, update files.
-            THISVERSION=$( python3 -m hatch version | tail -n1 | xargs )
-            BUILT_VERSION=$( docker run --rm --entrypoint=python nipreps/sdcflows:latest -c "import sdcflows; print(sdcflows.__version__)" )
-            BUILT_VERSION=${BUILT_VERSION%$'\r'}
-            echo "VERSION: \"$THISVERSION\""
-            echo "BUILT: \"$BUILT_VERSION\""
-            set -e
-            test "$BUILT_VERSION" = "$THISVERSION"
-      - run:
-          name: Docker push to local registry
-          no_output_timeout: 40m
-          command: |
-            docker tag nipreps/sdcflows:latest localhost:5000/sdcflows
-            docker push localhost:5000/sdcflows
-      - run:
-          name: Docker registry garbage collection
-          command: |
-            docker exec -it registry /bin/registry garbage-collect --delete-untagged \
-                /etc/docker/registry/config.yml
-      - save_cache:
-          key: build-v2-{{ .Branch }}-{{ epoch }}
-          paths:
-            - /tmp/docker
-
-      - restore_cache:
-          keys:
-            - freesurfer-v0-{{ .BuildNum }}
-            - freesurfer-v0-
-      - restore_cache:
-          keys:
-            - data-v6-{{ .Branch }}-{{ .Revision }}
-            - data-v6--{{ .Revision }}
-            - data-v6-{{ .Branch }}-
-            - data-v6-main-
-            - data-v6-
-
-      - restore_cache:
-          keys:
-            - workdir-v3-{{ .Branch }}-
-            - workdir-v3-master-
-            - workdir-v3-
-      - run:
-          name: Refreshing cached intermediate results
-          working_directory: /tmp/src/sdcflows
-          command: |
-            COMMIT_MSG=$( git log --format=oneline -n 1 $CIRCLE_SHA1 )
-            set +e
-            do_refresh="$( echo "${COMMIT_MSG}" | grep -i -E '\[refresh[ _]?cache\]' )"
-            set -e
-            if [[ "x${do_refresh}" = "x" ]]; then
-              echo "Did not refresh the workdir."
-            else
-              wget --retry-connrefused --waitretry=5 --read-timeout=20 --timeout=15 -t 0 -q \
-                  -O /tmp/data/workdir.tar.gz "https://files.osf.io/v1/resources/9sy2a/providers/osfstorage/5dcabd60a1cd9e000c751b3c"
-              rm -rf /tmp/work
-              mkdir -p /tmp/work
-              pushd /tmp/work
-              tar xzfv /tmp/data/workdir.tar.gz --strip 1
-              popd
-            fi
-
-            wipe_dir=$( echo "${COMMIT_MSG}" | sed -n 's/.*\[wipe \([a-zA-Z0-9_\*]*\)\].*/\1/p' )
-            if [[ "x${wipe_dir}" != "x" ]]; then
-              path=/tmp/work/${wipe_dir}
-              echo "Found tag [wipe ${wipe_dir}] - clearing up $path ..."
-              rm -rf ${path}
-            fi
-      - run:
-          name: Run tests
-          no_output_timeout: 2h
-          command: |
-            mkdir -p /tmp/work
-            docker run -it --rm -w /src/sdcflows \
-                -e TEST_WORK_DIR=/work \
-                -e TEST_DATA_HOME=/data \
-                -e TEST_OUTPUT_DIR=/out \
-                -e COVERAGE_FILE=/out/.coverage \
-                -e FS_LICENSE=/opt/freesurfer/license.txt \
-                -v /tmp/data:/data:ro \
-                -v /tmp/src:/src \
-                -v /tmp/tests:/out \
-                -v /tmp/work:/work \
-                -v /tmp/freesurfer:/opt/freesurfer:ro \
-                -v /tmp/templateflow:/home/sdcflows/.cache/templateflow \
-                nipreps/sdcflows:latest \
-                pytest -v --junit-xml=/out/pytest.xml \
-                    --cov sdcflows --cov-report xml:/out/unittests.xml \
-                    -n auto sdcflows/
-      - save_cache:
-          key: workdir-v3-{{ .Branch }}-{{ .BuildNum }}
-          paths:
-            - /tmp/work
-      - store_artifacts:
-          path: /tmp/tests
-      - store_test_results:
-          path: /tmp/tests
-
-      - codecov/upload:
-          file: /tmp/tests/unittests.xml
-          flags: unittests
-
-  build_docs:
-    <<: *python_defaults
-    working_directory: /tmp/gh-pages
-    environment:
-      - FSLOUTPUTTYPE: NIFTI
-      - SUBJECTS_DIR: /tmp/subjects
-    steps:
-      - checkout
-      - run:
-          name: Create subjects folder
-          command: mkdir -p $SUBJECTS_DIR
-      - run:
-          name: Install Graphviz & pandoc
-          command: |
-            sudo apt-get update -y
-            sudo apt-get install -y --no-install-recommends graphviz pandoc texlive
-      - run:
-          name: Install deps
-          command: |
-            python -m venv /tmp/venv
-            source /tmp/venv/bin/activate
-            python -m pip install -U build hatch hatchling pip twine docutils
-            python -m pip install .[docs]
-      - run:
-          name: Build only this commit
-          command: |
-            source /tmp/venv/bin/activate
-            python -m hatch version | tail -n1 | xargs
-            BRANCH=$( echo $CIRCLE_BRANCH | sed 's+/+_+g' )
-            python -c "from templateflow.api import get; get('MNI152NLin2009cAsym', desc='brain', resolution=1, suffix='T1w')"
-            make -C docs SPHINXOPTS="-W -v" BUILDDIR="$HOME/docs" OUTDIR=${CIRCLE_TAG:-$BRANCH} html
-      - store_artifacts:
-          path: ~/docs/
-
-  deploy_docker:
-    <<: *machine_defaults
-    steps:
-      - restore_cache:
-          keys:
-            - build-v2-{{ .Branch }}-{{ epoch }}
-            - build-v2-{{ .Branch }}-
-            - build-v2-master-
-            - build-v2-
-          paths:
-            - /tmp/docker
-      - docker/install-docker-credential-helper
-      - run: *docker_auth
-      - run: *setup_docker_registry
-      - run: *pull_from_registry
-      - run:
-          name: Deploy to Docker Hub
-          no_output_timeout: 40m
-          command: |
-            if [[ -n "$DOCKER_PAT" ]]; then
-              docker push nipreps/sdcflows:latest
-              docker tag nipreps/sdcflows nipreps/sdcflows:$CIRCLE_TAG
-              docker push nipreps/sdcflows:$CIRCLE_TAG
-            fi
-
-  test_package:
-    <<: *python_defaults
-    steps:
-      - checkout
-      - run:
-          name: Prepare environment & build
-          command: |
-            python -m venv /tmp/buildenv
-            source /tmp/buildenv/bin/activate
-            python3 -m pip install -U build hatch hatchling pip twine docutils
-            python3 -m build
-            twine check dist/sdcflows*
-      - store_artifacts:
-          path: /tmp/src/sdcflows/dist
-      - persist_to_workspace:
-          root: /tmp/src/sdcflows
-          paths: dist
-
-      - run:
-          name: Validate version
-          command: |
-            source /tmp/buildenv/bin/activate
-            THISVERSION=$( python -m hatch version | tail -n1 | xargs )
-            python -m pip install dist/*.whl
-            mkdir empty
-            cd empty
-            INSTALLED=$( python -c 'import sdcflows; print(sdcflows.__version__)' )
-            test "${CIRCLE_TAG:-$THISVERSION}" == "$INSTALLED"
-
-  deploy_pypi:
-    <<: *python_defaults
-    steps:
-      - attach_workspace:
-          at: /tmp/src/sdcflows
-      - run:
-          name: Upload to Pypi
-          command: |
-            python -m pip install twine
-            python -m twine check dist/*
-            python -m twine upload dist/* --non-interactive
+      - run: echo Not doing anything.
 
 workflows:
   version: 2
   build_deploy:
     jobs:
-      - cache_test_data:
-          context:
-            - nipreps-common
-            - fs-license
-          filters:
-            branches:
-              ignore:
-                - /docs?\/.*/
-            tags:
-              only: /.*/
-
-      - build_n_pytest:
-          requires:
-            - cache_test_data
-          filters:
-            branches:
-              ignore:
-                - /docs?\/.*/
-            tags:
-              only: /.*/
-
-      - test_package:
-          context:
-            - nipreps-common
-          filters:
-            branches:
-              ignore:
-                - /docs?\/.*/
-                - /tests?\/.*/
-            tags:
-              only: /.*/
-
-      - deploy_pypi:
-          context:
-            - nipreps-common
-          requires:
-            - build_docs
-            - test_package
-            - build_n_pytest
-          filters:
-            branches:
-              ignore: /.*/
-            tags:
-              only: /.*/
-
-      - deploy_docker:
-          context:
-            - nipreps-common
-          requires:
-            - deploy_pypi
-          filters:
-            branches:
-              ignore: /.*/
-            tags:
-              only: /.*/
-
-      - build_docs:
-          filters:
-            branches:
-              ignore:
-                - /tests?\/.*/
-            tags:
-              only: /.*/
+      - empty
diff --git a/.github/workflows/unittests.yml b/.github/workflows/build-test-publish.yml
similarity index 62%
rename from .github/workflows/unittests.yml
rename to .github/workflows/build-test-publish.yml
index 716c30d92c..d3d5a03551 100644
--- a/.github/workflows/unittests.yml
+++ b/.github/workflows/build-test-publish.yml
@@ -5,6 +5,12 @@ on:
   pull_request:
   schedule:
     - cron: 0 0 * * 0
+  # Allow job to be triggered manually from GitHub interface
+  workflow_dispatch:
+
+# Force pytest to use color
+env:
+  FORCE_COLOR: true
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
@@ -18,7 +24,20 @@ defaults:
     shell: bash -el {0}
 
 jobs:
-  build-linux:
+  build-package:
+    name: Build & inspect package
+    runs-on: ubuntu-latest
+    permissions:
+      attestations: write
+      id-token: write
+
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - uses: hynek/build-and-inspect-python-package@v2
+
+  test:
     if: "!contains(github.event.head_commit.message, '[skip ci]') && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name != 'nipreps/sdcflows')"
     runs-on: ubuntu-latest
     env:
@@ -30,11 +49,16 @@ jobs:
       AFNI_IMSAVE_WARNINGS: NO
       AFNI_TTATLAS_DATASET: /opt/afni/atlases
      AFNI_PLUGINPATH: /opt/afni/plugins
-      ANTSPATH: /opt/ants
     strategy:
-      max-parallel: 5
+      max-parallel: 6
       matrix:
         python-version: ["3.9", "3.10", "3.11", "3.12"]
+        marks: ["not slow"]
+        include:
+          - python-version: "3.9"
+            marks: "slow and not veryslow"
+          - python-version: "3.12"
+            marks: "veryslow"
 
     steps:
       - uses: actions/cache@v4
@@ -66,19 +90,6 @@ jobs:
           tcsh @update.afni.binaries -package linux_ubuntu_16_64 -bindir ${AFNI_HOME}
         fi
 
-      - uses: actions/cache@v4
-        with:
-          path: /opt/ants
-          key: ants-v1
-          restore-keys: |
-            ants-v1
-      - name: Install ANTS
-        run: |
-          if [[ ! -d "${ANTSPATH}" ]]; then
-            sudo mkdir -p $ANTSPATH
-            curl -sSL "https://dl.dropbox.com/s/gwf51ykkk5bifyj/ants-Linux-centos6_x86_64-v2.3.4.tar.gz" | sudo tar -xzC $ANTSPATH --strip-components 1
-          fi
-
       - name: Git settings (pacify DataLad)
         run: |
           git config --global user.name 'NiPreps Bot'
           git config --global user.email 'nipreps@gmail.com'
@@ -89,11 +100,11 @@ jobs:
       - uses: conda-incubator/setup-miniconda@v3
         with:
           auto-update-conda: true
          auto-activate-base: true
           python-version: ${{ matrix.python-version }}
-          channels: anaconda,https://fsl.fmrib.ox.ac.uk/fsldownloads/fslconda/public/,conda-forge
+          channels: https://fsl.fmrib.ox.ac.uk/fsldownloads/fslconda/public/,conda-forge
       - uses: actions/cache@v4
         id: conda
         env:
-          CACHE_NUM: v4
+          CACHE_NUM: v5
         with:
           path: |
             ~/conda_pkgs_dir
@@ -105,9 +116,9 @@ jobs:
       - name: Install git-annex
         run: |
           conda install git-annex=*=alldep* pip
           pip install datalad datalad-osf
-      - name: Install fsl
+      - name: Install fsl and ANTs
         run: |
-          conda install fsl-fugue fsl-topup
+          conda install fsl-fugue fsl-topup ants
       - uses: actions/checkout@v4
       - name: Install dependencies
         timeout-minutes: 5
@@ -128,7 +139,7 @@ jobs:
       - uses: actions/cache@v4
         with:
           path: ${{ env.TEST_DATA_HOME }}
-          key: data-cache-v1
+          key: data-cache-v2
           restore-keys: |
             data-cache-
       - name: Install test data
@@ -139,28 +150,44 @@ jobs:
         run: |
           # ds001600
           datalad install -r https://github.com/nipreps-data/ds001600.git
           datalad update -r --merge -d ds001600/
-          datalad get -r -d ds001600/ ds001600/sub-1/
+          datalad get -r -J 2 -d ds001600/ ds001600/sub-1/
 
           # HCP/sub-101006
           datalad install -r https://github.com/nipreps-data/HCP101006.git
           datalad update -r --merge -d HCP101006/
-          datalad get -r -d HCP101006
+          datalad get -r -J 2 -d HCP101006 HCP101006/*
 
           # ds001771
           datalad install -r https://github.com/nipreps-data/ds001771.git
           datalad update -r --merge -d ds001771/
-          datalad get -r -d ds001771/ ds001771/sub-36/*
-          datalad get -r -d ds001771/derivatives ds001771/derivatives/openneuro/sub-36/*
+          datalad get -r -J 2 -d ds001771/ ds001771/sub-36/*
+          datalad get -r -J 2 -d ds001771/derivatives ds001771/derivatives/openneuro/sub-36/*
 
           # ds000054
           datalad install -r https://github.com/nipreps-data/ds000054.git
           datalad update --merge -d ds000054/
           datalad get -r -d ds000054/ ds000054/sub-100185/*
+          datalad get -r -J 2 -d ds000054/ ds000054/derivatives/smriprep-0.6/sub-100185/anat/
 
           # ds000206
           datalad install -r https://github.com/nipreps-data/ds000206.git
           datalad update -r --merge -d ds000206/
-          datalad get -r -d ds000206/ ds000206/sub-05/
+          datalad get -r -J 2 -d ds000206/ ds000206/sub-05/
+
+          # Brain extraction tests
+          datalad install -r https://gin.g-node.org/nipreps-data/brain-extraction-tests
+          datalad update --merge -d brain-extraction-tests/
+          datalad get -r -J 2 -d brain-extraction-tests brain-extraction-tests/*
+
+          # HCPH pilot
+          datalad install -r https://github.com/nipreps-data/hcph-pilot_fieldmaps.git
+          datalad update -r --merge -d hcph-pilot_fieldmaps/
+          datalad get -r -J 2 -d hcph-pilot_fieldmaps/ hcph-pilot_fieldmaps/*
+
+      - name: Set FreeSurfer variables
+        run: |
+          echo "FREESURFER_HOME=$HOME/.cache/freesurfer" >> $GITHUB_ENV
+          echo "FS_LICENSE=$HOME/.cache/freesurfer/license.txt" >> $GITHUB_ENV
 
       - name: Install FreeSurfer's mri_robust_template
         env:
@@ -168,17 +195,40 @@ jobs:
         run: |
           curl https://files.osf.io/v1/resources/$MRI_ROBUST_TEMPLATE?direct > mri_robust_template
           sudo install mri_robust_template /usr/local/bin
-          mkdir -p $HOME/.cache/freesurfer/
-          echo "b2VzdGViYW5Ac3RhbmZvcmQuZWR1CjMwNzU2CiAqQ1MzYkJ5VXMxdTVNCiBGU2kvUGJsejJxR1V3Cg==" | base64 -d > $HOME/.cache/freesurfer/license.txt
+          mkdir -p $( dirname $FS_LICENSE )
+          echo "b2VzdGViYW5Ac3RhbmZvcmQuZWR1CjMwNzU2CiAqQ1MzYkJ5VXMxdTVNCiBGU2kvUGJsejJxR1V3Cg==" | base64 -d > $FS_LICENSE
 
       - name: Run pytest with coverage
         run: |
-          export LD_LIBRARY_PATH=/usr/lib/fsl/5.0:$LD_LIBRARY_PATH
-          export PATH=$ANTSPATH:${AFNI_HOME}:/usr/lib/fsl/5.0:$PATH
-          pytest -v --cov sdcflows --cov-report xml:cov.xml --doctest-modules -n auto sdcflows
+          export PATH=${AFNI_HOME}:$PATH
+          export FSLDIR=${CONDA_PREFIX}
+          pytest -v --cov sdcflows --cov-report xml:cov.xml --doctest-modules -n auto sdcflows \
+              --durations=20 --durations-min=10 -m "$MARKS"
+        env:
+          MARKS: ${{ matrix.marks }}
 
       - uses: codecov/codecov-action@v4
         with:
           file: cov.xml
           token: ${{ secrets.CODECOV_TOKEN }}
         if: ${{ always() }}
+
+  publish:
+    name: Publish released package to pypi.org
+    environment: release-pypi
+    if: github.event.action == 'published'
+    runs-on: ubuntu-latest
+    needs: [build-package, test]
+    permissions:
+      attestations: write
+      id-token: write
+
+    steps:
+      - name: Download packages built by build-and-inspect-python-package
+        uses: actions/download-artifact@v4
+        with:
+          name: Packages
+          path: dist
+
+      - name: Upload package to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml
deleted file mode 100644
index 26a7f11751..0000000000
--- a/.github/workflows/pythonpackage.yml
+++ /dev/null
@@ -1,136 +0,0 @@
-# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
-# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
-
-name: Python package
-
-on:
-  push:
-    branches: [ '*' ]
-    tags: [ '*' ]
-  pull_request:
-    branches: [ master, 'maint/*' ]
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-permissions:
-  contents: read
-
-jobs:
-  job_metadata:
-    if: github.repository == 'nipreps/sdcflows'
-    runs-on: ubuntu-latest
-    outputs:
-      commit_message: ${{ steps.get_commit_message.outputs.commit_message }}
-      version: ${{ steps.show_version.outputs.version }}
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-      - name: Print head git commit message
-        id: get_commit_message
-        run: |
-          if [[ -z "$COMMIT_MSG" ]]; then
-            COMMIT_MSG=$(git show -s --format=%s $REF)
-          fi
-          echo commit_message=$COMMIT_MSG | tee -a $GITHUB_OUTPUT
-        env:
-          COMMIT_MSG: ${{ github.event.head_commit.message }}
-          REF: ${{ github.event.pull_request.head.sha }}
-      - name: Detect version
-        id: show_version
-        run: |
-          if [[ "$GITHUB_REF" == refs/tags/* ]]; then
-            VERSION=${GITHUB_REF##*/}
-          else
-            pip install -U build hatch hatchling pip twine docutils
-            VERSION=$( python -m hatch version | tail -n1 | xargs )
-          fi
-          echo version=$VERSION | tee -a $GITHUB_OUTPUT
-
-  build:
-    if: github.repository == 'nipreps/sdcflows'
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-      - name: Set up Python 3
-        uses: actions/setup-python@v5
-        with:
-          python-version: 3
-      - name: Display Python version
-        run: python -c "import sys; print(sys.version)"
-      - name: Build sdcflows
-        run: pipx run build
-      - name: Check distributions
-        run: pipx run twine check dist/*
-      - uses: actions/upload-artifact@v4
-        with:
-          name: dist
-          path: dist/
-
-  test:
-    if: "!startsWith(github.ref, 'refs/tags/') && !contains(github.event.head_commit.message, '[skip ci]')"
-    needs: [build, job_metadata]
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        python-version: ["3.9", "3.12"]
-        install: [repo, sdist, wheel, editable]
-
-    env:
-      INSTALL_TYPE: ${{ matrix.install }}
-
-    steps:
-      - uses: actions/checkout@v4
-        if: matrix.install == 'repo' || matrix.install == 'editable'
-        with:
-          fetch-depth: 0
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v5
-        with:
-          python-version: ${{ matrix.python-version }}
-      - name: Fetch packages
-        if: matrix.install == 'sdist' || matrix.install == 'wheel'
-        uses: actions/download-artifact@v4
-        with:
-          name: dist
-          path: dist/
-      - name: Select archive
-        run: |
-          if [ "$INSTALL_TYPE" = "sdist" ]; then
-            ARCHIVE=$( ls dist/*.tar.gz )
-          elif [ "$INSTALL_TYPE" = "wheel" ]; then
-            ARCHIVE=$( ls dist/*.whl )
-          elif [ "$INSTALL_TYPE" = "repo" ]; then
-            ARCHIVE="."
-          elif [ "$INSTALL_TYPE" = "editable" ]; then
-            ARCHIVE="-e ."
-          fi
-          echo "ARCHIVE=$ARCHIVE" | tee -a $GITHUB_ENV
-      - name: Install package
-        run: python -m pip install $ARCHIVE
-      - name: Check version
-        run: |
-          INSTALLED_VERSION=$(python -c 'import sdcflows; print(sdcflows.__version__, end="")')
-          echo "INSTALLED: \"${INSTALLED_VERSION}\""
-          test "${INSTALLED_VERSION}" = "${VERSION}"
-        env:
-          VERSION: ${{ needs.job_metadata.outputs.version }}
-
-  flake8:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Python
-        uses: actions/setup-python@v5
-      - run: pipx run flake8-pyproject sdcflows/
-
-  # codespell:
-  #   runs-on: ubuntu-latest
-  #   steps:
-  #     - uses: actions/checkout@v4
-  #     - uses: codespell-project/actions-codespell@v2
diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml
new file mode 100644
index 0000000000..bd8b7c2b0c
--- /dev/null
+++ b/.github/workflows/validate.yml
@@ -0,0 +1,30 @@
+name: Validations
+
+on:
+  push:
+    branches: [ '*' ]
+  pull_request:
+    branches: [ master, main, 'maint/*' ]
+
+env:
+  FORCE_COLOR: true
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+jobs:
+  flake8:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - run: pipx run flake8-pyproject sdcflows/
+
+  # codespell:
+  #   runs-on: ubuntu-latest
+  #   steps:
+  #     - uses: actions/checkout@v4
+  #     - uses: codespell-project/actions-codespell@v2
diff --git a/pyproject.toml b/pyproject.toml
index fbdaae9e94..fa54d89b70 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -160,11 +160,15 @@ per-file-ignores = [
 
 [tool.pytest.ini_options]
 norecursedirs = [".git"]
-addopts = "-svx --doctest-modules"
+addopts = "-svx --doctest-modules --strict-markers"
 doctest_optionflags = "ALLOW_UNICODE NORMALIZE_WHITESPACE ELLIPSIS"
 env = "PYTHONHASHSEED=0"
 filterwarnings = ["ignore::DeprecationWarning"]
 junit_family = "xunit2"
+markers = [
+    "slow: marks tests as slow (deselect with '-m \"not slow\"')",
+    "veryslow: marks tests as very slow (>5min)",
+]
 
 
 [tool.coverage.run]
@@ -191,3 +195,8 @@ ignore-words-list = 'nd,mapp,reson'
 skip = """
 ./.git,*.pdf,*.svg,*.min.js,*.ipynb,ORIGINAL_LICENSE,\
 ./docs/source/_static/example_anatreport.html"""
+
+[tool.check-wheel-contents]
+ignore = [
+    "W002",  # Test data contains duplicates
+]
diff --git a/sdcflows/interfaces/tests/test_bspline.py b/sdcflows/interfaces/tests/test_bspline.py
index 67813a4c29..d10b49684b 100644
--- a/sdcflows/interfaces/tests/test_bspline.py
+++ b/sdcflows/interfaces/tests/test_bspline.py
@@ -138,7 +138,6 @@ def test_topup_coeffs(tmpdir, testdata_dir):
         _fix_topup_fieldcoeff("failing.nii.gz", str(testdata_dir / "epi.nii.gz"), "i")
 
 
-@pytest.mark.skipif(os.getenv("GITHUB_ACTIONS") == "true", reason="this is GH Actions")
 def test_topup_coeffs_interpolation(tmpdir, testdata_dir):
     """Check that our interpolation is not far away from TOPUP's."""
     tmpdir.chdir()
diff --git a/sdcflows/tests/test_transform.py b/sdcflows/tests/test_transform.py
index 72e44e0759..18e1b71174 100644
--- a/sdcflows/tests/test_transform.py
+++ b/sdcflows/tests/test_transform.py
@@ -21,7 +21,6 @@
 # https://www.nipreps.org/community/licensing/
 #
 """Unit tests of the transform object."""
-import os
 from subprocess import check_call
 from itertools import product
 import pytest
@@ -139,7 +138,6 @@ def test_displacements_field(tmpdir, testdata_dir, outdir, pe_dir, rotation, fli
         ).run()
 
 
-@pytest.mark.skipif(os.getenv("GITHUB_ACTIONS") == "true", reason="this is GH Actions")
 @pytest.mark.parametrize(
     "pe0",
     [
diff --git a/sdcflows/workflows/fit/tests/test_pepolar.py b/sdcflows/workflows/fit/tests/test_pepolar.py
index 995c867f96..e95128d153 100644
--- a/sdcflows/workflows/fit/tests/test_pepolar.py
+++ b/sdcflows/workflows/fit/tests/test_pepolar.py
@@ -21,15 +21,13 @@
 # https://www.nipreps.org/community/licensing/
 #
 """Test pepolar type of fieldmaps."""
-import os
 import pytest
 from nipype.pipeline import engine as pe
 
 from ..pepolar import init_topup_wf
 
 
-@pytest.mark.skipif(os.getenv("TRAVIS") == "true", reason="this is TravisCI")
-@pytest.mark.skipif(os.getenv("GITHUB_ACTIONS") == "true", reason="this is GH Actions")
+@pytest.mark.slow
 @pytest.mark.parametrize("ds", ("ds001771", "HCP101006"))
 def test_topup_wf(tmpdir, bids_layouts, workdir, outdir, ds):
     """Test preparation workflow."""
diff --git a/sdcflows/workflows/fit/tests/test_phdiff.py b/sdcflows/workflows/fit/tests/test_phdiff.py
index 94af79f2fd..94932f6243 100644
--- a/sdcflows/workflows/fit/tests/test_phdiff.py
+++ b/sdcflows/workflows/fit/tests/test_phdiff.py
@@ -21,7 +21,6 @@
 # https://www.nipreps.org/community/licensing/
 #
 """Test phase-difference type of fieldmaps."""
-import os
 from pathlib import Path
 from json import loads
 
@@ -30,8 +29,7 @@
 from ..fieldmap import init_fmap_wf, Workflow
 
 
-@pytest.mark.skipif(os.getenv("TRAVIS") == "true", reason="this is TravisCI")
-@pytest.mark.skipif(os.getenv("GITHUB_ACTIONS") == "true", reason="this is GH Actions")
+@pytest.mark.slow
 @pytest.mark.parametrize(
     "fmap_file",
     [
diff --git a/sdcflows/workflows/fit/tests/test_syn.py b/sdcflows/workflows/fit/tests/test_syn.py
index d1535983b2..262890d03a 100644
--- a/sdcflows/workflows/fit/tests/test_syn.py
+++ b/sdcflows/workflows/fit/tests/test_syn.py
@@ -21,7 +21,6 @@
 # https://www.nipreps.org/community/licensing/
 #
 """Test fieldmap-less SDC-SyN."""
-import os
 import json
 import pytest
 from nipype.pipeline import engine as pe
@@ -29,8 +28,8 @@
 from ..syn import init_syn_sdc_wf, init_syn_preprocessing_wf, _adjust_zooms, _set_dtype
 
 
-@pytest.mark.skipif(os.getenv("TRAVIS") == "true", reason="this is TravisCI")
-@pytest.mark.skipif(os.getenv("GITHUB_ACTIONS") == "true", reason="this is GH Actions")
+@pytest.mark.veryslow
+@pytest.mark.slow
 def test_syn_wf(tmpdir, datadir, workdir, outdir, sloppy_mode):
     """Build and run an SDC-SyN workflow."""
     derivs_path = datadir / "ds000054" / "derivatives"
diff --git a/sdcflows/workflows/tests/test_ancillary.py b/sdcflows/workflows/tests/test_ancillary.py
index f73d062989..7e407d86e3 100644
--- a/sdcflows/workflows/tests/test_ancillary.py
+++ b/sdcflows/workflows/tests/test_ancillary.py
@@ -21,7 +21,6 @@
 # https://www.nipreps.org/community/licensing/
 #
 """Check the tools submodule."""
-import os
 import pytest
 from nipype.pipeline import engine as pe
 from nipype.interfaces import utility as niu
@@ -29,7 +28,7 @@
 from ..ancillary import init_brainextraction_wf
 
 
-@pytest.mark.skipif(os.getenv("GITHUB_ACTIONS") == "true", reason="this is GH Actions")
+@pytest.mark.slow
 @pytest.mark.parametrize("folder", ["magnitude/ds000054", "magnitude/ds000217"])
 def test_brainmasker(tmpdir, datadir, workdir, outdir, folder):
     """Exercise the brain masking tool."""
diff --git a/sdcflows/workflows/tests/test_base.py b/sdcflows/workflows/tests/test_base.py
index b31222fecc..2cb77cf089 100644
--- a/sdcflows/workflows/tests/test_base.py
+++ b/sdcflows/workflows/tests/test_base.py
@@ -22,13 +22,14 @@
 #
 """Test the base workflow."""
 from pathlib import Path
-import os
 import pytest
 
 from sdcflows import fieldmaps as fm
 from sdcflows.utils.wrangler import find_estimators
 from sdcflows.workflows.base import init_fmap_preproc_wf
 
 
+@pytest.mark.veryslow
+@pytest.mark.slow
 @pytest.mark.parametrize(
     "dataset,subject", [("ds000054", "100185"), ("HCP101006", "101006")]
 )
@@ -62,9 +63,6 @@ def test_fmap_wf(tmpdir, workdir, outdir, bids_layouts, dataset, subject):
     if workdir:
         wf.base_dir = str(workdir)
 
-    if os.getenv("GITHUB_ACTIONS") == "true":
-        return
-
     res = wf.run(plugin="Linear")
 
     # Regression test for when out_merge_fmap_coeff was flattened and would
diff --git a/sdcflows/workflows/tests/test_integration.py b/sdcflows/workflows/tests/test_integration.py
index 6263f7a5f9..ac61a48034 100644
--- a/sdcflows/workflows/tests/test_integration.py
+++ b/sdcflows/workflows/tests/test_integration.py
@@ -21,7 +21,6 @@
 # https://www.nipreps.org/community/licensing/
 #
 """Test the base workflow."""
-import os
 from pathlib import Path
 import json
 import pytest
@@ -37,7 +36,7 @@
 )
 
 
-@pytest.mark.skipif(os.getenv("GITHUB_ACTIONS") == "true", reason="this is GH Actions")
+@pytest.mark.slow
 @pytest.mark.parametrize("pe0", ["LR", "PA"])
 @pytest.mark.parametrize("mode", ["pepolar", "phasediff"])
 def test_integration_wf(tmpdir, workdir, outdir, datadir, pe0, mode):
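
Note on the new test selection scheme: the `slow`/`veryslow` marks registered in `pyproject.toml` above replace the old `GITHUB_ACTIONS`/`TRAVIS` skips, and `--strict-markers` in `addopts` makes pytest error out on any unregistered mark. A minimal sketch of the local invocations equivalent to the three CI matrix rows follows; only the `-m` expressions come from this diff, and the editable-install line is an illustrative assumption, not part of the patch:

    # Assumption: any working sdcflows environment; editable install shown for illustration.
    pip install -e .

    # Default matrix rows: skip everything marked slow (veryslow tests also carry slow here)
    pytest -v -m "not slow" sdcflows/

    # The Python 3.9 row: run the slow tests, but not the very slow ones
    pytest -v -m "slow and not veryslow" sdcflows/

    # The Python 3.12 row: only tests marked veryslow (>5 min, e.g., the SDC-SyN workflow)
    pytest -v -m "veryslow" sdcflows/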