diff --git a/.github/workflows/travis.yml b/.github/workflows/travis.yml index 7efe2c74..b9e7c3d8 100644 --- a/.github/workflows/travis.yml +++ b/.github/workflows/travis.yml @@ -1,65 +1,89 @@ name: Deps & CI -on: [push] +on: + push: + branches: [ '*' ] + pull_request: + branches: [ master, 'maint/*' ] + schedule: + - cron: '0 0 * * 1' + # Allow job to be triggered manually from GitHub interface + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +permissions: + contents: read jobs: - build-linux: - if: "!contains(github.event.head_commit.message, '[skip ci]' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name != 'nipy/nitransforms'))" + cache-test-data: + if: "!contains(github.event.head_commit.message, '[skip ci]')" runs-on: ubuntu-latest env: TEST_DATA_HOME: /home/runner/testdata/nitransforms-tests - strategy: - max-parallel: 5 - matrix: - python-version: ['3.8', '3.9', '3.10', '3.11'] - + outputs: + SHA: ${{ steps.test-head.outputs.SHA }} steps: - name: Git settings (pacify DataLad) run: | git config --global user.name 'NiPreps Bot' git config --global user.email 'nipreps@gmail.com' - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - uses: actions/cache@v3 - id: conda - with: - path: | - /usr/share/miniconda/pkgs - /home/runner/.cache/pip - key: python-${{ matrix.python-version }}-v1 - restore-keys: | - python-${{ matrix.python-version }}- - name: Install DataLad run: | - $CONDA/bin/conda install -c conda-forge git-annex datalad pip pytest - $CONDA/bin/python -m pip install datalad-osf + $CONDA/bin/conda install -c conda-forge git-annex + python -m pip install datalad datalad-next datalad-osf + - name: Check remote HEAD + id: test-head + run: | + git ls-remote https://gin.g-node.org/oesteban/nitransforms-tests \ + | awk '/HEAD/{ print "SHA=" $1 }' >> $GITHUB_OUTPUT - uses: 
actions/cache@v3 with: path: ${{ env.TEST_DATA_HOME }} - key: data-cache-v2 + key: data-cache-v2-${{ steps.test-head.outputs.SHA }} restore-keys: | + data-cache-v2-${{ steps.test-head.outputs.SHA }} data-cache-v2 - name: Install test data run: | export PATH=$CONDA/bin:$PATH mkdir -p /home/runner/testdata - cd /home/runner/testdata - $CONDA/bin/datalad install https://gin.g-node.org/oesteban/nitransforms-tests - $CONDA/bin/datalad update --merge -d nitransforms-tests/ - $CONDA/bin/datalad get -d nitransforms-tests/ + datalad install -s https://gin.g-node.org/oesteban/nitransforms-tests $TEST_DATA_HOME + datalad update --merge -d $TEST_DATA_HOME + datalad get -J 2 -d $TEST_DATA_HOME + + build-linux: + if: "!contains(github.event.head_commit.message, '[skip ci]')" + runs-on: ubuntu-latest + needs: [cache-test-data] + env: + TEST_DATA_HOME: /home/runner/testdata/nitransforms-tests + strategy: + max-parallel: 5 + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + steps: + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - uses: actions/cache/restore@v3 + with: + path: ${{ env.TEST_DATA_HOME }} + key: data-cache-v2-${{ needs.cache-test-data.outputs.SHA }} - uses: actions/checkout@v3 - name: Install minimal dependencies run: | - $CONDA/bin/pip install .[tests] + pip install .[tests] - name: Run pytest run: | - $CONDA/bin/pytest -v --cov nitransforms --cov-config .coveragerc --cov-report xml:cov.xml --doctest-modules nitransforms/ + pytest -v --cov nitransforms --cov-config .coveragerc --cov-report xml:cov.xml --doctest-modules nitransforms/ - name: Submit code coverage uses: codecov/codecov-action@v3 diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index d7c7f9c5..1045cde1 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -12,7 +12,7 @@ from os import cpu_count from functools import partial from pathlib import Path -from 
typing import Callable, TypeVar +from typing import Callable, TypeVar, Union import numpy as np from nibabel.loadsave import load as _nbload @@ -144,8 +144,8 @@ async def _apply_serial( def apply( transform: TransformBase, - spatialimage: str | Path | SpatialImage, - reference: str | Path | SpatialImage = None, + spatialimage: Union[str, Path, SpatialImage], + reference: Union[str, Path, SpatialImage, None] = None, order: int = 3, mode: str = "constant", cval: float = 0.0, @@ -154,7 +154,7 @@ def apply( dtype_width: int = 8, serialize_nvols: int = SERIALIZE_VOLUME_WINDOW_WIDTH, max_concurrent: int = min(cpu_count(), 12), -) -> SpatialImage | np.ndarray: +) -> Union[SpatialImage, np.ndarray]: """ Apply a transformation to an image, resampling on the reference spatial object.