diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml
deleted file mode 100644
index 6dac22f0..00000000
--- a/.github/workflows/pythonpackage.yml
+++ /dev/null
@@ -1,96 +0,0 @@
-# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
-# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
-
-name: Python package
-
-on:
-  push:
-    branches: [ '*' ]
-    tags: [ '*' ]
-  pull_request:
-    branches: [ master, 'maint/*' ]
-
-jobs:
-  build:
-    if: "!contains(github.event.head_commit.message, '[skip ci]')"
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
-
-    steps:
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v5
-        with:
-          python-version: ${{ matrix.python-version }}
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-      - name: Build package
-        run: |
-          pipx run build
-      - name: Determine expected version
-        run: |
-          python -m venv /tmp/getversion
-          source /tmp/getversion/bin/activate
-          python -m pip install setuptools_scm
-
-          # Interpolate version
-          if [[ "$GITHUB_REF" == refs/tags/* ]]; then
-            TAG=${GITHUB_REF##*/}
-          fi
-          THISVERSION=$( python -m setuptools_scm )
-          THISVERSION=${TAG:-$THISVERSION}
-          echo "Expected VERSION: \"${THISVERSION}\""
-          echo "THISVERSION=${THISVERSION}" >> ${GITHUB_ENV}
-
-      - name: Install in confined environment [sdist]
-        run: |
-          python -m venv /tmp/install_sdist
-          source /tmp/install_sdist/bin/activate
-          python -m pip install --upgrade pip
-          python -m pip install dist/nitransforms*.tar.gz
-          INSTALLED_VERSION=$(python -c 'import nitransforms; print(nitransforms.__version__, end="")')
-          echo "VERSION: \"${THISVERSION}\""
-          echo "INSTALLED: \"${INSTALLED_VERSION}\""
-          test "${INSTALLED_VERSION}" = "${THISVERSION}"
-
-      - name: Install in confined environment [wheel]
-        run: |
-          python -m venv /tmp/install_wheel
-          source /tmp/install_wheel/bin/activate
-          python -m pip install --upgrade pip
-          python -m pip install dist/nitransforms*.whl
-          INSTALLED_VERSION=$(python -c 'import nitransforms; print(nitransforms.__version__, end="")')
-          echo "INSTALLED: \"${INSTALLED_VERSION}\""
-          test "${INSTALLED_VERSION}" = "${THISVERSION}"
-
-      - name: Install in confined environment [pip install .]
-        run: |
-          python -m venv /tmp/setup_install
-          source /tmp/setup_install/bin/activate
-          python -m pip install --upgrade pip wheel
-          python -m pip install .
-          INSTALLED_VERSION=$(python -c 'import nitransforms; print(nitransforms.__version__, end="")')
-          echo "INSTALLED: \"${INSTALLED_VERSION}\""
-          test "${INSTALLED_VERSION}" = "${THISVERSION}"
-
-      - name: Install in confined environment [pip install -e .]
-        run: |
-          python -m venv /tmp/setup_develop
-          source /tmp/setup_develop/bin/activate
-          python -m pip install pip
-          python -m pip install --upgrade pip wheel
-          python -m pip install -e .
-          INSTALLED_VERSION=$(python -c 'import nitransforms; print(nitransforms.__version__, end="")')
-          echo "INSTALLED: \"${INSTALLED_VERSION}\""
-          test "${INSTALLED_VERSION}" = "${THISVERSION}"
-
-  flake8:
-    if: "!contains(github.event.head_commit.message, '[skip ci]')"
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Python 3
-        uses: actions/setup-python@v5
-      - run: pipx run flake8 nitransforms
diff --git a/.github/workflows/tox.yml b/.github/workflows/tox.yml
new file mode 100644
index 00000000..49e8211a
--- /dev/null
+++ b/.github/workflows/tox.yml
@@ -0,0 +1,130 @@
+name: Tox
+
+on:
+  push:
+    branches: [ master, main, 'maint/*' ]
+    tags: [ '*' ]
+  pull_request:
+    branches: [ master, main, 'maint/*' ]
+  schedule:
+    - cron: '0 0 * * *'
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  # Force tox and pytest to use color
+  FORCE_COLOR: true
+  TEST_DATA_HOME: /home/runner/testdata/nitransforms-tests
+
+jobs:
+  cache-test-data:
+    if: "!contains(github.event.head_commit.message, '[skip ci]')"
+    runs-on: ubuntu-latest
+    outputs:
+      SHA: ${{ steps.test-head.outputs.SHA }}
+    steps:
+      - name: Install the latest version of uv
+        uses: astral-sh/setup-uv@v3
+      - name: Git settings (pacify DataLad)
+        run: |
+          git config --global user.name 'NiPreps Bot'
+          git config --global user.email 'nipreps@gmail.com'
+      - name: Install DataLad
+        run: |
+          $CONDA/bin/conda install -c conda-forge git-annex
+          uv tool install --with=datalad-next --with=datalad-osf datalad
+      - name: Check remote HEAD
+        id: test-head
+        run: |
+          git ls-remote https://gin.g-node.org/oesteban/nitransforms-tests \
+            | awk '/HEAD/{ print "SHA=" $1 }' >> $GITHUB_OUTPUT
+
+      - uses: actions/cache@v4
+        with:
+          path: ${{ env.TEST_DATA_HOME }}
+          key: data-cache-v2-${{ steps.test-head.outputs.SHA }}
+          restore-keys: |
+            data-cache-v2-${{ steps.test-head.outputs.SHA }}
+            data-cache-v2
+
+      - name: Install test data
+        run: |
+          export PATH=$CONDA/bin:$PATH
+          mkdir -p /home/runner/testdata
+
+          datalad install -s https://gin.g-node.org/oesteban/nitransforms-tests $TEST_DATA_HOME
+          datalad update --merge -d $TEST_DATA_HOME
+          datalad get -J 2 -d $TEST_DATA_HOME
+
+  test:
+    runs-on: ubuntu-latest
+    needs: [cache-test-data]
+    strategy:
+      matrix:
+        python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
+        dependencies: [latest, pre]
+        include:
+          - python-version: "3.9"
+            dependencies: min
+        exclude:
+          # Do not test pre-releases for versions out of SPEC0
+          - python-version: "3.9"
+            dependencies: pre
+          - python-version: "3.10"
+            dependencies: pre
+
+    env:
+      DEPENDS: ${{ matrix.dependencies }}
+
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: recursive
+          fetch-depth: 0
+      - name: Install the latest version of uv
+        uses: astral-sh/setup-uv@v3
+      - uses: actions/cache/restore@v4
+        with:
+          path: ${{ env.TEST_DATA_HOME }}
+          key: data-cache-v2-${{ needs.cache-test-data.outputs.SHA }}
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Display Python version
+        run: python -c "import sys; print(sys.version)"
+      - name: Install tox
+        run: |
+          uv tool install tox --with=tox-uv --with=tox-gh-actions
+      - name: Show tox config
+        run: tox c
+      - name: Run tox
+        run: tox -v --exit-and-dump-after 1200
+      - uses: codecov/codecov-action@v4
+        with:
+          file: coverage.xml
+          token: ${{ secrets.CODECOV_TOKEN }}
+        if: ${{ always() }}
+
+  checks:
+    runs-on: 'ubuntu-latest'
+    continue-on-error: true
+    strategy:
+      matrix:
+        check: ['style']
+
+    steps:
+      - uses: actions/checkout@v4
+      - name: Install the latest version of uv
+        uses: astral-sh/setup-uv@v3
+      - name: Show tox config
+        run: uvx tox c
+      - name: Show tox config (this call)
+        run: uvx tox c -e ${{ matrix.check }}
+      - name: Run check
+        run: uvx tox -e ${{ matrix.check }}
diff --git a/.github/workflows/travis.yml b/.github/workflows/travis.yml
deleted file mode 100644
index 2a9473ed..00000000
--- a/.github/workflows/travis.yml
+++ /dev/null
@@ -1,91 +0,0 @@
-name: Deps & CI
-
-on:
-  push:
-    branches: [ '*' ]
-  pull_request:
-    branches: [ master, 'maint/*' ]
-  schedule:
-    - cron: '0 0 * * 1'
-  # Allow job to be triggered manually from GitHub interface
-  workflow_dispatch:
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-permissions:
-  contents: read
-
-jobs:
-  cache-test-data:
-    if: "!contains(github.event.head_commit.message, '[skip ci]')"
-    runs-on: ubuntu-latest
-    env:
-      TEST_DATA_HOME: /home/runner/testdata/nitransforms-tests
-    outputs:
-      SHA: ${{ steps.test-head.outputs.SHA }}
-    steps:
-      - name: Git settings (pacify DataLad)
-        run: |
-          git config --global user.name 'NiPreps Bot'
-          git config --global user.email 'nipreps@gmail.com'
-      - name: Install DataLad
-        run: |
-          $CONDA/bin/conda install -c conda-forge git-annex
-          python -m pip install datalad datalad-next datalad-osf
-      - name: Check remote HEAD
-        id: test-head
-        run: |
-          git ls-remote https://gin.g-node.org/oesteban/nitransforms-tests \
-            | awk '/HEAD/{ print "SHA=" $1 }' >> $GITHUB_OUTPUT
-
-      - uses: actions/cache@v4
-        with:
-          path: ${{ env.TEST_DATA_HOME }}
-          key: data-cache-v2-${{ steps.test-head.outputs.SHA }}
-          restore-keys: |
-            data-cache-v2-${{ steps.test-head.outputs.SHA }}
-            data-cache-v2
-
-      - name: Install test data
-        run: |
-          export PATH=$CONDA/bin:$PATH
-          mkdir -p /home/runner/testdata
-
-          datalad install -s https://gin.g-node.org/oesteban/nitransforms-tests $TEST_DATA_HOME
-          datalad update --merge -d $TEST_DATA_HOME
-          datalad get -J 2 -d $TEST_DATA_HOME
-
-  build-linux:
-    if: "!contains(github.event.head_commit.message, '[skip ci]')"
-    runs-on: ubuntu-latest
-    needs: [cache-test-data]
-    env:
-      TEST_DATA_HOME: /home/runner/testdata/nitransforms-tests
-    strategy:
-      max-parallel: 5
-      matrix:
-        python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
-    steps:
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v5
-        with:
-          python-version: ${{ matrix.python-version }}
-      - uses: actions/cache/restore@v4
-        with:
-          path: ${{ env.TEST_DATA_HOME }}
-          key: data-cache-v2-${{ needs.cache-test-data.outputs.SHA }}
-
-      - uses: actions/checkout@v4
-      - name: Install minimal dependencies
-        run: |
-          pip install .[tests]
-      - name: Run pytest
-        run: |
-          pytest -v --cov nitransforms --cov-config .coveragerc --cov-report xml:cov.xml --doctest-modules nitransforms/
-
-      - name: Submit code coverage
-        uses: codecov/codecov-action@v4
-        with:
-          files: cov.xml
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index 34413ca6..4cb47fff 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -1,19 +1,21 @@
-# Read the Docs configuration file
-# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
-
 version: 2
 
 build:
-  os: ubuntu-20.04
+  os: ubuntu-lts-latest
   tools:
-    python: "3.8"
-
-sphinx:
-  configuration: docs/conf.py
+    python: latest
 
-python:
-  install:
-    - path: .
-      extra_requirements:
-        - all
-    - requirements: docs/requirements.txt
+  jobs:
+    pre_create_environment:
+      - asdf plugin add uv
+      - asdf install uv latest
+      - asdf global uv latest
+      # Turn `python -m virtualenv` into `python -c pass`
+      - truncate --size 0 $( dirname $( uv python find ) )/../lib/python3*/site-packages/virtualenv/__main__.py
+    post_create_environment:
+      - uv venv $READTHEDOCS_VIRTUALENV_PATH
+      # Turn `python -m pip` into `python -c pass`
+      - truncate --size 0 $( ls -d $READTHEDOCS_VIRTUALENV_PATH/lib/python3* )/site-packages/pip.py
+    post_install:
+      - VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH uv pip install --cache-dir $READTHEDOCS_VIRTUALENV_PATH/../../uv_cache .[niftiext]
+      - VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH uv pip install --cache-dir $READTHEDOCS_VIRTUALENV_PATH/../../uv_cache -r docs/requirements.txt
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index 1d115b13..00000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,7 +0,0 @@
-recursive-exclude .circleci/ *
-recursive-exclude .github/ *
-recursive-exclude docker/ *
-recursive-exclude docs/ *
-recursive-exclude joss/ *
-recursive-exclude nitransforms/tests *
-exclude .codecov.yml .coveragerc .gitignore .pep8speaks.yml .travis.yml Dockerfile
\ No newline at end of file
diff --git a/docs/conf.py b/docs/conf.py
index 18e532a1..025d649b 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -76,7 +76,7 @@
 #
 # This is also used if you do content translation via gettext catalogs.
 # Usually you set "language" from the command line for these cases.
-language = None
+language = 'en'
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 8a932a22..d74f8faf 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -4,5 +4,5 @@ packaging
 pydot>=1.2.3
 pydotplus
 sphinx-argparse
-sphinx ~= 4.0
-sphinx_rtd_theme
\ No newline at end of file
+sphinx~=7.0
+sphinx_rtd_theme
diff --git a/pyproject.toml b/pyproject.toml
index 45a691bb..63876722 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -2,6 +2,60 @@
 requires = ["setuptools >= 45", "setuptools_scm[toml]>=6.2"]
 build-backend = "setuptools.build_meta"
 
+[project]
+name = "nitransforms"
+authors = [{name = "The NiPy developers", email = "nipreps@gmail.com"}]
+readme = "README.md"
+classifiers = [
+    "Development Status :: 2 - Pre-Alpha",
+    "Intended Audience :: Science/Research",
+    "Topic :: Scientific/Engineering :: Image Recognition",
+    "License :: OSI Approved :: BSD License",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
+]
+description = "NiTransforms -- Neuroimaging spatial transforms in Python."
+license = {text = "MIT License"}
+requires-python = ">= 3.9"
+dependencies = [
+    "numpy >= 1.21",
+    "scipy >= 1.8",
+    "nibabel >= 4.0",
+    "h5py >= 3.9",
+]
+dynamic = ["version"]
+
+[project.urls]
+Homepage = "https://github.com/nipy/nitransforms"
+Manuscript = "https://doi.org/10.31219/osf.io/8aq7b"
+NiBabel = "https://github.com/nipy/nibabel/pull/656"
+
+[project.optional-dependencies]
+niftiext = ["lxml >= 4.6"]
+test = [
+    "pytest >= 6",
+    "pytest-cov >= 2.11",
+    "pytest-env",
+    "pytest-xdist >= 2.5",
+    "coverage[toml] >= 5.2.1",
+    "nitransforms[niftiext]",
+]
+# Aliases
+niftiexts = ["nitransforms[niftiext]"]
+tests = ["nitransforms[test]"]
+all = [
+    "nitransforms[test]",
+]
+
+[project.scripts]
+nb-transform = "nitransforms.cli:main"
+
+[tool.setuptools.packages.find]
+include = ["nitransforms*"]
+
 [tool.setuptools_scm]
 write_to = "nitransforms/_version.py"
 write_to_template = """\
@@ -9,3 +63,24 @@ write_to_template = """\
 __version__ = "{version}"
 """
 fallback_version = "0.0"
+
+[tool.pytest.ini_options]
+minversion = "6"
+testpaths = ["nitransforms"]
+log_cli_level = "INFO"
+xfail_strict = true
+norecursedirs = [".git"]
+addopts = [
+  "-svx",
+  "-ra",
+  "--strict-config",
+  "--strict-markers",
+  "--doctest-modules",
+  "--cov=nitransforms",
+  "--cov-report=xml",
+  "--cov-config=pyproject.toml",
+]
+doctest_optionflags = "ALLOW_UNICODE NORMALIZE_WHITESPACE ELLIPSIS"
+env = "PYTHONHASHSEED=0"
+filterwarnings = ["ignore::DeprecationWarning"]
+junit_family = "xunit2"
diff --git a/setup.cfg b/setup.cfg
index 4607288f..f355be94 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,66 +1,3 @@
-[metadata]
-author = The NiPy developers
-author_email = nipreps@gmail.com
-classifiers =
-    Development Status :: 2 - Pre-Alpha
-    Intended Audience :: Science/Research
-    Topic :: Scientific/Engineering :: Image Recognition
-    License :: OSI Approved :: BSD License
-    Programming Language :: Python :: 3.8
-    Programming Language :: Python :: 3.9
-    Programming Language :: Python :: 3.10
-    Programming Language :: Python :: 3.11
-description = NiTransforms -- Neuroimaging spatial transforms in Python.
-license = MIT License
-long_description = file:README.md
-long_description_content_type = text/markdown; charset=UTF-8
-provides = nitransforms
-project_urls =
-    Manuscript=https://doi.org/10.31219/osf.io/8aq7b
-    NiBabel=https://github.com/nipy/nibabel/pull/656
-url = https://github.com/nipy/nitransforms
-
-[options]
-python_requires = >= 3.8
-install_requires =
-    numpy ~= 1.21
-    scipy >= 1.6.0
-    nibabel >= 3.0
-    h5py
-test_requires =
-    pytest
-    pytest-cov
-    pytest-env
-    nose
-    codecov
-setup_requires =
-    setuptools_scm
-    toml
-packages = find:
-include_package_data = True
-
-[options.extras_require]
-niftiext =
-    lxml
-niftiexts =
-    %(niftiext)s
-test =
-    pytest
-    pytest-cov
-    pytest-env
-    codecov
-    lxml
-tests =
-    %(test)s
-
-all =
-    %(niftiext)s
-    %(test)s
-
-[options.entry_points]
-console_scripts =
-    nb-transform = nitransforms.cli:main
-
 [flake8]
 max-line-length = 99
 doctests = False
@@ -68,13 +5,3 @@ ignore =
     E266
     E231
     W503
-
-[tool:pytest]
-norecursedirs = .git
-addopts = -svx --doctest-modules
-doctest_optionflags = ALLOW_UNICODE NORMALIZE_WHITESPACE ELLIPSIS
-env =
-    PYTHONHASHSEED=0
-filterwarnings =
-    ignore::DeprecationWarning
-junit_family=xunit2
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 78ec8923..00000000
--- a/setup.py
+++ /dev/null
@@ -1,7 +0,0 @@
-"""Prepare package for distribution."""
-from setuptools import setup
-
-if __name__ == "__main__":
-    setup(
-        name="nitransforms",
-    )
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 00000000..fe549039
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,110 @@
+[tox]
+requires =
+  tox>=4
+envlist =
+  py3{9,10,11,12,13}-latest
+  py39-min
+  py3{11,12,13}-pre
+skip_missing_interpreters = true
+
+# Configuration that allows us to split tests across GitHub runners effectively
+[gh-actions]
+python =
+  3.9: py39
+  3.10: py310
+  3.11: py311
+  3.12: py312
+  3.13: py313
+
+[gh-actions:env]
+DEPENDS =
+  min: min
+  latest: latest
+  pre: pre
+
+[testenv]
+description = Pytest with coverage
+labels = test
+pip_pre =
+  pre: true
+pass_env =
+  # Overrides default location for finding test data
+  TEST_DATA_HOME
+  # getpass.getuser() sources for Windows:
+  LOGNAME
+  USER
+  LNAME
+  USERNAME
+  # Pass user color preferences through
+  PY_COLORS
+  FORCE_COLOR
+  NO_COLOR
+  CLICOLOR
+  CLICOLOR_FORCE
+  PYTHON_GIL
+extras = tests
+setenv =
+  FSLOUTPUTTYPE=NIFTI_GZ
+  pre: PIP_EXTRA_INDEX_URL=https://pypi.anaconda.org/scientific-python-nightly-wheels/simple
+  pre: UV_EXTRA_INDEX_URL=https://pypi.anaconda.org/scientific-python-nightly-wheels/simple
+uv_resolution =
+  min: lowest-direct
+
+commands =
+  python -c "import nitransforms; print(nitransforms.__version__)"
+  pytest --durations=20 --durations-min=1.0 --cov-report term-missing {posargs:-n auto}
+
+[testenv:style]
+description = Check our style guide
+labels = check
+deps =
+  flake8
+skip_install = true
+commands =
+  flake8 nitransforms
+
+[testenv:style-fix]
+description = Auto-apply style guide to the extent possible
+labels = pre-release
+deps =
+  ruff
+skip_install = true
+commands =
+  ruff check --fix
+  ruff format
+  ruff check --select ISC001
+
+[testenv:spellcheck]
+description = Check spelling
+labels = check
+deps =
+  codespell[toml]
+skip_install = true
+commands =
+  codespell . {posargs}
+
+[testenv:build{,-strict}]
+labels =
+  check
+  pre-release
+deps =
+  build
+  twine
+skip_install = true
+set_env =
+  # Ignore specific known warnings:
+  # https://github.com/pypa/pip/issues/11684
+  # https://github.com/pypa/pip/issues/12243
+  strict: PYTHONWARNINGS=error,once:pkg_resources is deprecated as an API.:DeprecationWarning:pip._internal.metadata.importlib._envs,once:Unimplemented abstract methods {'locate_file'}:DeprecationWarning:pip._internal.metadata.importlib._dists
+commands =
+  python -m build
+  python -m twine check dist/*
+
+[testenv:publish]
+depends = build
+labels = release
+deps =
+  twine
+skip_install = true
+commands =
+  python -m twine upload dist/*