name: Test (and Deploy to PyPI on tag)
on:
  # Only run on pushes to main, or when version tags are pushed
  push:
    branches:
      - "main"
    tags:
      - "v**"
  # Run on all pull requests
  pull_request:
  schedule:
    # Runs at 6:10am UTC on Monday
    - cron: '10 6 * * 1'
  # Allow manual runs via workflow dispatch from GitHub
  workflow_dispatch:
concurrency:
  # Cancel an in-progress run of this workflow when new changes are pushed
  # on top of the HEAD of the same branch/ref, which triggers a fresh run
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  linting:
    runs-on: ubuntu-latest
    name: Check Linting
    steps:
      - uses: neuroinformatics-unit/actions/lint@v2
  manifest:
    name: Check Manifest
    runs-on: ubuntu-latest
    steps:
      - uses: neuroinformatics-unit/actions/check_manifest@v2
  test:
    needs: [linting, manifest]
    name: Run package tests
    timeout-minutes: 120
    runs-on: ${{ matrix.os }}
    env:
      KERAS_BACKEND: torch
      CELLFINDER_TEST_DEVICE: cpu
      # pooch cache dir
      BRAINGLOBE_TEST_DATA_DIR: "~/.pooch_cache"
    strategy:
      matrix:
        # Run all supported Python versions on Linux
        os: [ubuntu-latest]
        python-version: ["3.10", "3.11", "3.12"]
        # Include one Windows and one macOS (arm-based) run
        include:
          - os: macos-latest
            python-version: "3.12"
          - os: windows-latest
            python-version: "3.12"
    steps:
      - uses: actions/checkout@v4
      - name: Cache pooch data
        uses: actions/cache@v4
        with:
          path: "~/.pooch_cache"
          # key on the pooch registry so the cache is refreshed if the data URLs change
          key: ${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/pooch_registry.txt') }}
      # Cache the brainglobe directory (atlases and model data) so we don't have to re-download it every time
      - name: Cache brainglobe directory
        uses: actions/cache@v3
        with:
          path: | # ensure we don't cache any interrupted atlas download and extraction, if e.g. we cancel the workflow manually
            ~/.brainglobe
            !~/.brainglobe/atlas.tar.gz
          key: brainglobe
      # Setup pyqt libraries
      - name: Setup qtpy libraries
        uses: tlambert03/setup-qt-libs@v1
      # Setup VTK with headless display
      - uses: pyvista/setup-headless-display-action@v2
      # Run all tests
      - uses: neuroinformatics-unit/actions/test@v2
        with:
          python-version: ${{ matrix.python-version }}
          secret-codecov-token: ${{ secrets.CODECOV_TOKEN }}
          use-xvfb: true
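  # The job below repeats the test suite with JIT compilation disabled; its
  # coverage is uploaded under the dedicated "numba" codecov flag set in its final step.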
  test_numba_disabled:
    needs: [linting, manifest]
    name: Run tests with numba disabled
    timeout-minutes: 120
    runs-on: ubuntu-latest
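    # NUMBA_DISABLE_JIT=1 disables numba's JIT and PYTORCH_JIT=0 disables TorchScript,
    # so the affected functions run as plain interpreted Python (presumably to make
    # those code paths visible to coverage)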
    env:
      NUMBA_DISABLE_JIT: "1"
      PYTORCH_JIT: "0"
      # pooch cache dir
      BRAINGLOBE_TEST_DATA_DIR: "~/.pooch_cache"
    steps:
      - uses: actions/checkout@v4
      - name: Cache brainglobe directory
        uses: actions/cache@v3
        with:
          path: | # ensure we don't cache any interrupted atlas download and extraction, if e.g. we cancel the workflow manually
            ~/.brainglobe
            !~/.brainglobe/atlas.tar.gz
          key: brainglobe
      - name: Cache pooch data
        uses: actions/cache@v4
        with:
          path: "~/.pooch_cache"
          key: ${{ runner.os }}-3.10-${{ hashFiles('**/pooch_registry.txt') }}
      # Setup pyqt libraries
      - name: Setup qtpy libraries
        uses: tlambert03/setup-qt-libs@v1
      # Setup VTK with headless display
      - uses: pyvista/setup-headless-display-action@v2
      # Run test suite with numba disabled
      - uses: neuroinformatics-unit/actions/test@v2
        with:
          python-version: "3.12"
          secret-codecov-token: ${{ secrets.CODECOV_TOKEN }}
          codecov-flags: "numba"
  # Run brainglobe-workflows brainmapper-CLI tests to check for breakages
  test_brainmapper_cli:
    needs: [linting, manifest]
    name: Run brainmapper tests to check for breakages
    timeout-minutes: 120
    runs-on: ubuntu-latest
    env:
      KERAS_BACKEND: torch
      CELLFINDER_TEST_DEVICE: cpu
    steps:
      - name: Cache brainglobe directory
        uses: actions/cache@v3
        with:
          path: | # ensure we don't cache any interrupted atlas download and extraction, if e.g. we cancel the workflow manually
            ~/.brainglobe
            !~/.brainglobe/atlas.tar.gz
          key: brainglobe
      - name: Checkout brainglobe-workflows
        uses: actions/checkout@v3
        with:
          repository: 'brainglobe/brainglobe-workflows'
      - name: Set up Python 3.12
        uses: actions/setup-python@v3
        with:
          python-version: "3.12"
      - name: Install test dependencies
        run: |
          python -m pip install --upgrade pip wheel
          # Install cellfinder from the latest SHA on this branch
          python -m pip install "cellfinder @ git+$GITHUB_SERVER_URL/$GITHUB_REPOSITORY@$GITHUB_SHA"
          # Install checked out copy of brainglobe-workflows
          python -m pip install .[dev]
      - name: Run brainmapper tests in brainglobe-workflows
        run: |
          python -m pytest --color=yes -v tests/brainmapper
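  # The two jobs below run only for pushed version tags (the "v**" tag trigger above):
  # they build the sdist/wheel and publish it to PyPI.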
  build_sdist_wheel:
    name: Build source distribution and wheel
    needs: [test, test_numba_disabled]
    if: github.event_name == 'push' && github.ref_type == 'tag'
    runs-on: ubuntu-latest
    steps:
      - uses: neuroinformatics-unit/actions/build_sdist_wheels@v2
  upload_all:
    name: Publish build distributions
    needs: [build_sdist_wheel]
    if: github.event_name == 'push' && github.ref_type == 'tag'
    runs-on: ubuntu-latest
    steps:
      - uses: neuroinformatics-unit/actions/upload_pypi@v2
        with:
          secret-pypi-key: ${{ secrets.TWINE_API_KEY }}
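# Publishing a release (assuming the usual vX.Y.Z tag naming) amounts to, e.g.:
#   git tag v1.2.3 && git push origin v1.2.3
# which matches the "v**" tag trigger above and runs the build and upload jobs.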