Integration test for detect+track video #242

Merged 28 commits from smg/detect-and-track-integration-test into main on Dec 10, 2024.

Commits:
3ed60c4  Basic test passes (sfmig, Nov 12, 2024)
be2db90  Added tests for save frames and save video functionality (sfmig, Nov 12, 2024)
80dd915  Factor out test inputs as fixture (sfmig, Nov 12, 2024)
29e3508  Fix fixture name and parametrisation (sfmig, Nov 12, 2024)
b03b78f  Rename integration test modules and refactor fixture (sfmig, Nov 12, 2024)
f2f1647  Parametrise output_dir (sfmig, Nov 12, 2024)
c282802  Clarify CLI help (sfmig, Nov 12, 2024)
04d3f49  Remove home mocking commented out fixture (sfmig, Nov 12, 2024)
01380a1  Add caching to testing on CI (sfmig, Nov 12, 2024)
86ccd9b  Small additions (sfmig, Nov 12, 2024)
f612c59  Dummy commit to check if cache is shared (sfmig, Nov 14, 2024)
46daa32  Remove zip from GIN repo and pooch registry (sfmig, Nov 14, 2024)
98a9a13  Fix tests by forcing download of all mlflow files (sfmig, Nov 14, 2024)
3b4f07a  Replace macos-13 for macos-12 as intel macos (sfmig, Nov 14, 2024)
9523145  Revert "Replace macos-13 for macos-12 as intel macos" (sfmig, Nov 14, 2024)
413636b  Make parametrisation of output_dir_name more explicit (sfmig, Nov 14, 2024)
b90fd03  Remove output directory parametrisation (cover as a unit test instead) (sfmig, Nov 14, 2024)
629bccf  Fix deprecation warnings (sfmig, Nov 14, 2024)
0aabf0f  Revert "Remove zip from GIN repo and pooch registry" (sfmig, Nov 14, 2024)
32952e4  Fix docstring fixture (sfmig, Dec 9, 2024)
ad14219  Merge branch 'main' into smg/detect-and-track-integration-test (sfmig, Dec 9, 2024)
ea6adc6  Remove timestamp (sfmig, Dec 9, 2024)
dc42f23  Add no timestamp flag parametrisation (sfmig, Dec 10, 2024)
9dfe0af  Mark integration tests as slow (sfmig, Dec 10, 2024)
d3ec68e  Skip slow tests in macos-13 (sfmig, Dec 10, 2024)
14b07f2  Fix optional type for py3.9 (sfmig, Dec 10, 2024)
c9a27a0  Correctly pass tox parameters to pytest (sfmig, Dec 10, 2024)
f65dfb2  Fix not-equal (sfmig, Dec 10, 2024)
21 changes: 20 additions & 1 deletion .github/workflows/test_and_deploy.yml
@@ -37,7 +37,26 @@ jobs:
           - os: macos-latest  # M1 macOS
             python-version: "3.10"
     steps:
-      - uses: neuroinformatics-unit/actions/test@v2
+      - name: Cache test data
+        uses: actions/cache@v4
+        with:
+          path: |
+            ~/.crabs-exploration-test-data/*
+          key: cached-test-data
+          enableCrossOsArchive: true
+      - name: Run all tests except those marked slow (if macos-13)
+        # we observed that macos-13 runners in CI are sometimes
+        # about twice as slow as the others. See
+        # https://github.com/actions/runner-images/issues/3885#issuecomment-1848423579
+        if: matrix.os == 'macos-13'
+        uses: neuroinformatics-unit/actions/test@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+          secret-codecov-token: ${{ secrets.CODECOV_TOKEN }}
+          tox-args: "-- -m 'not slow'"
+      - name: Run all tests (if not macos-13)
+        if: matrix.os != 'macos-13'
+        uses: neuroinformatics-unit/actions/test@v2
         with:
           python-version: ${{ matrix.python-version }}
           secret-codecov-token: ${{ secrets.CODECOV_TOKEN }}
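For the `-m 'not slow'` filter above to select tests cleanly, the `slow` marker used by the new integration test must be registered with pytest. The registration itself is not part of this diff; a minimal sketch, assuming it lives in the root conftest.py via pytest's standard hook:

def pytest_configure(config):
    # register the custom "slow" marker so pytest does not emit
    # PytestUnknownMarkWarning when @pytest.mark.slow is used
    config.addinivalue_line("markers", "slow: mark a test as slow to run")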
1 change: 1 addition & 0 deletions conftest.py
@@ -1,5 +1,6 @@
 """Pytest configuration file."""

 pytest_plugins = [
+    "tests.fixtures.integration",
     "tests.fixtures.frame_extraction",
 ]
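Listing the new fixtures module in `pytest_plugins` makes the fixtures defined in `tests/fixtures/integration.py` available to every test module without explicit imports. Note that pytest requires `pytest_plugins` to be declared in the top-level conftest.py, as done here.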
5 changes: 4 additions & 1 deletion pyproject.toml
@@ -46,6 +46,8 @@ dev = [
   "ruff",
   "setuptools_scm",
   "check-manifest",
+  "pooch",
+  "tqdm",
   # "codespell",
   # "pandas-stubs",
   # "types-attrs",
@@ -146,5 +148,6 @@ python =
 extras =
     dev
 commands =
-    pytest -v --color=yes --cov=crabs --cov-report=xml
+    pytest -v --color=yes --cov=crabs --cov-report=xml {posargs}
 """
+# {posargs} forwards extra command-line arguments to pytest
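With `{posargs}` in place, anything after tox's `--` separator reaches pytest unchanged. For example, the CI workflow above passes `tox-args: "-- -m 'not slow'"`, which is equivalent to running `tox -- -m 'not slow'` locally to skip the slow-marked tests.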
41 changes: 41 additions & 0 deletions tests/fixtures/integration.py
@@ -0,0 +1,41 @@
"""Pytest fixtures for integration tests."""

from pathlib import Path

import pooch
import pytest

GIN_TEST_DATA_REPO = "https://gin.g-node.org/SainsburyWellcomeCentre/crabs-exploration-test-data"


@pytest.fixture(scope="session")
def pooch_registry() -> pooch.Pooch:
    """Pooch registry for the test data.

    This fixture is common to the entire test session. The
    file registry is downloaded fresh for every test session.

    Returns
    -------
    pooch.Pooch
        Pooch registry pointing at the GIN repository with the test data

    """
    # Initialise pooch registry
    registry = pooch.create(
        Path.home() / ".crabs-exploration-test-data",
        base_url=f"{GIN_TEST_DATA_REPO}/raw/master/test_data",
    )

    # Download only the registry file from GIN;
    # with known_hash=None, the file is always downloaded afresh
    file_registry = pooch.retrieve(
        url=f"{GIN_TEST_DATA_REPO}/raw/master/files-registry.txt",
        known_hash=None,
        path=Path.home() / ".crabs-exploration-test-data",
    )

    # Load the registry file into the pooch registry
    registry.load_registry(file_registry)

    return registry
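Any test can then pull individual files through this session-scoped registry. A minimal sketch (hypothetical test name; the file path shown is one that appears in the inference test below):

from pathlib import Path


def test_fetch_tracking_config(pooch_registry):
    # fetch() downloads the file on first use, verifies its hash
    # against the registry, and returns the local cached path
    local_path = pooch_registry.fetch(
        "04.09.2023-04-Right_RE_test_3_frames/tracking_config.yaml"
    )
    assert Path(local_path).exists()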
2 changes: 1 addition & 1 deletion tests/test_integration/test_annotations.py
@@ -400,7 +400,7 @@ def test_exclude_pattern(via_json_1: str, via_json_2: str, tmp_path: Path):
     # combine input json files, excluding those that end with _2.json
     json_out_fullpath = combine_multiple_via_jsons(
         [via_json_1, via_json_2],
-        exclude_pattern="\w+_2.json$",
+        exclude_pattern=r"\w+_2.json$",
         json_out_dir=str(tmp_path),
     )
2 changes: 1 addition & 1 deletion tests/test_integration/test_frame_extraction.py
@@ -56,7 +56,7 @@ def assert_output_files(list_input_videos: list, cli_dict: dict) -> None:

     # check filename format of images: <video_name>_frame_{frame_idx:08d}
     list_regex_patterns = [
-        Path(input_video_str).stem + "_frame_[\d]{8}$"
+        Path(input_video_str).stem + r"_frame_[\d]{8}$"
         for input_video_str in list_input_videos
     ]
     for f in list_imgs:
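Both of these one-line hunks are the same fix: the `r` prefix makes the pattern a raw string, so escapes like `\w` and `\d` reach the `re` module verbatim instead of being treated as invalid string escape sequences, which Python flags with a DeprecationWarning (a SyntaxWarning from Python 3.12). This corresponds to the "Fix deprecation warnings" commit above.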
167 changes: 167 additions & 0 deletions tests/test_integration/test_inference.py
@@ -0,0 +1,167 @@
import re
import subprocess
from pathlib import Path
from typing import Optional

import cv2
import pooch
import pytest

from crabs.tracker.utils.io import open_video


@pytest.fixture()
def input_data_paths(pooch_registry: pooch.Pooch):
    """Input data for a detector+tracking run.

    The data is fetched from the pooch registry.

    Returns
    -------
    dict
        Dictionary with the paths to the input video, annotations,
        tracking configuration and trained model.

    """
    input_data_paths = {}
    video_root_name = "04.09.2023-04-Right_RE_test_3_frames"
    input_data_paths["video_root_name"] = video_root_name

    # get the trained model from the pooch registry:
    # download and unzip ml-runs
    list_files_ml_runs = pooch_registry.fetch(
        "ml-runs.zip",
        processor=pooch.Unzip(extract_dir=""),
        progressbar=True,
    )
    # get the path to the last checkpoint
    input_data_paths["ckpt"] = next(
        x for x in list_files_ml_runs if x.endswith("last.ckpt")
    )

    # get the input video, annotations and config from the registry
    map_key_to_filepath = {
        "video": f"{video_root_name}/{video_root_name}.mp4",
        "annotations": f"{video_root_name}/{video_root_name}_ground_truth.csv",
        "tracking_config": f"{video_root_name}/tracking_config.yaml",
    }
    for key, filepath in map_key_to_filepath.items():
        input_data_paths[key] = pooch_registry.fetch(filepath)

    return input_data_paths
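# With a pooch.Unzip processor, fetch() returns the list of paths to the
# extracted files (not a single archive path), which is why the checkpoint
# is located with next(...) above.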


# mark integration test as slow
@pytest.mark.slow
@pytest.mark.parametrize(
    "no_timestamp_flag",
    [
        None,
        "--output_dir_no_timestamp",
    ],
)
@pytest.mark.parametrize(
    "flags_to_append",
    [
        [],
        ["--save_video"],
        ["--save_frames"],
        ["--save_video", "--save_frames"],
    ],
)
def test_detect_and_track_video(
    input_data_paths: dict,
    tmp_path: Path,
    flags_to_append: list,
    no_timestamp_flag: Optional[str],
):
    """Test the detect-and-track-video entry point when ground truth is passed.

    The test checks:
    - the exit code of the detect-and-track-video command
    - the existence of the csv file with predictions
    - the existence of the csv file with tracking metrics
    - the existence of the video file, if requested
    - the existence of the exported frames, if requested

    """
    # Define main detect-and-track-video command
    main_command = [
        "detect-and-track-video",
        f"--trained_model_path={input_data_paths['ckpt']}",
        f"--video_path={input_data_paths['video']}",
        f"--config_file={input_data_paths['tracking_config']}",
        f"--annotations_file={input_data_paths['annotations']}",
        "--accelerator=cpu",
    ]
    # append required flags
    main_command.extend(flags_to_append)
    if no_timestamp_flag:
        main_command.append(no_timestamp_flag)

    # run command, setting cwd to pytest's temporary directory
    # so the output is saved there
    completed_process = subprocess.run(
        main_command,
        check=True,
        cwd=tmp_path,
    )

    # check the command runs successfully
    assert completed_process.returncode == 0

    # check the tracking output directory is created
    # and has the expected name
    output_dir_name_expected = "tracking_output"
    if no_timestamp_flag:
        expected_pattern = re.compile(rf"{output_dir_name_expected}$")
    else:
        expected_pattern = re.compile(
            rf"{output_dir_name_expected}_\d{{8}}_\d{{6}}$"
        )
    list_cwd_subdirs = [x for x in tmp_path.iterdir() if x.is_dir()]
    assert len(list_cwd_subdirs) == 1
    tracking_output_dir = list_cwd_subdirs[0]
    assert expected_pattern.match(tracking_output_dir.stem)

    # check csv with predictions exists
    predictions_csv = (
        tmp_path
        / tracking_output_dir
        / f"{input_data_paths['video_root_name']}_tracks.csv"
    )
    assert predictions_csv.exists()

    # check csv with tracking metrics exists
    tracking_metrics_csv = (
        tmp_path / tracking_output_dir / "tracking_metrics_output.csv"
    )
    assert tracking_metrics_csv.exists()

    # if the video is requested: check it exists
    if "--save_video" in flags_to_append:
        assert (
            tmp_path
            / tracking_output_dir
            / f"{input_data_paths['video_root_name']}_tracks.mp4"
        ).exists()

    # if the frames are requested: check they exist
    if "--save_frames" in flags_to_append:
        input_video_object = open_video(input_data_paths["video"])
        total_n_frames = int(input_video_object.get(cv2.CAP_PROP_FRAME_COUNT))

        # check frames subdirectory exists
        frames_subdir = (
            tmp_path
            / tracking_output_dir
            / f"{input_data_paths['video_root_name']}_frames"
        )
        assert frames_subdir.exists()

        # check files are named as expected
        # (the dot before "png" is escaped so it matches literally)
        expected_pattern = re.compile(r"frame_\d{8}\.png")
        list_files = [x for x in frames_subdir.iterdir() if x.is_file()]

        assert len(list_files) == total_n_frames
        assert all(expected_pattern.match(x.name) for x in list_files)
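Since the test is marked `slow`, the macos-13 CI job excludes it via `tox-args: "-- -m 'not slow'"`; locally it can be run on its own with `pytest -m slow`.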