Merge pull request #776 from int-brain-lab/hotfix/2.35.3
Use correct task for timeline acquisitions in make_pipeline
k1o0 authored Jun 4, 2024
2 parents 7270eaf + 509cb8a commit 894b968
Showing 4 changed files with 43 additions and 10 deletions.
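In brief: make_pipeline now picks the trials extraction task from the session's sync namespace instead of always using the NIDQ task, and get_trials_tasks tolerates namespaces it cannot handle. A condensed, illustrative sketch of the selection logic (the helper name is hypothetical; in the real code the branch sits inline in make_pipeline and the namespace comes from the experiment description's sync settings):

import ibllib.pipes.behavior_tasks as btasks

def select_trials_task(sync_namespace):
    # Timeline acquisitions get their own trials task; SpikeGLX or an
    # unspecified namespace keeps the NIDQ task; anything else is unsupported.
    if sync_namespace == 'timeline':
        return btasks.ChoiceWorldTrialsTimeline
    elif sync_namespace in ('spikeglx', None):
        return btasks.ChoiceWorldTrialsNidq
    raise NotImplementedError(f'No trials task available for sync namespace "{sync_namespace}"')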
ibllib/__init__.py: 2 changes (1 addition & 1 deletion)
@@ -2,7 +2,7 @@
import logging
import warnings

-__version__ = '2.35.2'
+__version__ = '2.35.3'
warnings.filterwarnings('always', category=DeprecationWarning, module='ibllib')

# if this becomes a full-blown library we should let the logging configuration to the discretion of the dev
ibllib/pipes/dynamic_pipeline.py: 29 changes (20 additions & 9 deletions)
@@ -268,7 +268,12 @@ def make_pipeline(session_path, **pkwargs):
compute_status = False
else:
registration_class = btasks.TrialRegisterRaw
-behaviour_class = btasks.ChoiceWorldTrialsNidq
+if sync_args['sync_namespace'] == 'timeline':
+    behaviour_class = btasks.ChoiceWorldTrialsTimeline
+elif sync_args['sync_namespace'] in ('spikeglx', None):
+    behaviour_class = btasks.ChoiceWorldTrialsNidq
+else:
+    raise NotImplementedError(f'No trials task available for sync namespace "{sync_args["sync_namespace"]}"')
compute_status = True
else:
raise NotImplementedError
@@ -278,7 +283,7 @@ def make_pipeline(session_path, **pkwargs):
tasks[f'Trials_{protocol}_{i:02}'] = type(f'Trials_{protocol}_{i:02}', (behaviour_class,), {})(
**kwargs, **sync_kwargs, **task_kwargs, parents=parents)
if compute_status:
tasks[f"TrainingStatus_{protocol}_{i:02}"] = type(f'TrainingStatus_{protocol}_{i:02}', (
tasks[f'TrainingStatus_{protocol}_{i:02}'] = type(f'TrainingStatus_{protocol}_{i:02}', (
btasks.TrainingStatus,), {})(**kwargs, **task_kwargs, parents=[tasks[f'Trials_{protocol}_{i:02}']])

# Ephys tasks
@@ -289,7 +294,10 @@ def make_pipeline(session_path, **pkwargs):
all_probes = []
register_tasks = []
for pname, probe_info in devices['neuropixel'].items():
-meta_file = spikeglx.glob_ephys_files(Path(session_path).joinpath(probe_info['collection']), ext='meta')
+# Glob to support collections such as _00a, _00b. This doesn't fix the issue of NP2.4
+# extractions, however.
+probe_collection = next(session_path.glob(probe_info['collection'] + '*'))
+meta_file = spikeglx.glob_ephys_files(probe_collection, ext='meta')
meta_file = meta_file[0].get('ap')
nptype = spikeglx._get_neuropixel_version_from_meta(spikeglx.read_meta_data(meta_file))
nshanks = spikeglx._get_nshanks_from_meta(spikeglx.read_meta_data(meta_file))
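The glob introduced above is there to match suffixed probe collections; a minimal illustration under assumed names (the session path and the 'raw_ephys_data/probe00' collection value are placeholders, not taken from this repository):

from pathlib import Path

session_path = Path('/data/subject/2024-06-04/001')  # placeholder session path
collection = 'raw_ephys_data/probe00'  # stand-in for probe_info['collection']
# The trailing '*' lets the glob match split collections such as probe00a or
# probe00b as well as a plain probe00 folder; None means nothing matched.
probe_collection = next(session_path.glob(collection + '*'), None)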
@@ -482,12 +490,15 @@ def get_trials_tasks(session_path, one=None):
# If experiment description file then use this to make the pipeline
if experiment_description is not None:
tasks = []
-pipeline = make_pipeline(session_path, one=one)
-trials_tasks = [t for t in pipeline.tasks if 'Trials' in t]
-for task in trials_tasks:
-    t = pipeline.tasks.get(task)
-    t.__init__(session_path, **t.kwargs)
-    tasks.append(t)
+try:
+    pipeline = make_pipeline(session_path, one=one)
+    trials_tasks = [t for t in pipeline.tasks if 'Trials' in t]
+    for task in trials_tasks:
+        t = pipeline.tasks.get(task)
+        t.__init__(session_path, **t.kwargs)
+        tasks.append(t)
+except NotImplementedError as ex:
+    _logger.warning('Failed to get trials tasks: %s', ex)
else:
# Otherwise default to old way of doing things
if one and one.to_eid(session_path):
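With this change, get_trials_tasks degrades gracefully when the experiment description names a sync namespace that has no trials task: the NotImplementedError raised by make_pipeline is caught, a warning is logged and an empty list is returned. A minimal usage sketch, assuming a local session folder (the path below is a placeholder):

from pathlib import Path
import ibllib.pipes.dynamic_pipeline as dyn

session_path = Path('/data/subject/2024-06-04/001')  # placeholder session path
tasks = dyn.get_trials_tasks(session_path)
if not tasks:
    # Unsupported sync namespaces now log a warning and yield [] rather than raising.
    print('No trials tasks could be built for this session')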
ibllib/tests/test_dynamic_pipeline.py: 19 changes (19 additions & 0 deletions)
@@ -9,6 +9,7 @@
import ibllib.tests
import ibllib.pipes.dynamic_pipeline as dyn
from ibllib.pipes.tasks import Pipeline, Task
+import ibllib.pipes.behavior_tasks as btasks
from ibllib.pipes import ephys_preprocessing
from ibllib.pipes import training_preprocessing
from ibllib.io import session_params
@@ -65,6 +66,7 @@ def setUp(self):
{'ephysChoiceWorld': {'task_collection': 'raw_task_data_00'}},
{'passiveChoiceWorld': {'task_collection': 'raw_task_data_01'}},
]}
+self.description = description
with open(self.session_path_dynamic / '_ibl_experiment.description.yaml', 'w') as fp:
yaml.safe_dump(description, fp)

@@ -87,8 +89,25 @@ def test_get_trials_tasks(self):
one.alyx.cache_mode = None # sneaky hack as this is checked by the pipeline somewhere
tasks = dyn.get_trials_tasks(self.session_path_dynamic, one)
self.assertEqual(2, len(tasks))
+self.assertIsInstance(tasks[0], btasks.ChoiceWorldTrialsNidq)
one.load_datasets.assert_called() # check that description file is checked on disk

+# A session with timeline acquisition
+self.description['sync']['nidq']['acquisition_software'] = 'timeline'
+with open(self.session_path_dynamic / '_ibl_experiment.description.yaml', 'w') as fp:
+    yaml.safe_dump(self.description, fp)
+tasks = dyn.get_trials_tasks(self.session_path_dynamic, one)
+self.assertIsInstance(tasks[0], btasks.ChoiceWorldTrialsTimeline)
+
+# A session with an unknown sync namespace
+self.description['sync']['nidq']['acquisition_software'] = 'notepad'
+with open(self.session_path_dynamic / '_ibl_experiment.description.yaml', 'w') as fp:
+    yaml.safe_dump(self.description, fp)
+with self.assertLogs(dyn.__name__, 'WARNING') as cm:
+    self.assertEqual([], dyn.get_trials_tasks(self.session_path_dynamic))
+log_message = cm.records[0].getMessage()
+self.assertIn('sync namespace "notepad"', log_message)
+
# An ephys session
tasks = dyn.get_trials_tasks(self.session_path_legacy)
self.assertEqual(1, len(tasks))
release_notes.md: 3 changes (3 additions & 0 deletions)
@@ -15,6 +15,9 @@
- Support extraction of repNum for advancedChoiceWorld
- Support matplotlib v3.9; min slidingRP version now 1.1.1

+#### 2.35.3
+- Use correct task for timeline acquisitions in make_pipeline
+
## Release Note 2.34.0

### features