Merge branch 'release/2.13.0'
juhuntenburg committed Jun 30, 2022
2 parents 810282c + be1e43f commit f82cf5d
Showing 17 changed files with 151 additions and 67 deletions.
19 changes: 18 additions & 1 deletion brainbox/io/one.py
@@ -334,7 +334,6 @@ def _load_channel_locations_traj(eid, probe=None, one=None, revision=None, align
# get the channels from histology tracing
xyz = xyz[np.argsort(xyz[:, 2]), :]
chans = histology.interpolate_along_track(xyz, (depths + TIP_SIZE_UM) / 1e6)

channels[probe] = _channels_traj2bunch(chans, brain_atlas)
source = 'traced'
channels[probe]['axial_um'] = chn_coords[:, 1]
@@ -894,6 +893,7 @@ class SpikeSortingLoader:
collection: str = ''
histology: str = '' # 'alf', 'resolved', 'aligned' or 'traced'
spike_sorting_path: Path = None
_sync: dict = None

def __post_init__(self):
# pid gets precedence
@@ -1039,3 +1039,20 @@ def url(self):
"""Gets flatiron URL for the session"""
webclient = getattr(self.one, '_web_client', None)
return webclient.rel_path2url(get_alf_path(self.session_path)) if webclient else None

def samples2times(self, values, direction='forward'):
"""
:param values: numpy array of times in seconds or samples to resync
:param direction: 'forward' (samples probe time to seconds main time) or 'reverse'
(seconds main time to samples probe time)
:return:
"""
if self._sync is None:
timestamps = self.one.load_dataset(
self.eid, dataset='_spikeglx_*.timestamps.npy', collection=f'raw_ephys_data/{self.pname}')
self._sync = {
'timestamps': timestamps,
'forward': interp1d(timestamps[:, 0], timestamps[:, 1], fill_value='extrapolate'),
'reverse': interp1d(timestamps[:, 1], timestamps[:, 0], fill_value='extrapolate'),
}
return self._sync[direction](values)
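
A usage sketch of this new resync helper; the probe insertion id and ONE connection below are illustrative assumptions, not values from this commit:

import numpy as np
from one.api import ONE
from brainbox.io.one import SpikeSortingLoader

one = ONE()
pid = 'decc8d40-cf74-4263-ae9d-a0cc68b47e86'  # hypothetical probe insertion id
ssl = SpikeSortingLoader(pid=pid, one=one)
# convert raw sample indices on the probe clock to seconds on the session main clock ...
t_main = ssl.samples2times(np.array([0, 30000, 60000]), direction='forward')
# ... and back again
samples = ssl.samples2times(t_main, direction='reverse')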
2 changes: 1 addition & 1 deletion ibllib/__init__.py
@@ -1,4 +1,4 @@
__version__ = "2.12.2"
__version__ = "2.13.0"
import warnings

from ibllib.misc import logger_config
74 changes: 59 additions & 15 deletions ibllib/atlas/atlas.py
@@ -110,22 +110,55 @@ def _round(i, round=True):
else:
return i

def x2i(self, x, round=True):
return self._round((x - self.x0) / self.dx, round=round)

def y2i(self, y, round=True):
return self._round((y - self.y0) / self.dy, round=round)

def z2i(self, z, round=True):
return self._round((z - self.z0) / self.dz, round=round)
def x2i(self, x, round=True, mode='raise'):
i = np.asarray(self._round((x - self.x0) / self.dx, round=round))
if np.any(i < 0) or np.any(i >= self.nx):
if mode == 'clip':
i[i < 0] = 0
i[i >= self.nx] = self.nx - 1
elif mode == 'raise':
raise ValueError("At least one x value lies outside of the atlas volume.")
elif mode == 'wrap':
pass
return i

def y2i(self, y, round=True, mode='raise'):
i = np.asarray(self._round((y - self.y0) / self.dy, round=round))
if np.any(i < 0) or np.any(i >= self.ny):
if mode == 'clip':
i[i < 0] = 0
i[i >= self.ny] = self.ny - 1
elif mode == 'raise':
raise ValueError("At least one y value lies outside of the atlas volume.")
elif mode == 'wrap':
pass
return i

def z2i(self, z, round=True, mode='raise'):
i = np.asarray(self._round((z - self.z0) / self.dz, round=round))
if np.any(i < 0) or np.any(i >= self.nz):
if mode == 'clip':
i[i < 0] = 0
i[i >= self.nz] = self.nz - 1
elif mode == 'raise':
raise ValueError("At least one z value lies outside of the atlas volume.")
elif mode == 'wrap':
pass
return i

def xyz2i(self, xyz, round=True):
def xyz2i(self, xyz, round=True, mode='raise'):
"""
:param mode: {'raise', 'clip', 'wrap'} determines what to do when the computed index lies outside the atlas volume
'raise' will raise a ValueError
'clip' will replace the index with the closest index inside the volume
'wrap' will wrap around to the other side of the volume. This is only here for legacy reasons.
"""
xyz = np.array(xyz)
dt = int if round else float
out = np.zeros_like(xyz, dtype=dt)
out[..., 0] = self.x2i(xyz[..., 0], round=round)
out[..., 1] = self.y2i(xyz[..., 1], round=round)
out[..., 2] = self.z2i(xyz[..., 2], round=round)
out[..., 0] = self.x2i(xyz[..., 0], round=round, mode=mode)
out[..., 1] = self.y2i(xyz[..., 1], round=round, mode=mode)
out[..., 2] = self.z2i(xyz[..., 2], round=round, mode=mode)
return out

"""Methods indices to distance"""
@@ -227,7 +260,10 @@ def _get_cache_dir():
def compute_surface(self):
"""
Get the volume top, bottom, left and right surfaces, and from these the outer surface of
the image volume. This is needed to compute probe insertions intersections
the image volume. This is needed to compute probe insertions intersections.
NOTE: In places where the top or bottom surface touches the top or bottom of the atlas volume, the surface
will be set to np.nan. If you encounter issues working with these surfaces, check whether this might be the cause.
"""
if self.surface is None: # only compute if it hasn't already been computed
axz = self.xyz2dims[2] # this is the dv axis
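
Given the docstring note above, downstream code may want to guard against NaN surface values; a minimal sketch, reusing the ba instance from the previous example:

ba.compute_surface()
iy, ix = 100, 200  # arbitrary voxel indices for illustration
if np.isnan(ba.top[iy, ix]):
    # the surface touches the volume edge here: fall back or skip this location
    pass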
@@ -439,7 +475,12 @@ def slice(self, coordinate, axis, volume='image', mode='raise', region_values=No
:param mapping: mapping to use. Options can be found using ba.regions.mappings.keys()
:return: 2d array or 3d RGB numpy int8 array
"""
index = self.bc.xyz2i(np.array([coordinate] * 3))[axis]
if axis == 0:
index = self.bc.x2i(np.array(coordinate), mode=mode)
elif axis == 1:
index = self.bc.y2i(np.array(coordinate), mode=mode)
elif axis == 2:
index = self.bc.z2i(np.array(coordinate), mode=mode)

# np.take is 50 thousand times slower than straight slicing !
def _take(vol, ind, axis):
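
With the per-axis dispatch above, slice now forwards its mode flag to the index computation, e.g. (coordinate value is illustrative):

# coronal slice 2 mm anterior of bregma; clip rather than raise if the
# coordinate falls outside the volume
cslice = ba.slice(2000 / 1e6, axis=1, mode='clip')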
@@ -765,7 +806,10 @@ def from_dict(d, brain_atlas=None):
if brain_atlas:
iy = brain_atlas.bc.y2i(d['y'] / 1e6)
ix = brain_atlas.bc.x2i(d['x'] / 1e6)
z = brain_atlas.top[iy, ix]
# Only use the brain surface value as z if it isn't NaN (this happens when the surface touches the edges
# of the atlas volume)
if not np.isnan(brain_atlas.top[iy, ix]):
z = brain_atlas.top[iy, ix]
return Insertion(x=d['x'] / 1e6, y=d['y'] / 1e6, z=z,
phi=d['phi'], theta=d['theta'], depth=d['depth'] / 1e6,
beta=d.get('beta', 0), label=d.get('label', ''))
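
For reference, a hedged sketch of the call path this NaN guard protects (field values are hypothetical and in micrometers, as from_dict expects):

from ibllib.atlas import Insertion

ins = Insertion.from_dict(
    {'x': 1000, 'y': 1000, 'z': 0, 'phi': 0, 'theta': 15, 'depth': 4000},
    brain_atlas=ba)
# z falls back to the value in the dict when the surface at (x, y) is NaN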
1 change: 1 addition & 0 deletions ibllib/ephys/neuropixel.py
@@ -8,3 +8,4 @@
', change your imports to neuropixel !', DeprecationWarning)

from neuropixel import * # noqa
from neuropixel import SITES_COORDINATES # noqa
3 changes: 1 addition & 2 deletions ibllib/io/extractors/biased_trials.py
@@ -12,7 +12,6 @@
StimOnTimes_deprecated, StimOnTriggerTimes, StimOnOffFreezeTimes, ItiInTimes,
StimOffTriggerTimes, StimFreezeTriggerTimes, ErrorCueTriggerTimes, PhasePosQuiescence)
from ibllib.io.extractors.training_wheel import Wheel
from ibllib.misc import version


class ContrastLR(BaseBpodTrialsExtractor):
@@ -163,7 +162,7 @@ def extract_all(session_path, save=False, bpod_trials=False, settings=False, ext

base = [GoCueTriggerTimes]
# Version check
if version.ge(settings['IBLRIG_VERSION_TAG'], '5.0.0'):
if parse_version(settings['IBLRIG_VERSION_TAG']) >= parse_version('5.0.0'):
# We now extract a single trials table
base.extend([
StimOnTriggerTimes, ItiInTimes, StimOffTriggerTimes, StimFreezeTriggerTimes, ErrorCueTriggerTimes,
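
The same version-check migration recurs in most files of this commit; the replacement pattern is simply (version tags below are examples):

from pkg_resources import parse_version

# naive string comparison would get this wrong ('5.10.0' < '5.9.0' lexically);
# parse_version compares the release segments numerically
assert parse_version('5.10.0') >= parse_version('5.9.0')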
5 changes: 3 additions & 2 deletions ibllib/io/extractors/bpod_trials.py
@@ -5,10 +5,10 @@
import logging
from collections import OrderedDict

from pkg_resources import parse_version
from ibllib.io.extractors import habituation_trials, training_trials, biased_trials, opto_trials
import ibllib.io.extractors.base
import ibllib.io.raw_data_loaders as rawio
from ibllib.misc import version

_logger = logging.getLogger('ibllib')

@@ -54,7 +54,8 @@ def extract_all(session_path, save=True, bpod_trials=None, settings=None):
files_wheel = []
wheel = OrderedDict({k: trials.pop(k) for k in tuple(trials.keys()) if 'wheel' in k})
elif extractor_type == 'habituation':
if settings['IBLRIG_VERSION_TAG'] and version.le(settings['IBLRIG_VERSION_TAG'], '5.0.0'):
if settings['IBLRIG_VERSION_TAG'] and \
parse_version(settings['IBLRIG_VERSION_TAG']) <= parse_version('5.0.0'):
_logger.warning("No extraction of legacy habituation sessions")
return None, None, None
trials, files_trials = habituation_trials.extract_all(
11 changes: 5 additions & 6 deletions ibllib/io/extractors/training_trials.py
@@ -6,7 +6,6 @@
import ibllib.io.raw_data_loaders as raw
from ibllib.io.extractors.base import BaseBpodTrialsExtractor, run_extractor_classes
from ibllib.io.extractors.training_wheel import Wheel
from ibllib.misc import version


_logger = logging.getLogger('ibllib')
@@ -211,7 +210,7 @@ def get_feedback_times_ge5(session_path, data=False):

def _extract(self):
# Version check
if version.ge(self.settings['IBLRIG_VERSION_TAG'], '5.0.0'):
if parse_version(self.settings['IBLRIG_VERSION_TAG']) >= parse_version('5.0.0'):
merge = self.get_feedback_times_ge5(self.session_path, data=self.bpod_trials)
else:
merge = self.get_feedback_times_lt5(self.session_path, data=self.bpod_trials)
@@ -282,7 +281,7 @@ class GoCueTriggerTimes(BaseBpodTrialsExtractor):
var_names = 'goCueTrigger_times'

def _extract(self):
if version.ge(self.settings['IBLRIG_VERSION_TAG'], '5.0.0'):
if parse_version(self.settings['IBLRIG_VERSION_TAG']) >= parse_version('5.0.0'):
goCue = np.array([tr['behavior_data']['States timestamps']
['play_tone'][0][0] for tr in self.bpod_trials])
else:
@@ -356,7 +355,7 @@ class IncludedTrials(BaseBpodTrialsExtractor):
var_names = 'included'

def _extract(self):
if version.ge(self.settings['IBLRIG_VERSION_TAG'], '5.0.0'):
if parse_version(self.settings['IBLRIG_VERSION_TAG']) >= parse_version('5.0.0'):
trials_included = self.get_included_trials_ge5(
data=self.bpod_trials, settings=self.settings)
else:
@@ -513,7 +512,7 @@ def _extract(self):
# Version check
_logger.warning("Deprecation Warning: this is an old version of stimOn extraction."
"From version 5., use StimOnOffFreezeTimes")
if version.ge(self.settings['IBLRIG_VERSION_TAG'], '5.0.0'):
if parse_version(self.settings['IBLRIG_VERSION_TAG']) >= parse_version('5.0.0'):
stimOn_times = self.get_stimOn_times_ge5(self.session_path, data=self.bpod_trials)
else:
stimOn_times = self.get_stimOn_times_lt5(self.session_path, data=self.bpod_trials)
@@ -719,7 +718,7 @@ def extract_all(session_path, save=False, bpod_trials=None, settings=None):

base = [RepNum, GoCueTriggerTimes]
# Version check
if version.ge(settings['IBLRIG_VERSION_TAG'], '5.0.0'):
if parse_version(settings['IBLRIG_VERSION_TAG']) >= parse_version('5.0.0'):
# We now extract a single trials table
base.extend([
StimOnTriggerTimes, ItiInTimes, StimOffTriggerTimes, StimFreezeTriggerTimes,
6 changes: 3 additions & 3 deletions ibllib/io/raw_data_loaders.py
@@ -15,12 +15,12 @@
from pathlib import Path
from typing import Union

from pkg_resources import parse_version
import numpy as np
import pandas as pd

from iblutil.io import jsonable
from ibllib.io.video import assert_valid_label
from ibllib.misc import version
from ibllib.time import uncycle_pgts, convert_pgts

_logger = logging.getLogger('ibllib')
@@ -374,7 +374,7 @@ def load_encoder_events(session_path, settings=False):
settings = {'IBLRIG_VERSION_TAG': '0.0.0'}
if not path:
return None
if version.ge(settings['IBLRIG_VERSION_TAG'], '5.0.0'):
if parse_version(settings['IBLRIG_VERSION_TAG']) >= parse_version('5.0.0'):
return _load_encoder_events_file_ge5(path)
else:
return _load_encoder_events_file_lt5(path)
@@ -479,7 +479,7 @@ def load_encoder_positions(session_path, settings=False):
if not path:
_logger.warning("No data loaded: could not find raw encoderPositions file")
return None
if version.ge(settings['IBLRIG_VERSION_TAG'], '5.0.0'):
if parse_version(settings['IBLRIG_VERSION_TAG']) >= parse_version('5.0.0'):
return _load_encoder_positions_file_ge5(path)
else:
return _load_encoder_positions_file_lt5(path)
11 changes: 11 additions & 0 deletions ibllib/misc/version.py
@@ -1,4 +1,15 @@
import pkg_resources
import traceback
import warnings

for line in traceback.format_stack():
print(line.strip())

warnings.warn(
'ibllib.version is deprecated and functionality will be removed! '
'use pkg_resources.parse_version and ibllib.__version__ instead. See stack above.',
DeprecationWarning
)


def _compare_version_tag(v1, v2, fcn):
23 changes: 11 additions & 12 deletions ibllib/oneibl/patcher.py
@@ -12,8 +12,8 @@
from one.alf.spec import is_uuid_string
from one import params
from one.converters import path_from_dataset
from one.remote import globus

from ibllib.io import globus
from ibllib.oneibl.registration import register_dataset

_logger = logging.getLogger('ibllib')
@@ -97,7 +97,7 @@ def _patch_dataset(self, path, dset_id=None, dry=False, ftp=False):
full_remote_path = PurePosixPath(FLATIRON_MOUNT, remote_path)
if isinstance(path, WindowsPath) and not ftp:
# On Windows replace drive map with Globus uri, e.g. C:/ -> /~/C/
path = '/~/' + path.as_posix().replace(':', '')
path = globus.as_globus_path(path)
status = self._scp(path, full_remote_path, dry=dry)[0]
return status

@@ -140,8 +140,8 @@ def patch_dataset(self, file_list, dry=False, ftp=False, **kwargs):
Rules for creation/patching are the same that apply for registration via Alyx
as this uses the registration endpoint to get the dataset.
An existing file (same session and path relative to session) will be patched.
:param path: full file path. Must be whithin an ALF session folder (subject/date/number)
can also be a list of full file pathes belonging to the same session.
:param path: full file path. Must be within an ALF session folder (subject/date/number)
can also be a list of full file paths belonging to the same session.
:param server_repository: Alyx server repository name
:param created_by: alyx username for the dataset (optional, defaults to root)
:param ftp: flag for case when using ftppatcher. Don't adjust windows path in
@@ -197,13 +197,12 @@ class GlobusPatcher(Patcher):
"""

def __init__(self, one=None, globus_client_id=None, local_endpoint=None, label='ibllib patch'):
assert globus_client_id
def __init__(self, client_name='default', one=None, label='ibllib patch'):
assert one
self.local_endpoint = local_endpoint or globus.get_local_endpoint()
self.local_endpoint = getattr(globus.load_client_params(f'globus.{client_name}'),
'local_endpoint', globus.get_local_endpoint_id())
self.transfer_client = globus.create_globus_client(client_name)
self.label = label
self.transfer_client = globus.login_auto(
globus_client_id=globus_client_id, str_app='globus/admin')
# transfers/delete from the current computer to the flatiron: mandatory and executed first
self.globus_transfer = globus_sdk.TransferData(
self.transfer_client, self.local_endpoint, FLAT_IRON_GLOBUS_ID, verify_checksum=True,
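
A hedged sketch of constructing the patcher with the new signature; the client name and ONE setup are assumptions:

from one.api import ONE
from ibllib.oneibl.patcher import GlobusPatcher

# Globus credentials are now resolved from the ONE parameter files (profile
# 'globus.default') instead of being passed in as a client id
patcher = GlobusPatcher(client_name='default', one=ONE(), label='my patch')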
@@ -296,11 +295,11 @@ def _wait_for_task(resp):
# on an errored task
# Out[10]: TransferResponse({'bytes_checksummed': 0, 'bytes_transferred': 0, 'canceled_by_admin': None, 'canceled_by_admin_message': None, 'command': 'API 0.10', 'completion_time': '2021-01-03T17:39:00+00:00', 'deadline': '2021-01-04T17:37:34+00:00', 'delete_destination_extra': False, 'destination_endpoint': 'simonsfoundation#ibl', 'destination_endpoint_display_name': 'IBL Flatiron SDSC Data', 'destination_endpoint_id': 'ab2d064c-413d-11eb-b188-0ee0d5d9299f', 'directories': 0, 'effective_bytes_per_second': 0, 'encrypt_data': False, 'fatal_error': {'code': 'CANCELED', 'description': 'canceled'}, 'faults': 2, 'files': 6, 'files_skipped': 0, 'files_transferred': 0, 'history_deleted': False, 'is_ok': None, 'is_paused': False, 'key': 'complete,2021-01-03T17:38:59.697413', 'label': 'test 3B analog sync patch', 'nice_status': None, 'nice_status_details': None, 'nice_status_expires_in': None, 'nice_status_short_description': None, 'owner_id': 'e633663a-8561-4a5d-ac92-f198d43b14dc', 'preserve_timestamp': False, 'recursive_symlinks': 'ignore', 'request_time': '2021-01-03T17:37:34+00:00', 'source_endpoint': 'internationalbrainlab#916c2766-bd2a-11ea-8f22-0a21f750d19b', 'source_endpoint_display_name': 'olivier_laptop', 'source_endpoint_id': '916c2766-bd2a-11ea-8f22-0a21f750d19b', 'status': 'FAILED', 'subtasks_canceled': 6, 'subtasks_expired': 0, 'subtasks_failed': 0, 'subtasks_pending': 0, 'subtasks_retrying': 0, 'subtasks_succeeded': 6, 'subtasks_total': 12, 'symlinks': 0, 'sync_level': 3, 'task_id': '5706dd2c-4dea-11eb-8ffb-0a34088e79f9', 'type': 'TRANSFER', 'username': 'internationalbrainlab', 'verify_checksum': True}) # noqa
while True:
tinfo = gtc.get_task(task_id=resp['task_id'])['completion_time']
if tinfo['completion_time'] is not None:
tinfo = gtc.get_task(task_id=resp['task_id'])
if tinfo and tinfo['completion_time'] is not None:
break
_ = gtc.task_wait(task_id=resp['task_id'], timeout=30)
if tinfo['fatal_error'] is not None:
if tinfo and tinfo['fatal_error'] is not None:
raise ConnectionError(f"Globus transfer failed \n {tinfo}")

# handles the transfers first