
Commit

Merge pull request #306 from ttngu207/datajoint_social01
Datajoint social01
JaerongA authored Jan 19, 2024
2 parents 7f561e3 + ae0110a commit 5220b0d
Showing 20 changed files with 5,481 additions and 588 deletions.
4 changes: 2 additions & 2 deletions README.md
@@ -79,6 +79,6 @@ Ensure you stay in the `~/ProjectAeon/aeon_mecha` directory for the rest of the

If you use this software, please cite it as below:

Sainsbury Wellcome Centre Foraging Behaviour Working Group. (2023). Aeon: An open-source platform to study the neural basis of ethological behaviours over naturalistic timescales, https://doi.org/10.5281/zenodo.8413142
Sainsbury Wellcome Centre Foraging Behaviour Working Group. (2023). Aeon: An open-source platform to study the neural basis of ethological behaviours over naturalistic timescales, https://doi.org/10.5281/zenodo.8411157

[![DOI](https://zenodo.org/badge/485512362.svg)](https://zenodo.org/badge/latestdoi/485512362)
[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.8411157.svg)](https://zenodo.org/doi/10.5281/zenodo.8411157)
174 changes: 134 additions & 40 deletions aeon/dj_pipeline/acquisition.py
@@ -5,13 +5,13 @@
import numpy as np
import pandas as pd

from aeon.analysis import utils as analysis_utils
from aeon.io import api as io_api
from aeon.schema import schemas as aeon_schemas
from aeon.io import reader as io_reader
from aeon.schema import dataset as aeon_schema
from aeon.analysis import utils as analysis_utils

from . import get_schema_name
from .utils import paths
from aeon.dj_pipeline import get_schema_name, lab, subject
from aeon.dj_pipeline.utils import paths

logger = dj.logger
schema = dj.schema(get_schema_name("acquisition"))
@@ -22,19 +22,16 @@
"exp0.1-r0": "FrameTop",
"social0-r1": "FrameTop",
"exp0.2-r0": "CameraTop",
"oct1.0-r0": "CameraTop",
"social0.1-a3": "CameraTop",
"social0.1-a4": "CameraTop"
}

_device_schema_mapping = {
"exp0.1-r0": aeon_schema.exp01,
"social0-r1": aeon_schema.exp01,
"exp0.2-r0": aeon_schema.exp02,
"oct1.0-r0": aeon_schema.octagon01,
"social0.1-a3": aeon_schema.social01,
"social0.1-a4": aeon_schema.social01
}
# _device_schema_mapping = {
# "exp0.1-r0": aeon_schemas.exp01,
# "social0-r1": aeon_schemas.exp01,
# "exp0.2-r0": aeon_schemas.exp02,
# "oct1.0-r0": aeon_schemas.octagon01,
# "social0.1-a3": aeon_schemas.social01,
# "social0.1-a4": aeon_schemas.social01,
# }


# ------------------- Type Lookup ------------------------
@@ -68,6 +65,15 @@ class EventType(dj.Lookup):
]


@schema
class DevicesSchema(dj.Lookup):
definition = """
devices_schema_name: varchar(32)
"""

contents = zip(aeon_schemas.__all__)


# ------------------- Data repository/directory ------------------------


@@ -95,7 +101,7 @@ class DirectoryType(dj.Lookup):
@schema
class Experiment(dj.Manual):
definition = """
experiment_name: varchar(32) # e.g exp0-r0
experiment_name: varchar(32) # e.g exp0-aeon3
---
experiment_start_time: datetime(6) # datetime of the start of this experiment
experiment_description: varchar(1000)
Expand All @@ -119,6 +125,13 @@ class Directory(dj.Part):
directory_path: varchar(255)
"""

class DevicesSchema(dj.Part):
definition = """
-> master
---
-> DevicesSchema
"""

@classmethod
def get_data_directory(cls, experiment_key, directory_type="raw", as_posix=False):
try:
@@ -272,16 +285,29 @@ class Config(dj.Part):
metadata_file_path: varchar(255) # path of the file, relative to the experiment repository
"""

class DeviceType(dj.Part):
definition = """ # Device type(s) used in a particular acquisition epoch
-> master
device_type: varchar(36)
"""

@classmethod
def ingest_epochs(cls, experiment_name, start=None, end=None):
"""Ingest epochs for the specified "experiment_name". Ingest only epochs that start in between the specified (start, end) time. If not specified, ingest all epochs.
Note: "start" and "end" are datetime specified a string in the format: "%Y-%m-%d %H:%M:%S".
"""

from .utils import streams_maker
from .utils.load_metadata import (extract_epoch_config,
ingest_epoch_metadata,
insert_device_types)
from aeon.dj_pipeline.utils import streams_maker
from aeon.dj_pipeline.utils.load_metadata import (
extract_epoch_config,
ingest_epoch_metadata,
insert_device_types,
)

devices_schema = getattr(
aeon_schemas,
(Experiment.DevicesSchema & {"experiment_name": experiment_name}).fetch1("devices_schema_name"),
)

device_name = _ref_device_mapping.get(experiment_name, "CameraTop")

@@ -309,7 +335,9 @@ def ingest_epochs(cls, experiment_name, start=None, end=None):
if experiment_name != "exp0.1-r0":
metadata_yml_filepath = epoch_dir / "Metadata.yml"
if metadata_yml_filepath.exists():
epoch_config = extract_epoch_config(experiment_name, metadata_yml_filepath)
epoch_config = extract_epoch_config(
experiment_name, devices_schema, metadata_yml_filepath
)

metadata_yml_filepath = epoch_config["metadata_file_path"]

@@ -352,17 +380,24 @@ def ingest_epochs(cls, experiment_name, start=None, end=None):
try:
# Insert new entries for streams.DeviceType, streams.Device.
insert_device_types(
_device_schema_mapping[epoch_key["experiment_name"]],
devices_schema,
metadata_yml_filepath,
)
# Define and instantiate new devices/stream tables under `streams` schema
streams_maker.main()
with cls.connection.transaction:
# Insert devices' installation/removal/settings
ingest_epoch_metadata(experiment_name, metadata_yml_filepath)
epoch_device_types = ingest_epoch_metadata(
experiment_name, devices_schema, metadata_yml_filepath
)
if epoch_device_types is not None:
cls.DeviceType.insert(
epoch_key | {"device_type": n} for n in epoch_device_types
)
epoch_list.append(epoch_key)
except Exception as e:
(cls.Config & epoch_key).delete_quick()
(cls.DeviceType & epoch_key).delete_quick()
(cls & epoch_key).delete_quick()
raise e

@@ -452,19 +487,19 @@ def ingest_chunks(cls, experiment_name):
epoch_end = (EpochEnd & epoch_key).fetch1("epoch_end")
chunk_end = min(chunk_end, epoch_end)

if chunk_start in chunk_starts:
# handle cases where two chunks have identical start_time
# (both start within the same hour) but come from two consecutive epochs;
# use epoch_start as chunk_start in this case
chunk_start = epoch_start

# --- insert to Chunk ---
chunk_key = {"experiment_name": experiment_name, "chunk_start": chunk_start}

if cls.proj() & chunk_key:
# skip over those already ingested
continue

if chunk_start in chunk_starts:
# handle cases where two chunks have identical start_time
# (both start within the same hour) but come from two consecutive epochs;
# use epoch_start as chunk_start in this case
chunk_key["chunk_start"] = epoch_start

# chunk file and directory
raw_data_dir, directory, repo_path = _match_experiment_directory(
experiment_name, chunk_rep_file, raw_data_dirs
@@ -527,7 +562,13 @@ def make(self, key):
pd.Timestamp(chunk_end),
)
else:
device = _device_schema_mapping[key["experiment_name"]].ExperimentalMetadata
devices_schema = getattr(
aeon_schemas,
(Experiment.DevicesSchema & {"experiment_name": key["experiment_name"]}).fetch1(
"devices_schema_name"
),
)
device = devices_schema.ExperimentalMetadata
subject_data = io_api.load(
root=raw_data_dir.as_posix(),
reader=device.SubjectState,
@@ -578,7 +619,13 @@ def make(self, key):
pd.Timestamp(chunk_end),
)
else:
device = _device_schema_mapping[key["experiment_name"]].ExperimentalMetadata
devices_schema = getattr(
aeon_schemas,
(Experiment.DevicesSchema & {"experiment_name": key["experiment_name"]}).fetch1(
"devices_schema_name"
),
)
device = devices_schema.ExperimentalMetadata
subject_data = io_api.load(
root=raw_data_dir.as_posix(),
reader=device.SubjectState,
@@ -617,7 +664,13 @@ def make(self, key):

# Populate the part table
raw_data_dir = Experiment.get_data_directory(key)
device = _device_schema_mapping[key["experiment_name"]].ExperimentalMetadata
devices_schema = getattr(
aeon_schemas,
(Experiment.DevicesSchema & {"experiment_name": key["experiment_name"]}).fetch1(
"devices_schema_name"
),
)
device = devices_schema.ExperimentalMetadata

try:
# handles corrupted files - issue: https://github.com/SainsburyWellcomeCentre/aeon_mecha/issues/153
@@ -698,7 +751,14 @@ def make(self, key):

raw_data_dir = Experiment.get_data_directory(key, directory_type=dir_type)

device = getattr(_device_schema_mapping[key["experiment_name"]], food_patch_description)
devices_schema = getattr(
aeon_schemas,
(Experiment.DevicesSchema & {"experiment_name": key["experiment_name"]}).fetch1(
"devices_schema_name"
),
)

device = getattr(devices_schema, food_patch_description)

pellet_data = pd.concat(
[
@@ -775,7 +835,14 @@ def make(self, key):

raw_data_dir = Experiment.get_data_directory(key, directory_type=dir_type)

device = getattr(_device_schema_mapping[key["experiment_name"]], food_patch_description)
devices_schema = getattr(
aeon_schemas,
(Experiment.DevicesSchema & {"experiment_name": key["experiment_name"]}).fetch1(
"devices_schema_name"
),
)

device = getattr(devices_schema, food_patch_description)

wheel_data = io_api.load(
root=raw_data_dir.as_posix(),
@@ -799,7 +866,14 @@ def get_wheel_data(cls, experiment_name, start, end, patch_name="Patch1", using_
key = {"experiment_name": experiment_name}
raw_data_dir = Experiment.get_data_directory(key)

device = getattr(_device_schema_mapping[key["experiment_name"]], patch_name)
devices_schema = getattr(
aeon_schemas,
(Experiment.DevicesSchema & {"experiment_name": key["experiment_name"]}).fetch1(
"devices_schema_name"
),
)

device = getattr(devices_schema, patch_name)

wheel_data = io_api.load(
root=raw_data_dir.as_posix(),
@@ -886,7 +960,14 @@ def make(self, key):
food_patch_description = (ExperimentFoodPatch & key).fetch1("food_patch_description")
raw_data_dir = Experiment.get_data_directory(key, directory_type=dir_type)

device = getattr(_device_schema_mapping[key["experiment_name"]], food_patch_description)
devices_schema = getattr(
aeon_schemas,
(Experiment.DevicesSchema & {"experiment_name": key["experiment_name"]}).fetch1(
"devices_schema_name"
),
)

device = getattr(devices_schema, food_patch_description)

wheel_state = io_api.load(
root=raw_data_dir.as_posix(),
@@ -945,9 +1026,16 @@ def make(self, key):

weight_scale_description = (ExperimentWeightScale & key).fetch1("weight_scale_description")

devices_schema = getattr(
aeon_schemas,
(Experiment.DevicesSchema & {"experiment_name": key["experiment_name"]}).fetch1(
"devices_schema_name"
),
)

# in some epochs/chunks, the food patch device was mapped to "Nest"
for device_name in (weight_scale_description, "Nest"):
device = getattr(_device_schema_mapping[key["experiment_name"]], device_name)
device = getattr(devices_schema, device_name)
weight_data = io_api.load(
root=raw_data_dir.as_posix(),
reader=device.WeightRaw,
@@ -987,9 +1075,16 @@ def make(self, key):
raw_data_dir = Experiment.get_data_directory(key, directory_type=dir_type)
weight_scale_description = (ExperimentWeightScale & key).fetch1("weight_scale_description")

devices_schema = getattr(
aeon_schemas,
(Experiment.DevicesSchema & {"experiment_name": key["experiment_name"]}).fetch1(
"devices_schema_name"
),
)

# in some epochs/chunks, the food patch device was mapped to "Nest"
for device_name in (weight_scale_description, "Nest"):
device = getattr(_device_schema_mapping[key["experiment_name"]], device_name)
device = getattr(devices_schema, device_name)
weight_filtered = io_api.load(
root=raw_data_dir.as_posix(),
reader=device.WeightFiltered,
@@ -1097,8 +1192,7 @@ def _load_legacy_subjectdata(experiment_name, data_dir, start, end):
return subject_data

if experiment_name == "social0-r1":
from aeon.dj_pipeline.create_experiments.create_socialexperiment_0 import \
fixID
from aeon.dj_pipeline.create_experiments.create_socialexperiment_0 import fixID

sessdf = subject_data.copy()
sessdf = sessdf[~sessdf.id.str.contains("test")]
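
The recurring change in this file replaces the removed module-level `_device_schema_mapping` dict with a database lookup: each experiment's row in `Experiment.DevicesSchema` names an entry of `aeon.schema.schemas`, and the schema object is resolved with `getattr`. Below is a minimal sketch of that pattern, not part of the commit itself; the table, module, and attribute names are taken from the diff above, and the experiment name is a placeholder.

from aeon.dj_pipeline.acquisition import Experiment
from aeon.schema import schemas as aeon_schemas

experiment_name = "social0.1-a3"  # placeholder; any experiment registered in the pipeline

# Fetch the schema name registered for this experiment, then resolve the
# corresponding schema object from aeon.schema.schemas by attribute lookup.
schema_name = (Experiment.DevicesSchema & {"experiment_name": experiment_name}).fetch1(
    "devices_schema_name"
)
devices_schema = getattr(aeon_schemas, schema_name)

# Individual device readers hang off the resolved schema object, e.g.:
device = getattr(devices_schema, "Patch1")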
@@ -1 +1 @@
{"VideoController": "CameraController", "CameraTop": "VideoSource", "CameraWest": "VideoSource", "CameraEast": "VideoSource", "CameraNorth": "VideoSource", "CameraSouth": "VideoSource", "CameraPatch1": "VideoSource", "CameraPatch2": "VideoSource", "CameraNest": "VideoSource", "AudioAmbient": "AudioSource", "Patch1": "UndergroundFeeder", "Patch2": "UndergroundFeeder", "WeightNest": "WeightScale", "TrackingTop": "PositionTracking", "ActivityCenter": "ActivityTracking", "ActivityArena": "ActivityTracking", "ActivityNest": "ActivityTracking", "ActivityPatch1": "ActivityTracking", "ActivityPatch2": "ActivityTracking", "InNest": "RegionTracking", "InPatch1": "RegionTracking", "InPatch2": "RegionTracking", "ArenaCenter": "DistanceFromPoint", "InArena": "InRange", "InCorridor": "InRange", "ClockSynchronizer": "Synchronizer", "Rfid": "Rfid Reader"}
{"VideoController": "CameraController", "CameraTop": "SpinnakerVideoSource", "CameraWest": "SpinnakerVideoSource", "CameraEast": "SpinnakerVideoSource", "CameraNorth": "SpinnakerVideoSource", "CameraSouth": "SpinnakerVideoSource", "CameraPatch1": "SpinnakerVideoSource", "CameraPatch2": "SpinnakerVideoSource", "CameraNest": "SpinnakerVideoSource", "AudioAmbient": "AudioSource", "Patch1": "UndergroundFeeder", "Patch2": "UndergroundFeeder", "WeightNest": "WeightScale", "TrackingTop": "PositionTracking", "ActivityCenter": "ActivityTracking", "ActivityArena": "ActivityTracking", "ActivityNest": "ActivityTracking", "ActivityPatch1": "ActivityTracking", "ActivityPatch2": "ActivityTracking", "InNest": "RegionTracking", "InPatch1": "RegionTracking", "InPatch2": "RegionTracking", "ArenaCenter": "DistanceFromPoint", "InArena": "InRange", "InCorridor": "InRange", "ClockSynchronizer": "TimestampGenerator", "Rfid": "Rfid Reader", "CameraPatch3": "SpinnakerVideoSource", "Patch3": "UndergroundFeeder", "Nest": "WeightScale", "RfidNest1": "RfidReader", "RfidNest2": "RfidReader", "RfidGate": "RfidReader", "RfidPatch1": "RfidReader", "RfidPatch2": "RfidReader", "RfidPatch3": "RfidReader", "LightCycle": "EnvironmentCondition"}