Merge pull request #137 from AllenNeuralDynamics/release-v0.17.0
Release v0.17.0
jtyoung84 authored Sep 7, 2024
2 parents c35cda9 + 6ab84c1 commit 62b9ca6
Showing 32 changed files with 9,999 additions and 8,555 deletions.
10 changes: 8 additions & 2 deletions pyproject.toml
@@ -17,9 +17,10 @@ readme = "README.md"
 dynamic = ["version"]

 dependencies = [
-    "aind-data-schema==0.36.0",
-    "aind-data-schema-models==0.1.7",
+    "aind-data-schema==1.0.0",
+    "aind-data-schema-models>=0.3.2",
     "pydantic-settings>=2.0",
+    "pydantic<2.9"
 ]

 [project.optional-dependencies]
@@ -40,6 +41,7 @@ all = [
"aind-metadata-mapper[mesoscope]",
"aind-metadata-mapper[openephys]",
"aind-metadata-mapper[dynamicrouting]",
"aind-metadata-mapper[u19]",
]

bergamo = [
@@ -70,6 +72,10 @@ dynamicrouting = [
"pyyaml >= 6.0.0",
]

u19 = [
"pandas >= 2.2.2",
]

[tool.setuptools.packages.find]
where = ["src"]

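Note: the pins above move this release to aind-data-schema 1.0.0 (holding pydantic below 2.9) and add a new u19 extra that pulls in pandas. A small sketch, not part of the commit, for checking that an environment picked up the new pins after upgrading; the package names are the ones pinned above.

from importlib.metadata import version

# Expected after installing this release, e.g. pip install "aind-metadata-mapper[u19]"
print(version("aind-data-schema"))         # 1.0.0
print(version("aind-data-schema-models"))  # should be >= 0.3.2
print(version("pydantic"))                 # should be < 2.9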
2 changes: 1 addition & 1 deletion src/aind_metadata_mapper/__init__.py
@@ -1,3 +1,3 @@
"""Init package"""

__version__ = "0.16.1"
__version__ = "0.17.0"
22 changes: 12 additions & 10 deletions src/aind_metadata_mapper/bergamo/session.py
@@ -1069,7 +1069,7 @@ def run_job(self) -> JobResponse:  # noqa: C901
                 )
             ),
         )  # from Jon's script - seconds
-
+        wavelength = self.job_settings.photostim_laser_wavelength
         stim_epoch_photostim = StimulusEpoch(
             stimulus_start_time=stream_start_time,
             stimulus_end_time=stream_end_time,  # datetime,
@@ -1079,15 +1079,17 @@ def run_job(self) -> JobResponse:  # noqa: C901
             stimulus_parameters=[photostim],
             # opticalBCI class to be added in future
             stimulus_device_names=self.job_settings.stimulus_device_names,
-            light_source_config=LaserConfig(
-                # from rig json
-                name=self.job_settings.photostim_laser_name,
-                wavelength=self.job_settings.photostim_laser_wavelength,
-                # user set value
-                excitation_power=np.nanmean(group_powers),
-                # from tiff header,
-                excitation_power_unit=PowerUnit.PERCENT,
-            ),
+            light_source_config=[
+                LaserConfig(
+                    # from rig json
+                    name=self.job_settings.photostim_laser_name,
+                    wavelength=wavelength,
+                    # user set value
+                    excitation_power=np.nanmean(group_powers),
+                    # from tiff header,
+                    excitation_power_unit=PowerUnit.PERCENT,
+                )
+            ],
             output_parameters={
                 "tiff_files": tiff_list,
                 "tiff_stem": tiff_stem,
@@ -77,6 +77,6 @@ def update_modification_date(
     modification_date = date.today()

     extracted_source.rig_id = (
-        f"{room_id}_{rig_name}_{modification_date.strftime('%y%m%d')}"
+        f"{room_id}_{rig_name}_{modification_date.strftime('%Y%m%d')}"
     )
     extracted_source.modification_date = modification_date
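The only change here is the year directive in the rig_id date stamp: '%y%m%d' yields a two-digit year while '%Y%m%d' yields a four-digit one. A quick illustration:

from datetime import date

d = date(2024, 9, 7)
print(d.strftime("%y%m%d"))  # 240907
print(d.strftime("%Y%m%d"))  # 20240907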
3 changes: 2 additions & 1 deletion src/aind_metadata_mapper/models.py
@@ -1,7 +1,7 @@
"""Module to define models for Gather Metadata Job"""

from pathlib import Path
from typing import List, Optional, Union
from typing import List, Literal, Optional, Union

from aind_data_schema.core.processing import PipelineProcess
from aind_data_schema_models.modalities import Modality
@@ -106,6 +106,7 @@ class MetadataSettings(BaseJobSettings):
 class JobSettings(BaseJobSettings):
     """Fields needed to gather all metadata"""

+    job_settings_name: Literal["GatherMetadata"] = "GatherMetadata"
     metadata_service_domain: Optional[str] = None
     subject_settings: Optional[SubjectSettings] = None
     session_settings: Optional[SessionSettings] = None
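The new job_settings_name field gives this settings model a fixed Literal tag, the same pattern the new U19 JobSettings below uses with Literal["U19"]. One plausible use, shown as an illustration only (the stand-in classes here are not code from this repo), is as a pydantic discriminator so mixed settings JSON can be routed to the right model:

from typing import Annotated, List, Literal, Optional, Union

from pydantic import BaseModel, Field, TypeAdapter


class GatherSettings(BaseModel):
    """Hypothetical stand-in for the GatherMetadata JobSettings."""

    job_settings_name: Literal["GatherMetadata"] = "GatherMetadata"
    metadata_service_domain: Optional[str] = None


class U19Settings(BaseModel):
    """Hypothetical stand-in for the U19 JobSettings."""

    job_settings_name: Literal["U19"] = "U19"
    tissue_sheet_names: List[str] = []


AnyJobSettings = TypeAdapter(
    Annotated[
        Union[GatherSettings, U19Settings],
        Field(discriminator="job_settings_name"),
    ]
)

settings = AnyJobSettings.validate_json(
    '{"job_settings_name": "U19", "tissue_sheet_names": ["Tissue Sheet 1"]}'
)
assert isinstance(settings, U19Settings)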
12 changes: 6 additions & 6 deletions src/aind_metadata_mapper/open_ephys/utils/behavior_utils.py
@@ -730,9 +730,9 @@ def fix_omitted_end_frame(stim_pres_table: pd.DataFrame) -> pd.DataFrame:
         stim_pres_table[stim_pres_table["omitted"]]["start_frame"]
         + median_stim_frame_duration
     )
-    stim_pres_table.loc[
-        stim_pres_table["omitted"], "end_frame"
-    ] = omitted_end_frames
+    stim_pres_table.loc[stim_pres_table["omitted"], "end_frame"] = (
+        omitted_end_frames
+    )

     stim_dtypes = stim_pres_table.dtypes.to_dict()
     stim_dtypes["start_frame"] = int
@@ -796,9 +796,9 @@ def compute_is_sham_change(
     if np.array_equal(
         active_images, stim_image_names[passive_block_mask].values
     ):
-        stim_df.loc[
-            passive_block_mask, "is_sham_change"
-        ] = stim_df[active_block_mask]["is_sham_change"].values
+        stim_df.loc[passive_block_mask, "is_sham_change"] = (
+            stim_df[active_block_mask]["is_sham_change"].values
+        )

     return stim_df.sort_index()
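Both hunks in this file are a pure re-wrap of the same .loc assignments (formatter-style line breaks); behavior is unchanged. For readers less familiar with the pattern, a tiny self-contained sketch of boolean-mask assignment with DataFrame.loc, using made-up data rather than anything from this repo:

import pandas as pd

stim = pd.DataFrame(
    {
        "omitted": [False, True, False, True],
        "start_frame": [10, 20, 30, 40],
        "end_frame": [15.0, None, 35.0, None],
    }
)
median_stim_frame_duration = 5

# Series of replacement end frames, indexed by the omitted rows only
omitted_end_frames = (
    stim[stim["omitted"]]["start_frame"] + median_stim_frame_duration
)
# Index-aligned assignment into just the masked rows
stim.loc[stim["omitted"], "end_frame"] = omitted_end_frames
print(stim)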

1 change: 1 addition & 0 deletions src/aind_metadata_mapper/u19/__init__.py
@@ -0,0 +1 @@
"""Init u19 package"""
37 changes: 37 additions & 0 deletions src/aind_metadata_mapper/u19/models.py
@@ -0,0 +1,37 @@
"""Defines Job Settings for U19 ETL"""

from pathlib import Path
from typing import List, Literal, Optional

from pydantic import Field
from pydantic_settings import BaseSettings


class JobSettings(BaseSettings):
"""Data that needs to be input by user."""

job_settings_name: Literal["U19"] = "U19"
tissue_sheet_path: Path
tissue_sheet_names: List[str]
output_directory: Optional[Path] = Field(
default=None,
description=(
"Directory where to save the json file to. If None, then json"
" contents will be returned in the Response message."
),
)
experimenter_full_name: List[str]
subject_to_ingest: str = Field(
default=None,
description=(
"subject ID to ingest. If None,"
" then all subjects in spreadsheet will be ingested."
),
)
procedures_download_link: str = Field(
description="Link to download the relevant procedures "
"from metadata service",
)
allow_validation_errors: bool = Field(
False, description="Whether or not to allow validation errors."
)
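A minimal usage sketch for the new U19 JobSettings; every value below is a made-up placeholder, and a real run would point tissue_sheet_path at an actual tissue-tracking spreadsheet.

from aind_metadata_mapper.u19.models import JobSettings

settings = JobSettings(
    tissue_sheet_path="/path/to/u19_tissue_tracking.xlsx",
    tissue_sheet_names=["Sheet1"],
    experimenter_full_name=["Jane Doe"],
    subject_to_ingest="123456",
    procedures_download_link="https://example.com/procedures",
)
print(settings.model_dump_json(indent=2))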