
Commit

Fix - Remove plane_idx from write_single_bigtiff() for caiman processing
kushalbakshi committed Feb 1, 2024
1 parent 3c02d12 commit 4dc70a8
Showing 6 changed files with 119 additions and 81 deletions.
8 changes: 5 additions & 3 deletions element_calcium_imaging/export/nwb/nwb.py
@@ -192,9 +192,11 @@ def _add_scan_to_nwb(session_key, nwbfile):
name="ImagingPlane",
optical_channel=optical_channel,
imaging_rate=frame_rate,
description=scan_notes
if scan_notes != ""
else f"Imaging plane for field {field_no+1}, channel {channel+1}",
description=(
scan_notes
if scan_notes != ""
else f"Imaging plane for field {field_no+1}, channel {channel+1}"
),
device=device,
excitation_lambda=nan,
indicator="unknown",
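
Apart from the plane_idx removal in imaging_no_curation.py (covered below), the hunks in this commit are formatting-only: conditional expressions used as argument values get wrapped in explicit parentheses. This looks like a formatter-style update (for example, Black's 2024 stable style), though the commit does not say which tool produced it. A minimal, self-contained sketch of the pattern, reusing the expression from the nwb.py hunk above with hypothetical placeholder values:

# Formatting-only pattern seen in most hunks of this commit.
# The values below are hypothetical placeholders so the snippet runs on its own.
scan_notes = ""
field_no, channel = 0, 0

# Old style: bare conditional expression as a keyword-argument value.
description_old = scan_notes if scan_notes != "" else f"Imaging plane for field {field_no+1}, channel {channel+1}"

# New style: the same expression wrapped in parentheses, as in the updated hunks.
description_new = (
    scan_notes
    if scan_notes != ""
    else f"Imaging plane for field {field_no+1}, channel {channel+1}"
)

assert description_old == description_new
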
58 changes: 34 additions & 24 deletions element_calcium_imaging/imaging.py
@@ -1027,30 +1027,40 @@ def make(self, key):
}
for fkey, ref_image, ave_img, corr_img, max_img in zip(
field_keys,
caiman_dataset.motion_correction["reference_image"].transpose(
2, 0, 1
)
if is3D
else caiman_dataset.motion_correction["reference_image"][...][
np.newaxis, ...
],
caiman_dataset.motion_correction["average_image"].transpose(2, 0, 1)
if is3D
else caiman_dataset.motion_correction["average_image"][...][
np.newaxis, ...
],
caiman_dataset.motion_correction["correlation_image"].transpose(
2, 0, 1
)
if is3D
else caiman_dataset.motion_correction["correlation_image"][...][
np.newaxis, ...
],
caiman_dataset.motion_correction["max_image"].transpose(2, 0, 1)
if is3D
else caiman_dataset.motion_correction["max_image"][...][
np.newaxis, ...
],
(
caiman_dataset.motion_correction["reference_image"].transpose(
2, 0, 1
)
if is3D
else caiman_dataset.motion_correction["reference_image"][...][
np.newaxis, ...
]
),
(
caiman_dataset.motion_correction["average_image"].transpose(
2, 0, 1
)
if is3D
else caiman_dataset.motion_correction["average_image"][...][
np.newaxis, ...
]
),
(
caiman_dataset.motion_correction["correlation_image"].transpose(
2, 0, 1
)
if is3D
else caiman_dataset.motion_correction["correlation_image"][...][
np.newaxis, ...
]
),
(
caiman_dataset.motion_correction["max_image"].transpose(2, 0, 1)
if is3D
else caiman_dataset.motion_correction["max_image"][...][
np.newaxis, ...
]
),
)
]
self.Summary.insert(summary_images)
1 change: 0 additions & 1 deletion element_calcium_imaging/imaging_no_curation.py
@@ -487,7 +487,6 @@ def make(self, key):
)
image_files = [
PVmeta.write_single_bigtiff(
plane_idx=0,
channel=channel,
output_dir=output_dir,
caiman_compatible=True,
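
This is the commit's one functional change: the export call no longer pins plane_idx=0 when writing a CaImAn-compatible TIFF. Presumably omitting plane_idx lets write_single_bigtiff cover every plane rather than only the first, but that default is not visible in this hunk, so treat it as an assumption. A minimal, runnable sketch of the call-site change, using a hypothetical stand-in for PVmeta.write_single_bigtiff so it works without the PrairieView loader or raw data:

# Hypothetical stand-in for PVmeta.write_single_bigtiff; it only reports which
# planes would be written, assuming plane_idx=None means all planes.
def write_single_bigtiff_stub(channel, output_dir, caiman_compatible, plane_idx=None):
    return "all planes" if plane_idx is None else f"plane {plane_idx} only"

# Before this commit: plane_idx=0 restricted output to the first plane.
print(write_single_bigtiff_stub(plane_idx=0, channel=0, output_dir=".", caiman_compatible=True))
# After this commit: plane_idx is omitted for CaImAn processing.
print(write_single_bigtiff_stub(channel=0, output_dir=".", caiman_compatible=True))
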
58 changes: 34 additions & 24 deletions element_calcium_imaging/imaging_preprocess.py
@@ -1240,30 +1240,40 @@ def make(self, key):
}
for fkey, ref_image, ave_img, corr_img, max_img in zip(
field_keys,
caiman_dataset.motion_correction["reference_image"].transpose(
2, 0, 1
)
if is3D
else caiman_dataset.motion_correction["reference_image"][...][
np.newaxis, ...
],
caiman_dataset.motion_correction["average_image"].transpose(2, 0, 1)
if is3D
else caiman_dataset.motion_correction["average_image"][...][
np.newaxis, ...
],
caiman_dataset.motion_correction["correlation_image"].transpose(
2, 0, 1
)
if is3D
else caiman_dataset.motion_correction["correlation_image"][...][
np.newaxis, ...
],
caiman_dataset.motion_correction["max_image"].transpose(2, 0, 1)
if is3D
else caiman_dataset.motion_correction["max_image"][...][
np.newaxis, ...
],
(
caiman_dataset.motion_correction["reference_image"].transpose(
2, 0, 1
)
if is3D
else caiman_dataset.motion_correction["reference_image"][...][
np.newaxis, ...
]
),
(
caiman_dataset.motion_correction["average_image"].transpose(
2, 0, 1
)
if is3D
else caiman_dataset.motion_correction["average_image"][...][
np.newaxis, ...
]
),
(
caiman_dataset.motion_correction["correlation_image"].transpose(
2, 0, 1
)
if is3D
else caiman_dataset.motion_correction["correlation_image"][...][
np.newaxis, ...
]
),
(
caiman_dataset.motion_correction["max_image"].transpose(2, 0, 1)
if is3D
else caiman_dataset.motion_correction["max_image"][...][
np.newaxis, ...
]
),
)
]
self.Summary.insert(summary_images)
74 changes: 45 additions & 29 deletions element_calcium_imaging/scan.py
@@ -330,17 +330,21 @@ def make(self, key):
px_width=scan.field_widths[field_id],
um_height=scan.field_heights_in_microns[field_id],
um_width=scan.field_widths_in_microns[field_id],
field_x=x_zero
+ scan._degrees_to_microns(scan.fields[field_id].x)
if x_zero
else None,
field_y=y_zero
+ scan._degrees_to_microns(scan.fields[field_id].y)
if y_zero
else None,
field_z=z_zero + scan.fields[field_id].depth
if z_zero
else None,
field_x=(
x_zero
+ scan._degrees_to_microns(scan.fields[field_id].x)
if x_zero
else None
),
field_y=(
y_zero
+ scan._degrees_to_microns(scan.fields[field_id].y)
if y_zero
else None
),
field_z=(
z_zero + scan.fields[field_id].depth if z_zero else None
),
delay_image=scan.field_offsets[field_id],
roi=scan.field_rois[field_id][0],
)
@@ -359,9 +363,11 @@ def make(self, key):
um_width=getattr(scan, "image_width_in_microns", None),
field_x=x_zero if x_zero else None,
field_y=y_zero if y_zero else None,
field_z=z_zero + scan.scanning_depths[plane_idx]
if z_zero
else None,
field_z=(
z_zero + scan.scanning_depths[plane_idx]
if z_zero
else None
),
delay_image=scan.field_offsets[plane_idx],
)
for plane_idx in range(scan.num_scanning_depths)
@@ -387,9 +393,11 @@ def make(self, key):
self.insert1(
dict(
key,
nfields=sbx_meta["num_fields"]
if is_multiROI
else sbx_meta["num_planes"],
nfields=(
sbx_meta["num_fields"]
if is_multiROI
else sbx_meta["num_planes"]
),
nchannels=sbx_meta["num_channels"],
nframes=sbx_meta["num_frames"],
ndepths=sbx_meta["num_planes"],
@@ -412,12 +420,16 @@ def make(self, key):
field_idx=plane_idx,
px_height=px_height,
px_width=px_width,
um_height=px_height * sbx_meta["um_per_pixel_y"]
if sbx_meta["um_per_pixel_y"]
else None,
um_width=px_width * sbx_meta["um_per_pixel_x"]
if sbx_meta["um_per_pixel_x"]
else None,
um_height=(
px_height * sbx_meta["um_per_pixel_y"]
if sbx_meta["um_per_pixel_y"]
else None
),
um_width=(
px_width * sbx_meta["um_per_pixel_x"]
if sbx_meta["um_per_pixel_x"]
else None
),
field_x=x_zero,
field_y=y_zero,
field_z=z_zero + sbx_meta["etl_pos"][plane_idx],
@@ -462,9 +474,11 @@ def estimate_nd2_scan_duration(nd2_scan_obj):
scan_datetime = nd2_file.text_info["date"]
scan_datetime = datetime.strptime(
scan_datetime,
"%m/%d/%Y %H:%M:%S %p"
if re.search(("AM|PM"), scan_datetime)
else "%m/%d/%Y %H:%M:%S",
(
"%m/%d/%Y %H:%M:%S %p"
if re.search(("AM|PM"), scan_datetime)
else "%m/%d/%Y %H:%M:%S"
),
)
scan_datetime = datetime.strftime(scan_datetime, "%Y-%m-%d %H:%M:%S")
except: # noqa: E722
@@ -552,9 +566,11 @@ def estimate_nd2_scan_duration(nd2_scan_obj):
um_width=PVScan_info["width_in_um"],
field_x=PVScan_info["fieldX"],
field_y=PVScan_info["fieldY"],
field_z=PVScan_info["fieldZ"]
if PVScan_info["num_planes"] == 1
else PVScan_info["fieldZ"][plane_idx],
field_z=(
PVScan_info["fieldZ"]
if PVScan_info["num_planes"] == 1
else PVScan_info["fieldZ"][plane_idx]
),
)
for plane_idx in range(PVScan_info["num_planes"])
)
1 change: 1 addition & 0 deletions element_calcium_imaging/version.py
@@ -1,2 +1,3 @@
"""Package metadata."""

__version__ = "0.9.2"
