Skip to content

Commit

Permalink
Track Sources with BIDSURI interface (#1283)
Browse files Browse the repository at this point in the history
  • Loading branch information
tsalo authored Oct 9, 2024
1 parent c26499b commit 4519468
Show file tree
Hide file tree
Showing 16 changed files with 413 additions and 475 deletions.
3 changes: 3 additions & 0 deletions xcp_d/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -524,6 +524,9 @@ def _process_value(value):
"preprocessed": cls.fmri_dir,
"templateflow": Path(TF_LAYOUT.root),
}
if cls.atlases:
dataset_links["atlas"] = cls.output_dir / "atlases"

for dset_name, dset_path in cls.datasets.items():
dataset_links[dset_name] = dset_path
cls.dataset_links = dataset_links
Expand Down
2 changes: 1 addition & 1 deletion xcp_d/interfaces/ants.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@


class _ConvertTransformFileInputSpec(CommandLineInputSpec):
dimension = traits.Enum((3, 2), default=3, usedefault=True, argstr="%d", position=0)
dimension = traits.Enum(3, 2, usedefault=True, argstr="%d", position=0)
in_transform = traits.File(exists=True, argstr="%s", mandatory=True, position=1)
out_transform = traits.File(
argstr="%s",
Expand Down
70 changes: 68 additions & 2 deletions xcp_d/interfaces/bids.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,15 +11,17 @@
from nipype.interfaces.base import (
BaseInterfaceInputSpec,
Directory,
DynamicTraitedSpec,
File,
SimpleInterface,
TraitedSpec,
traits,
)
from nipype.interfaces.io import add_traits
from niworkflows.interfaces.bids import DerivativesDataSink as BaseDerivativesDataSink

from xcp_d.data import load as load_data
from xcp_d.utils.bids import get_entity
from xcp_d.utils.bids import _get_bidsuris, get_entity

# NOTE: Modified for xcpd's purposes
xcp_d_spec = loads(load_data("xcp_d_bids_config.json").read_text())
Expand Down Expand Up @@ -193,6 +195,11 @@ class _CopyAtlasInputSpec(BaseInterfaceInputSpec):
desc="The atlas name.",
mandatory=True,
)
Sources = traits.List(
traits.Str,
desc="List of sources for the atlas.",
mandatory=False,
)


class _CopyAtlasOutputSpec(TraitedSpec):
Expand Down Expand Up @@ -243,6 +250,7 @@ def _run_interface(self, runtime):
meta_dict = self.inputs.meta_dict
name_source = self.inputs.name_source
atlas = self.inputs.atlas
Sources = self.inputs.Sources

atlas_out_dir = os.path.join(output_dir, f"atlases/atlas-{atlas}")

Expand Down Expand Up @@ -284,11 +292,69 @@ def _run_interface(self, runtime):
shutil.copyfile(in_file, out_file)

# Only write out a sidecar if metadata are provided
if meta_dict:
if meta_dict or Sources:
meta_file = os.path.join(atlas_out_dir, f"{out_basename}.json")
meta_dict = meta_dict or {}
meta_dict = meta_dict.copy()
if Sources:
meta_dict["Sources"] = meta_dict.get("Sources", []) + Sources

with open(meta_file, "w") as fo:
dump(meta_dict, fo, sort_keys=True, indent=4)

self._results["out_file"] = out_file

return runtime


class _BIDSURIInputSpec(DynamicTraitedSpec):
    """Input specification for the BIDSURI interface.

    The ``in1`` ... ``inN`` file-list inputs are added dynamically by
    ``BIDSURI.__init__`` via ``add_traits``; only the static inputs are
    declared here.
    """

    dataset_links = traits.Dict(desc="Dataset links", mandatory=True)
    out_dir = traits.Str(desc="Output directory", mandatory=True)
    metadata = traits.Dict(desc="Metadata dictionary")
    # Name of the metadata field that the generated URIs are appended to.
    field = traits.Str(
        "Sources",
        desc="Field to use for BIDS URIs in metadata dict",
        usedefault=True,
    )


class _BIDSURIOutputSpec(TraitedSpec):
    """Output specification for the BIDSURI interface."""

    out = traits.List(traits.Str, desc="BIDS URI(s) for file")
    metadata = traits.Dict(desc="Dictionary with 'Sources' field.")


class BIDSURI(SimpleInterface):
    """Convert input filenames to BIDS URIs, based on links in the dataset.

    The interface exposes ``numinputs`` dynamically created input traits
    (``in1`` ... ``in<numinputs>``), so several lists of files can be
    combined into a single list of URIs. The URIs are also appended to the
    ``field`` entry (default ``"Sources"``) of the ``metadata`` dictionary.
    """

    input_spec = _BIDSURIInputSpec
    output_spec = _BIDSURIOutputSpec

    def __init__(self, numinputs=0, **inputs):
        super().__init__(**inputs)
        self._numinputs = numinputs
        # Declare one dynamic input trait per expected input list.
        dynamic_names = [f"in{idx}" for idx in range(1, numinputs + 1)] if numinputs >= 1 else []
        add_traits(self.inputs, dynamic_names)

    def _run_interface(self, runtime):
        # Collect the dynamically added inputs in declaration order.
        collected = [getattr(self.inputs, f"in{idx}") for idx in range(1, self._numinputs + 1)]
        bids_uris = _get_bidsuris(collected, self.inputs.dataset_links, self.inputs.out_dir)
        self._results["out"] = bids_uris

        # Append the URIs to the requested metadata field without mutating
        # the caller-provided dictionary in place.
        sidecar = dict(self.inputs.metadata or {})
        sidecar[self.inputs.field] = sidecar.get(self.inputs.field, []) + bids_uris
        self._results["metadata"] = sidecar

        return runtime
43 changes: 19 additions & 24 deletions xcp_d/interfaces/censoring.py
Original file line number Diff line number Diff line change
Expand Up @@ -215,7 +215,7 @@ class _CensorInputSpec(BaseInterfaceInputSpec):
),
)
column = traits.Str(
default="framewise_displacement",
"framewise_displacement",
usedefault=True,
mandatory=False,
desc="Column name in the temporal mask to use for censoring.",
Expand Down Expand Up @@ -404,11 +404,12 @@ def _run_interface(self, runtime):
class _ProcessMotionInputSpec(BaseInterfaceInputSpec):
TR = traits.Float(mandatory=True, desc="Repetition time in seconds")
fd_thresh = traits.Float(
0.3,
mandatory=False,
default_value=0.3,
usedefault=True,
desc="Framewise displacement threshold. All values above this will be dropped.",
)
head_radius = traits.Float(mandatory=False, default_value=50, desc="Head radius in mm ")
head_radius = traits.Float(50, mandatory=False, usedefault=True, desc="Head radius in mm")
motion_file = File(
exists=True,
mandatory=True,
Expand Down Expand Up @@ -494,6 +495,11 @@ def _run_interface(self, runtime):
filtered=False,
)
fd_timeseries = motion_df["framewise_displacement"].to_numpy()
motion_metadata["framewise_displacement"] = {
"Description": "Framewise displacement calculated according to Power et al. (2012).",
"HeadRadius": self.inputs.head_radius,
"Units": "mm",
}
if self.inputs.motion_filter_type:
motion_df["framewise_displacement_filtered"] = compute_fd(
confound=motion_df,
Expand All @@ -504,18 +510,14 @@ def _run_interface(self, runtime):

# Compile motion metadata from confounds metadata, adding in filtering info
# First drop any columns that are not motion parameters
orig_motion_df = pd.read_table(self.inputs.motion_file)
orig_motion_cols = orig_motion_df.columns.tolist()
cols_to_drop = sorted(list(set(orig_motion_cols) - set(motion_df.columns.tolist())))
orig_cols = list(motion_metadata.keys())
orig_cols = [c for c in orig_cols if c[0] == c[0].lower()]
cols_to_drop = sorted(list(set(orig_cols) - set(motion_df.columns.tolist())))
motion_metadata = {k: v for k, v in motion_metadata.items() if k not in cols_to_drop}
for col in motion_df.columns.tolist():
col_metadata = motion_metadata.get(col, {})
if col.startswith("framewise_displacement"):
col_metadata["Description"] = (
"Framewise displacement calculated according to Power et al. (2012)."
)
col_metadata["Units"] = "mm"
col_metadata["HeadRadius"] = self.inputs.head_radius
if col.endswith("_filtered") and col[:-9] in motion_metadata:
col_metadata = motion_metadata[col[:-9]]

if self.inputs.motion_filter_type == "lp" and col.endswith("_filtered"):
filters = col_metadata.get("SoftwareFilters", {})
Expand Down Expand Up @@ -694,7 +696,7 @@ def _run_interface(self, runtime):
import nibabel as nb
import pandas as pd

from xcp_d.utils.bids import make_bids_uri
from xcp_d.utils.bids import _get_bidsuris
from xcp_d.utils.confounds import filter_motion, volterra

in_img = nb.load(self.inputs.in_file)
Expand Down Expand Up @@ -752,7 +754,7 @@ def _run_interface(self, runtime):
confounds_metadata[found_column] = confounds_metadata.get(
found_column, {}
)
confounds_metadata[found_column]["Sources"] = make_bids_uri(
confounds_metadata[found_column]["Sources"] = _get_bidsuris(
in_files=[confound_file],
dataset_links=self.inputs.dataset_links,
out_dir=self.inputs.out_dir,
Expand All @@ -772,19 +774,12 @@ def _run_interface(self, runtime):
new_confound_df.fillna({column: 0}, inplace=True)

confounds_metadata[column] = confounds_metadata.get(column, {})
confounds_metadata[column]["Sources"] = make_bids_uri(
confounds_metadata[column]["Sources"] = _get_bidsuris(
in_files=[confound_file],
dataset_links=self.inputs.dataset_links,
out_dir=self.inputs.out_dir,
)

# Collect column metadata
for column in new_confound_df.columns:
if column in confound_metadata:
confounds_metadata[column] = confound_metadata[column]
else:
confounds_metadata[column] = {}

else: # Voxelwise confounds
confound_img = nb.load(confound_file)
if confound_img.ndim == 2: # CIFTI
Expand All @@ -804,7 +799,7 @@ def _run_interface(self, runtime):
# Collect image metadata
new_confound_df.loc[:, confound_name] = np.nan # fill with NaNs as a placeholder
confounds_metadata[confound_name] = confound_metadata
confounds_metadata[confound_name]["Sources"] = make_bids_uri(
confounds_metadata[confound_name]["Sources"] = _get_bidsuris(
in_files=[confound_file],
dataset_links=self.inputs.dataset_links,
out_dir=self.inputs.out_dir,
Expand All @@ -815,7 +810,7 @@ def _run_interface(self, runtime):
)

# This actually gets overwritten in init_postproc_derivatives_wf.
confounds_metadata["Sources"] = make_bids_uri(
confounds_metadata["Sources"] = _get_bidsuris(
in_files=confound_files,
dataset_links=self.inputs.dataset_links,
out_dir=self.inputs.out_dir,
Expand Down
2 changes: 1 addition & 1 deletion xcp_d/interfaces/connectivity.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ class _NiftiParcellateInputSpec(BaseInterfaceInputSpec):
atlas = File(exists=True, mandatory=True, desc="atlas file")
atlas_labels = File(exists=True, mandatory=True, desc="atlas labels file")
min_coverage = traits.Float(
default=0.5,
0.5,
usedefault=True,
desc=(
"Coverage threshold to apply to parcels. "
Expand Down
6 changes: 3 additions & 3 deletions xcp_d/interfaces/nilearn.py
Original file line number Diff line number Diff line change
Expand Up @@ -261,9 +261,9 @@ class _DenoiseImageInputSpec(BaseInterfaceInputSpec):
)
TR = traits.Float(mandatory=True, desc="Repetition time")
bandpass_filter = traits.Bool(mandatory=True, desc="To apply bandpass or not")
low_pass = traits.Float(mandatory=True, default_value=0.10, desc="Lowpass filter in Hz")
high_pass = traits.Float(mandatory=True, default_value=0.01, desc="Highpass filter in Hz")
filter_order = traits.Int(mandatory=True, default_value=2, desc="Filter order")
low_pass = traits.Float(mandatory=True, desc="Lowpass filter in Hz")
high_pass = traits.Float(mandatory=True, desc="Highpass filter in Hz")
filter_order = traits.Int(mandatory=True, desc="Filter order")


class _DenoiseImageOutputSpec(TraitedSpec):
Expand Down
13 changes: 6 additions & 7 deletions xcp_d/interfaces/plotting.py
Original file line number Diff line number Diff line change
Expand Up @@ -368,7 +368,7 @@ class _QCPlotsESInputSpec(BaseInterfaceInputSpec):
Undefined,
desc="TSV file with temporal mask.",
)
TR = traits.Float(default_value=1, desc="Repetition time")
TR = traits.Float(1, usedefault=True, desc="Repetition time")
standardize = traits.Bool(
mandatory=True,
desc=(
Expand Down Expand Up @@ -529,7 +529,6 @@ class _SlicesDirInputSpec(FSLCommandInputSpec):

out_extension = traits.Enum(
(".gif", ".png", ".svg"),
default=".gif",
usedefault=True,
desc="Convenience parameter to let xcp_d select the extension.",
)
Expand Down Expand Up @@ -670,27 +669,27 @@ class _PlotCiftiParcellationInputSpec(BaseInterfaceInputSpec):
desc="Labels for the CIFTI files.",
)
out_file = File(
"plot.svg",
exists=False,
mandatory=False,
desc="Output file.",
default="plot.svg",
usedefault=True,
)
vmin = traits.Float(
0,
mandatory=False,
default_value=0,
usedefault=True,
desc="Minimum value for the colormap.",
)
vmax = traits.Float(
0,
mandatory=False,
default_value=0,
usedefault=True,
desc="Maximum value for the colormap.",
)
base_desc = traits.Str(
"",
mandatory=False,
default_value="",
usedefault=True,
desc="Base description for the output file.",
)
Expand Down Expand Up @@ -910,8 +909,8 @@ class _PlotDenseCiftiInputSpec(BaseInterfaceInputSpec):
desc="CIFTI file to plot.",
)
base_desc = traits.Str(
"",
mandatory=False,
default_value="",
usedefault=True,
desc="Base description for the output file.",
)
Expand Down
4 changes: 2 additions & 2 deletions xcp_d/interfaces/workbench.py
Original file line number Diff line number Diff line change
Expand Up @@ -622,7 +622,7 @@ class _CiftiParcellateWorkbenchInputSpec(_WBCommandInputSpec):
"MODE",
"COUNT_NONZERO",
position=12,
default="MEAN",
usedefault=True,
argstr="-method %s",
desc="Specify method of parcellation (default MEAN, or MODE if label data)",
)
Expand Down Expand Up @@ -709,7 +709,7 @@ class _CiftiSurfaceResampleInputSpec(_WBCommandInputSpec):
"The BARYCENTRIC method is generally recommended for anatomical surfaces, "
"in order to minimize smoothing."
),
default="BARYCENTRIC",
usedefault=True,
)
out_file = File(
name_source=["in_file"],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ atlases/atlas-4S156Parcels/atlas-4S156Parcels_space-MNI152NLin2009cAsym_dseg.jso
atlases/atlas-4S156Parcels/atlas-4S156Parcels_space-MNI152NLin2009cAsym_dseg.nii.gz
atlases/atlas-Schaefer100
atlases/atlas-Schaefer100/atlas-Schaefer100_dseg.tsv
atlases/atlas-Schaefer100/atlas-Schaefer100_space-MNI152NLin2009cAsym_dseg.json
atlases/atlas-Schaefer100/atlas-Schaefer100_space-MNI152NLin2009cAsym_dseg.nii.gz
atlases/dataset_description.json
dataset_description.json
Expand Down
Loading

0 comments on commit 4519468

Please sign in to comment.