Skip to content

Commit

Permalink
Update.
Browse files Browse the repository at this point in the history
  • Loading branch information
tsalo committed Oct 16, 2023
1 parent bdaf171 commit aba3e1b
Show file tree
Hide file tree
Showing 7 changed files with 96 additions and 80 deletions.
8 changes: 0 additions & 8 deletions xcp_d/interfaces/bids.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,14 +93,6 @@ def _list_outputs(self):
f"bids:{self.inputs.dataset_name}:{str(Path(p).relative_to(self.inputs.dataset_path))}"
for p in raw_paths
]
if not bids_uris:
raise ValueError(
"Something's missing:\n"
f"\tdataset_name: {self.inputs.dataset_name}\n"
f"\tdataset_path: {self.inputs.dataset_path}\n"
f"\tvalues: {values}\n"
f"\traw_paths: {raw_paths}"
)

outputs["bids_uris"] = bids_uris
return outputs
10 changes: 10 additions & 0 deletions xcp_d/tests/test_interfaces_bids.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,3 +25,13 @@ def test_infer_bids_uris():
f"bids:{dataset_name}:sub-01/ses-01/func/sub-01_ses-01_task-rest_run-02_bold.nii.gz",
f"bids:{dataset_name}:sub-01/ses-01/func/sub-01_ses-01_task-nback_run-01_bold.nii.gz",
]

dataset_name = "ds000001"
dataset_path = "/path/to/dset"
infer_bids_uris = bids.InferBIDSURIs(
numinputs=0,
dataset_name=dataset_name,
dataset_path=dataset_path,
)
out = infer_bids_uris.run()
assert out.outputs.bids_uris == []
13 changes: 13 additions & 0 deletions xcp_d/tests/test_utils_bids.py
Original file line number Diff line number Diff line change
Expand Up @@ -294,3 +294,16 @@ def test_group_across_runs():
"/path/sub-01_task-rest_dir-LR_run-2_bold.nii.gz",
"/path/sub-01_task-rest_dir-RL_run-2_bold.nii.gz",
]


def test_make_uri():
    """Test xbids._make_uri on in-dataset and out-of-dataset files."""
    bold_file = "/path/to/dset/sub-01/func/sub-01_task-rest_bold.nii.gz"

    # A file located under the dataset path yields a BIDS URI string.
    result = xbids._make_uri(bold_file, dataset_name="test", dataset_path="/path/to/dset")
    assert result == "bids:test:sub-01/func/sub-01_task-rest_bold.nii.gz"

    # A file outside the dataset path raises (from Path.relative_to).
    with pytest.raises(ValueError, match="is not in the subpath of"):
        xbids._make_uri(bold_file, dataset_name="test", dataset_path="/another/path/haha")
13 changes: 0 additions & 13 deletions xcp_d/tests/test_utils_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -450,19 +450,6 @@ def test_make_dictionary():
assert out_metadata["Sources"] == ["b"]


def test_out_file_to_source():
    """Test utils._out_file_to_source on in-dataset and out-of-dataset files."""
    bold_file = "/path/to/dset/sub-01/func/sub-01_task-rest_bold.nii.gz"

    # A file located under the dataset path yields a BIDS URI.
    result = utils._out_file_to_source(bold_file, dataset_name="test", dataset_path="/path/to/dset")
    assert result == "bids:test:sub-01/func/sub-01_task-rest_bold.nii.gz"

    # A file outside the dataset path raises (from Path.relative_to).
    with pytest.raises(ValueError, match="is not in the subpath of"):
        utils._out_file_to_source(bold_file, dataset_name="test", dataset_path="/another/path/haha")


def test_transpose_lol():
"""Test _transpose_lol."""
inputs = [
Expand Down
49 changes: 49 additions & 0 deletions xcp_d/utils/bids.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
"""
import os
import warnings
from pathlib import Path

import nibabel as nb
import yaml
Expand Down Expand Up @@ -975,3 +976,51 @@ def group_across_runs(in_files):
out_files[group_idx].append(in_file)

return out_files


def _make_uri(in_file, dataset_name, dataset_path):
"""Convert a filename to a BIDS URI.
Raises
------
ValueError
If ``in_file`` is not relative to ``dataset_path``.
"""
bids_uri = [f"bids:{dataset_name}:{str(Path(in_file).relative_to(dataset_path))}"]
return bids_uri


def _make_xcpd_uri(out_file, output_dir):
    """Convert postprocessing derivative's path to BIDS URI."""
    import os

    from xcp_d.utils.bids import _make_uri

    # xcp_d derivatives live in the "xcp_d" subdirectory of the output dir.
    xcpd_path = os.path.join(output_dir, "xcp_d")

    if not isinstance(out_file, list):
        return _make_uri(out_file, "xcp_d", xcpd_path)

    return [_make_uri(each_file, "xcp_d", xcpd_path) for each_file in out_file]


def _make_preproc_uri(out_file, fmri_dir):
    """Convert preprocessing derivative's path to BIDS URI."""
    from xcp_d.utils.bids import _make_uri

    if not isinstance(out_file, list):
        return _make_uri(out_file, "preprocessed", fmri_dir)

    return [_make_uri(each_file, "preprocessed", fmri_dir) for each_file in out_file]


def _make_custom_uri(out_file):
    """Convert custom confound's path to BIDS URI."""
    import os

    from xcp_d.utils.bids import _make_uri

    # Custom confounds have no fixed dataset root, so each file's own
    # directory is used as the dataset path.
    if not isinstance(out_file, list):
        return _make_uri(out_file, "custom_confounds", os.path.dirname(out_file))

    return [
        _make_uri(each_file, "custom_confounds", os.path.dirname(each_file))
        for each_file in out_file
    ]
7 changes: 0 additions & 7 deletions xcp_d/utils/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
# -*- coding: utf-8 -*-
"""Miscellaneous utility functions for xcp_d."""
import warnings
from pathlib import Path

import nibabel as nb
import numpy as np
Expand Down Expand Up @@ -565,12 +564,6 @@ def _make_dictionary(metadata=None, **kwargs):
return dict(kwargs)


def _out_file_to_source(in_file, dataset_name, dataset_path):
"""Convert a filename to a BIDS URI."""
bids_uri = [f"bids:{dataset_name}:{str(Path(in_file).relative_to(dataset_path))}"]
return bids_uri


def _transpose_lol(lol):
"""Transpose list of lists."""
return list(map(list, zip(*lol)))
76 changes: 24 additions & 52 deletions xcp_d/workflows/outputs.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,12 @@

from xcp_d.interfaces.bids import DerivativesDataSink, InferBIDSURIs
from xcp_d.interfaces.utils import FilterUndefined
from xcp_d.utils.bids import get_entity
from xcp_d.utils.bids import (
_make_custom_uri,
_make_preproc_uri,
_make_xcpd_uri,
get_entity,
)
from xcp_d.utils.doc import fill_doc
from xcp_d.utils.utils import _make_dictionary

Expand Down Expand Up @@ -282,39 +287,6 @@ def init_postproc_derivatives_wf(
name="outputnode",
)

def _postproc_to_source(out_file, output_dir):
    """Convert an xcp_d derivative path (or list of paths) to BIDS URI(s).

    The dataset root is taken to be ``<output_dir>/xcp_d``; the return type
    mirrors the input (list in, list out).
    """
    # Imports are local so nipype can serialize this connect function.
    import os

    from xcp_d.utils.utils import _out_file_to_source

    if isinstance(out_file, list):
        return [
            _out_file_to_source(of, "xcp_d", os.path.join(output_dir, "xcp_d"))
            for of in out_file
        ]
    else:
        return _out_file_to_source(out_file, "xcp_d", os.path.join(output_dir, "xcp_d"))

def _preproc_to_source(out_file, fmri_dir):
    """Convert a preprocessed-dataset path (or list of paths) to BIDS URI(s).

    ``fmri_dir`` is used directly as the dataset root; the return type mirrors
    the input (list in, list out).
    """
    # Import is local so nipype can serialize this connect function.
    from xcp_d.utils.utils import _out_file_to_source

    if isinstance(out_file, list):
        return [_out_file_to_source(of, "preprocessed", fmri_dir) for of in out_file]
    else:
        return _out_file_to_source(out_file, "preprocessed", fmri_dir)

def _custom_to_source(out_file):
    """Convert a custom-confounds path (or list of paths) to BIDS URI(s).

    Custom confounds have no fixed dataset root, so each file's own directory
    is used as the dataset path; the return type mirrors the input.
    """
    # Imports are local so nipype can serialize this connect function.
    import os

    from xcp_d.utils.utils import _out_file_to_source

    if isinstance(out_file, list):
        return [
            _out_file_to_source(of, "custom_confounds", os.path.dirname(of)) for of in out_file
        ]
    else:
        return _out_file_to_source(out_file, "custom_confounds", os.path.dirname(out_file))

# Create dictionary of basic information
cleaned_data_dictionary = {
"nuisance parameters": params,
Expand Down Expand Up @@ -343,7 +315,7 @@ def _custom_to_source(out_file):
# Determine cohort (if there is one) in the original data
cohort = get_entity(name_source, "cohort")

preproc_bold_src = _preproc_to_source(name_source, fmri_dir)
preproc_bold_src = _make_preproc_uri(name_source, fmri_dir)

atlas_src = pe.MapNode(
InferBIDSURIs(
Expand Down Expand Up @@ -389,7 +361,7 @@ def _custom_to_source(out_file):
(inputnode, ds_filtered_motion, [
("motion_metadata", "meta_dict"),
("filtered_motion", "in_file"),
(("fmriprep_confounds_file", _preproc_to_source, fmri_dir), "Sources"),
(("fmriprep_confounds_file", _make_preproc_uri, fmri_dir), "Sources"),
]),
(ds_filtered_motion, outputnode, [("out_file", "filtered_motion")]),
])
Expand Down Expand Up @@ -425,11 +397,11 @@ def _custom_to_source(out_file):
("temporal_mask", "in_file"),
]),
(ds_filtered_motion, ds_temporal_mask, [
(("out_file", _postproc_to_source, output_dir), "Sources"),
(("out_file", _make_xcpd_uri, output_dir), "Sources"),
]),
(ds_temporal_mask, outputnode, [("out_file", "temporal_mask")]),
(ds_temporal_mask, merge_dense_src, [
(("out_file", _postproc_to_source, output_dir), "in2"),
(("out_file", _make_xcpd_uri, output_dir), "in2"),
]),
])
# fmt:on
Expand All @@ -448,16 +420,16 @@ def _custom_to_source(out_file):
# fmt:off
workflow.connect([
(ds_temporal_mask, confounds_src, [
(("out_file", _postproc_to_source, output_dir), "in2"),
(("out_file", _make_xcpd_uri, output_dir), "in2"),
]),
])
# fmt:on

if custom_confounds_file:
confounds_src.inputs.in3 = _custom_to_source(custom_confounds_file)
confounds_src.inputs.in3 = _make_custom_uri(custom_confounds_file)

elif custom_confounds_file:
confounds_src.inputs.in2 = _custom_to_source(custom_confounds_file)
confounds_src.inputs.in2 = _make_custom_uri(custom_confounds_file)

ds_confounds = pe.Node(
DerivativesDataSink(
Expand All @@ -476,7 +448,7 @@ def _custom_to_source(out_file):
(inputnode, ds_confounds, [("confounds_file", "in_file")]),
(confounds_src, ds_confounds, [("out", "Sources")]),
(ds_confounds, merge_dense_src, [
(("out_file", _postproc_to_source, output_dir), f"in{3 if fd_thresh > 0 else 2}"),
(("out_file", _make_xcpd_uri, output_dir), f"in{3 if fd_thresh > 0 else 2}"),
]),
])
# fmt:on
Expand Down Expand Up @@ -529,7 +501,7 @@ def _custom_to_source(out_file):
("interpolated_filtered_bold", "in_file"),
]),
(ds_denoised_bold, ds_interpolated_denoised_bold, [
(("out_file", _postproc_to_source, output_dir), "Sources"),
(("out_file", _make_xcpd_uri, output_dir), "Sources"),
]),
(ds_interpolated_denoised_bold, outputnode, [
("out_file", "interpolated_filtered_bold"),
Expand Down Expand Up @@ -579,7 +551,7 @@ def _custom_to_source(out_file):
workflow.connect([
(make_atlas_dict, add_denoised_to_src, [("metadata", "metadata")]),
(ds_denoised_bold, add_denoised_to_src, [
(("out_file", _postproc_to_source, output_dir), "Sources"),
(("out_file", _make_xcpd_uri, output_dir), "Sources"),
]),
])
# fmt:on
Expand Down Expand Up @@ -608,7 +580,7 @@ def _custom_to_source(out_file):
workflow.connect([
(inputnode, ds_smoothed_bold, [("smoothed_denoised_bold", "in_file")]),
(ds_denoised_bold, ds_smoothed_bold, [
(("out_file", _postproc_to_source, output_dir), "Sources"),
(("out_file", _make_xcpd_uri, output_dir), "Sources"),
]),
(ds_smoothed_bold, outputnode, [("out_file", "smoothed_denoised_bold")]),
])
Expand Down Expand Up @@ -655,7 +627,7 @@ def _custom_to_source(out_file):
workflow.connect([
(add_denoised_to_src, add_coverage_to_src, [("metadata", "metadata")]),
(ds_coverage, add_coverage_to_src, [
(("out_file", _postproc_to_source, output_dir), "Sources"),
(("out_file", _make_xcpd_uri, output_dir), "Sources"),
]),
])
# fmt:on
Expand Down Expand Up @@ -764,7 +736,7 @@ def _custom_to_source(out_file):
workflow.connect([
(add_denoised_to_src, add_ccoverage_to_src, [("metadata", "metadata")]),
(ds_coverage_ciftis, add_ccoverage_to_src, [
(("out_file", _postproc_to_source, output_dir), "Sources"),
(("out_file", _make_xcpd_uri, output_dir), "Sources"),
]),
])
# fmt:on
Expand Down Expand Up @@ -897,7 +869,7 @@ def _custom_to_source(out_file):
# fmt:off
workflow.connect([
(inputnode, ds_reho, [("reho", "in_file")]),
(ds_denoised_bold, ds_reho, [(("out_file", _postproc_to_source, output_dir), "Sources")]),
(ds_denoised_bold, ds_reho, [(("out_file", _make_xcpd_uri, output_dir), "Sources")]),
])
# fmt:on

Expand All @@ -915,7 +887,7 @@ def _custom_to_source(out_file):
# fmt:off
workflow.connect([
(make_atlas_dict, add_reho_to_src, [("metadata", "metadata")]),
(ds_reho, add_reho_to_src, [(("out_file", _postproc_to_source, output_dir), "Sources")]),
(ds_reho, add_reho_to_src, [(("out_file", _make_xcpd_uri, output_dir), "Sources")]),
])
# fmt:on

Expand Down Expand Up @@ -968,7 +940,7 @@ def _custom_to_source(out_file):
workflow.connect([
(inputnode, ds_alff, [("alff", "in_file")]),
(ds_denoised_bold, ds_alff, [
(("out_file", _postproc_to_source, output_dir), "Sources"),
(("out_file", _make_xcpd_uri, output_dir), "Sources"),
]),
])
# fmt:on
Expand Down Expand Up @@ -997,7 +969,7 @@ def _custom_to_source(out_file):
workflow.connect([
(inputnode, ds_smoothed_alff, [("smoothed_alff", "in_file")]),
(ds_alff, ds_smoothed_alff, [
(("out_file", _postproc_to_source, output_dir), "Sources"),
(("out_file", _make_xcpd_uri, output_dir), "Sources"),
]),
])
# fmt:on
Expand All @@ -1017,7 +989,7 @@ def _custom_to_source(out_file):
workflow.connect([
(make_atlas_dict, add_alff_to_src, [("metadata", "metadata")]),
(ds_alff, add_alff_to_src, [
(("out_file", _postproc_to_source, output_dir), "Sources"),
(("out_file", _make_xcpd_uri, output_dir), "Sources"),
]),
])
# fmt:on
Expand Down

0 comments on commit aba3e1b

Please sign in to comment.