Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Read scalar maps from previous qsirecon runs #192

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 43 additions & 0 deletions qsirecon/data/pipelines/ingress_scalars_for_bundles.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
name: bundle_scalar_map
anatomical: []
nodes:

- action: import_scalars
input: qsirecon
name: dipy_dki_ingress
parameters:
# Whatever was specified with --datasets dipydki=/x/y/z
dataset_name: dipydki
# Do you want a copy of the inputs to go in the output dataset?
# Metadata from the input will be included in the output no matter what
copy_inputs: false
# This is where the tsv will go
qsirecon_suffix: DIPYDKI
# Future idea: Get ALFF/REHO/something into ACPC, then resample into each dwi ACPC space

- action: autotrack
input: dsistudio_gqi
name: autotrackgqi
parameters:
tolerance: 22,26,30
track_id: Fasciculus,Cingulum,Aslant,Corticos,Thalamic_R,Reticular,Optic,Fornix,Corpus
track_voxel_ratio: 2.0
yield_rate: 1.0e-06
qsirecon_suffix: DSIStudio
software: DSI Studio

- action: bundle_map
input: autotrackgqi
name: bundle_means
scalars_from:
- dipy_dki_ingress
software: qsirecon

- action: template_map
input: qsirecon
name: template_map
parameters:
interpolation: NearestNeighbor
scalars_from:
- dipy_dki_ingress
software: qsirecon
29 changes: 29 additions & 0 deletions qsirecon/data/recon_scalar_bids_config.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
{
"name": "qsirecon",
"entities": [
{
"name": "cohort",
"pattern": "(?:^|_)cohort-([0-9]+)",
"dtype": "int"
},
{
"name": "seg",
"pattern": "(?:^|_)seg-([a-zA-Z0-9]+)"
},
{
"name": "bundles",
"pattern": "(?:^|_)bundles-([a-zA-Z0-9]+)"
},
{
"name": "param",
"pattern": "(?:^|_)param-([a-zA-Z0-9]+)"
},
{
"name": "bundle",
"pattern": "(?:^|_)bundle-([a-zA-Z0-9]+)"
}
],
"default_path_patterns": [
"sub-{subject}[/ses-{session}]/{datatype<dwi>|dwi}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_dir-{direction}][_rec-{reconstruction}][_run-{run}][_space-{space}][_cohort-{cohort}][_seg-{seg}][_model-{model}][_bundles-{bundles}][_param-{param}][_bundle-{bundle}][_label-{label}][_desc-{desc}]_{suffix<dwimap|config|dwi>}.{extension<nii|nii.gz|fib|fib.gz|mif|mif.gz|pickle.gz|b|bvec|bval|txt|json>|nii.gz}"
]
}
9 changes: 8 additions & 1 deletion qsirecon/workflows/recon/build_workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,12 @@
from .scalar_mapping import init_scalar_to_bundle_wf, init_scalar_to_template_wf
from .steinhardt import init_steinhardt_order_param_wf
from .tortoise import init_tortoise_estimator_wf
from .utils import init_conform_dwi_wf, init_discard_repeated_samples_wf, init_test_wf
from .utils import (
    init_conform_dwi_wf,
    init_discard_repeated_samples_wf,
    init_import_scalars_wf,
    init_test_wf,
)


def _check_repeats(nodelist):
Expand Down Expand Up @@ -286,6 +291,8 @@ def workflow_from_spec(inputs_dict, node_spec):
return init_scalar_to_template_wf(**kwargs)
if node_spec["action"] == "test_workflow":
return init_test_wf(**kwargs)
if node_spec["action"] == "import_scalars":
return init_import_scalars_wf(**kwargs)

raise Exception("Unknown node %s" % node_spec)

Expand Down
53 changes: 53 additions & 0 deletions qsirecon/workflows/recon/scalar_mapping.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,17 +12,20 @@

import logging

from bids.layout import BIDSLayout
import nipype.interfaces.utility as niu
import nipype.pipeline.engine as pe
from niworkflows.engine.workflows import LiterateWorkflow as Workflow

from ... import config
from ...interfaces.bids import DerivativesDataSink
from ...interfaces.interchange import recon_workflow_input_fields
from ...interfaces.recon_scalars import ReconScalarsTableSplitterDataSink
from ...interfaces.scalar_mapping import BundleMapper, TemplateMapper
from ...utils.bids import clean_datasinks
from .utils import init_scalar_output_wf


LOGGER = logging.getLogger("nipype.workflow")


Expand Down Expand Up @@ -199,3 +202,53 @@ def init_scalar_to_surface_wf(
):
"""Maps scalar data to a surface."""
raise NotImplementedError()


def init_import_scalars_wf(
    inputs_dict,
    name="import_scalars",
    qsirecon_suffix="",
    params=None,
):
    """Read scalar maps (``dwimap`` files) from a previously-run qsirecon dataset.

    Builds a workflow that locates ``dwimap`` derivatives in an external
    qsirecon output dataset (registered on the command line via
    ``--datasets <name>=/path``) whose BIDS entities match the current DWI.

    Parameters
    ----------
    inputs_dict : dict
        Recon-workflow inputs; must contain a ``"dwi_file"`` path.
    name : str
        Name for the returned nipype workflow.
    qsirecon_suffix : str
        Output-dataset suffix (not used yet; kept for signature parity with
        the other ``init_*_wf`` factories called from ``workflow_from_spec``).
    params : dict or None
        Node parameters from the recon spec. Must contain ``"dataset_name"``
        (or the legacy ``"dataset"`` key) naming the input dataset.

    Returns
    -------
    workflow : LiterateWorkflow
    """
    from qsirecon.data import load as load_data

    # Avoid a mutable default argument; normalize None to an empty dict.
    params = params or {}

    inputnode = pe.Node(
        niu.IdentityInterface(fields=recon_workflow_input_fields),
        name="inputnode",
    )
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=[
                "scalar_image_info",
                "recon_scalars",
            ]
        ),
        name="outputnode",
    )
    workflow = Workflow(name=name)

    # The shipped recon-spec yaml uses "dataset_name"; also accept the
    # legacy "dataset" key so older specs keep working.
    dataset_name = params.get("dataset_name") or params.get("dataset")
    if not dataset_name:
        raise Exception("Must specify a dataset_name key in the recon spec parameters")

    dataset_path = config.execution.datasets.get(dataset_name)
    if dataset_path is None:
        raise Exception(
            f"Dataset {dataset_name} is not available in the specified "
            "inputs datasets. Either change the recon spec yaml file or "
            f"include --datasets {dataset_name}=/path/to/dataset on the command line."
        )

    # NOTE: this filename must match the json shipped in qsirecon/data/
    # (recon_scalar_bids_config.json, singular "scalar").
    scalars_cfg = load_data("recon_scalar_bids_config.json")
    layout = BIDSLayout(dataset_path, config=[scalars_cfg], validate=False)

    # layout.get(<path>) returns a list of matches; get_file() returns the
    # single BIDSFile object for this exact path.
    input_dwi_file = config.execution.layout.get_file(inputs_dict["dwi_file"])

    # Query for dwimaps sharing the dwi's entities. Drop entities that would
    # conflict with the query: "suffix" is replaced by "dwimap", and
    # "extension"/"datatype" may legitimately differ between datasets.
    query_entities = {
        key: value
        for key, value in input_dwi_file.entities.items()
        if key not in ("suffix", "extension", "datatype")
    }
    dwimaps = layout.get(suffix="dwimap", **query_entities)

    # TODO(review): wire the located dwimaps into the scalar gatherer and
    # connect them to outputnode ("scalar_image_info"/"recon_scalars").

    return workflow