Use config throughout workflows.
tsalo committed Oct 3, 2023
1 parent 7f97e18 commit 8a80d78
Showing 12 changed files with 197 additions and 761 deletions.
15 changes: 15 additions & 0 deletions xcp_d/config.py
@@ -92,6 +92,7 @@

if not hasattr(sys, "_is_pytest_session"):
sys._is_pytest_session = False # Trick to avoid sklearn's FutureWarnings

# Disable all warnings in main and children processes only on production versions
if not any(
(
@@ -177,9 +178,11 @@ def load(cls, settings, init=True):
for k, v in settings.items():
if v is None:
continue

if k in cls._paths:
setattr(cls, k, Path(v).absolute())
continue

if hasattr(cls, k):
setattr(cls, k, v)

@@ -196,12 +199,15 @@ def get(cls):
for k, v in cls.__dict__.items():
if k.startswith("_") or v is None:
continue

if callable(getattr(cls, k)):
continue

if k in cls._paths:
v = str(v)

out[k] = v

return out


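As context for the changes to these two classmethods (a sketch, not part of the diff): load() skips None values, coerces any key listed in cls._paths to an absolute Path, and silently ignores unknown keys, while get() serializes the section back to a plain dict with private names and callables dropped and Path values stringified. The settings keys below come from the workflow section shown further down in this file.

```python
from xcp_d import config

# Illustrative settings dict, e.g. built from parsed CLI arguments.
settings = {
    "cifti": True,        # attribute defined on config.workflow (see below)
    "smoothing": 4.0,     # FWHM in mm
    "dummy_scans": None,  # None values are skipped, so the class default (0) is kept
}
config.workflow.load(settings, init=False)  # init=False: skip the section's init() for this sketch

assert config.workflow.cifti is True
assert config.workflow.dummy_scans == 0

# get() returns only public, non-callable attributes, with Path values as strings.
print(config.workflow.get())
```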
@@ -275,6 +281,7 @@ def get_plugin(cls):
out["plugin_args"]["n_procs"] = int(cls.nprocs)
if cls.memory_gb:
out["plugin_args"]["memory_gb"] = float(cls.memory_gb)

return out

@classmethod
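Below is a hedged sketch of how the returned plugin settings might be passed to a nipype workflow run. The assumption that get_plugin() lives on the config.nipype section (as in fMRIPrep-style config modules) and the throwaway workflow are illustrative, not taken from this diff.

```python
from nipype.pipeline import engine as pe
from xcp_d import config

wf = pe.Workflow(name="demo_wf", base_dir=".")  # stand-in for the real top-level workflow

plugin_settings = config.nipype.get_plugin()
# Per the diff above, plugin_args carries n_procs and memory_gb when those limits are set,
# e.g. {"plugin": "MultiProc", "plugin_args": {"n_procs": 8, "memory_gb": 16.0}}

wf.run(**plugin_settings)  # Workflow.run accepts plugin= and plugin_args= keywords
```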
@@ -399,8 +406,14 @@ def init(cls):

class workflow(_Config):
"""Configure the particular execution graph of this workflow."""
analysis_level = "participant"
# The BIDS App analysis level (only "participant" allowed)
input_type = "fmriprep"
# The pipeline used to generate the preprocessed derivatives.
combineruns = False
# After denoising, concatenate each derivative from each task across runs.
cifti = False
# Whether to process ciftis or niftis.
smoothing = 6
# FWHM, in millimeters, of the Gaussian smoothing kernel to apply to the denoised BOLD data.
# This may be set to 0.
@@ -417,6 +430,8 @@ class workflow(_Config):
# Coverage threshold to apply to parcels in each atlas.
min_time = 100
# Post-scrubbing threshold to apply to individual runs in the dataset.
exact_time = []
# Produce correlation matrices limited to each requested amount of time.
dummy_scans = 0
# Number of volumes to remove from the beginning of each run.
disable_bandpass_filter = True
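Finally, the commit title, "Use config throughout workflows.", refers to workflow-building functions reading these options from the module-level config instead of taking them as parameters. A minimal sketch of that pattern follows; the builder name and the elided node wiring are illustrative, not the actual xcp_d code.

```python
from nipype.pipeline import engine as pe
from xcp_d import config

def init_example_postproc_wf(name="example_postproc_wf"):
    """Illustrative builder that reads options from config rather than from arguments."""
    workflow = pe.Workflow(name=name)

    if config.workflow.smoothing:  # FWHM in mm; 0 disables smoothing
        ...  # add a smoothing node driven by config.workflow.smoothing

    if config.workflow.combineruns:
        ...  # add the across-run concatenation workflow

    if config.workflow.cifti:
        ...  # wire CIFTI-specific nodes instead of NIfTI ones

    return workflow
```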

