Commit

Include DVARS to the project
bclenet committed Mar 13, 2024
1 parent 3972f77 commit 5b9a532
Showing 4 changed files with 83 additions and 4 deletions.
File renamed without changes.
67 changes: 67 additions & 0 deletions narps_open/core/interfaces/confounds.py
@@ -0,0 +1,67 @@
#!/usr/bin/python
# coding: utf-8

""" This module defines custom interfaces related to confounds computation """

from os.path import abspath

from tabulate import tabulate
from nipype.interfaces.base import (
    BaseInterface, BaseInterfaceInputSpec, traits, TraitedSpec, ImageFile, File
    )
from DVARS import DVARS_Calc

class ComputeDVARSInputSpec(BaseInterfaceInputSpec):
    """ Input specifications of a ComputeDVARS interface """
    in_file = ImageFile(mandatory = True, desc = '4D nifti input file')
    nb_time_points = traits.Int(mandatory = True, desc = 'Number of time points in the input file')
    out_file_name = traits.Str(
        mandatory = True,
        desc = 'Base name for the output file, without extension'
        )

class ComputeDVARSOutputSpec(TraitedSpec):
    """ Output specifications of a ComputeDVARS interface """
    out_file = File(
        exists = True,
        desc = 'Output file containing a regressor identifying corrupted points'
        )

class ComputeDVARS(BaseInterface):
    """ Map the MATLAB code from the following article:
        Afyouni, Soroosh & Nichols, Thomas. (2018).
        Insight and inference for DVARS. NeuroImage. 172.
        10.1016/j.neuroimage.2017.12.098.

        Code is available here: https://github.com/asoroosh/DVARS

        Returns
            out_file : text file containing a regressor identifying corrupted time points
    """
    input_spec = ComputeDVARSInputSpec
    output_spec = ComputeDVARSOutputSpec

    def _run_interface(self, runtime):
        """ Run the DVARS computation and identify corrupted points """

        # Compute DVARS
        dvars_output = DVARS_Calc(self.inputs.in_file)

        # Identify corrupted points, following the original MATLAB code:
        #   find(Stat.pvals<0.05./(T-1) & Stat.DeltapDvar>5) % print corrupted DVARS data-points
        pvalues = [
            e < (0.05 / (self.inputs.nb_time_points - 1))
            for e in dvars_output['Inference']['Pval']
            ]
        deltapdvar = [e > 5 for e in dvars_output['DVARS']['DeltapDvar']]

        # Write result to file (one row per time point, True when the point is corrupted)
        with open(abspath(self.inputs.out_file_name + '.txt'), 'w') as file:
            file.write(tabulate([[a and b] for a, b in zip(pvalues, deltapdvar)]))

        return runtime

    def _list_outputs(self):
        outputs = self._outputs().get()
        outputs['out_file'] = abspath(self.inputs.out_file_name + '.txt')
        return outputs
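
For reference, here is a minimal usage sketch of the new interface on its own, outside the team_0ED6 pipeline. It assumes the DVARS dependency from the 'scientific' extra added in setup.py below is installed, and 'sub-001_bold.nii.gz' is a placeholder 4D functional image; input and output names follow the specs above.

# Minimal usage sketch of narps_open.core.interfaces.confounds.ComputeDVARS.
# Assumption: 'sub-001_bold.nii.gz' is a placeholder path to a 4D NIfTI file.
from narps_open.core.interfaces.confounds import ComputeDVARS

compute_dvars = ComputeDVARS()
compute_dvars.inputs.in_file = 'sub-001_bold.nii.gz'
compute_dvars.inputs.nb_time_points = 453  # number of volumes in the input file
compute_dvars.inputs.out_file_name = 'corrupted_points'

results = compute_dvars.run()
print(results.outputs.out_file)  # absolute path to corrupted_points.txt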
15 changes: 12 additions & 3 deletions narps_open/pipelines/team_0ED6.py
@@ -21,6 +21,7 @@
from nipype.algorithms.confounds import ComputeDVARS

from narps_open.pipelines import Pipeline
from narps_open.core.interfaces.confounds import ComputeDVARS
from narps_open.data.task import TaskInformation
from narps_open.data.participants import get_group
from narps_open.utils.configuration import Configuration
@@ -213,6 +214,7 @@ def get_preprocessing(self):
select_func.inputs.index = [0] # func file
preprocessing.connect(smoothing, 'smoothed_files', select_func, 'inlist')

"""
# MATHS COMMAND - Apply threshold to sbref normalized GM probability map
# and binarise result
# TODO : add wm in the mask ?
@@ -226,12 +228,19 @@ def get_preprocessing(self):
reslice_mask = Node(Reslice(), name ='reslice_mask')
preprocessing.connect(threshold, 'out_file', reslice_mask, 'in_file')
preprocessing.connect(select_func, 'out', reslice_mask, 'space_defining')
"""

# COMPUTE DVARS - Identify corrupted time-points from func
compute_dvars = Node(ComputeDVARS(), name = 'compute_dvars')
"""compute_dvars = Node(ComputeDVARS(), name = 'compute_dvars')
compute_dvars.inputs.series_tr = TaskInformation()['RepetitionTime']
preprocessing.connect(select_func, 'out', compute_dvars, 'in_file')
preprocessing.connect(reslice_mask, 'out_file', compute_dvars, 'in_mask')
preprocessing.connect(reslice_mask, 'out_file', compute_dvars, 'in_mask')"""

# COMPUTE DVARS - Identify corrupted time-points from func
compute_dvars = Node(ComputeDVARS(), name = 'compute_dvars')
compute_dvars.inputs.nb_time_points = 453
compute_dvars.inputs.out_file_name = 'corrupted_points'
preprocessing.connect(select_func, 'out', compute_dvars, 'in_file')

# DATA SINK - store the wanted results in the wanted repository
data_sink = Node(DataSink(), name = 'data_sink')
@@ -240,7 +249,7 @@ def get_preprocessing(self):
preprocessing.connect(
realign_unwarp, 'realignment_parameters',
data_sink, 'preprocessing.@realignement_parameters')
preprocessing.connect(compute_dvars, 'out_std', data_sink, 'preprocessing.@dvars_file')
preprocessing.connect(compute_dvars, 'out_file', data_sink, 'preprocessing.@dvars_file')

# Remove large files, if requested
if Configuration()['pipelines']['remove_unused_data']:
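
As a numeric illustration of the rule applied by the custom ComputeDVARS node configured above (a sketch only; the value 453 comes from nb_time_points, and the 0.05/(T-1) and >5 thresholds come from the MATLAB snippet referenced in the interface; the sample values below are made up):

# Sketch of the flagging rule: a time point is corrupted when its p-value is
# below a Bonferroni-style threshold of 0.05/(T-1) AND its DeltapDvar exceeds 5.
nb_time_points = 453
p_threshold = 0.05 / (nb_time_points - 1)  # ~1.1e-4

example_pvals = [0.2, 5e-5, 1e-6]       # made-up p-values
example_deltapdvar = [1.0, 7.3, 2.0]    # made-up percent changes in DVARS

corrupted = [
    p < p_threshold and d > 5
    for p, d in zip(example_pvals, example_deltapdvar)
]
print(corrupted)  # [False, True, False] -> only the second point is flagged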
5 changes: 4 additions & 1 deletion setup.py
@@ -31,7 +31,10 @@
'pytest-helpers-namespace>=2021.12.29,<2021.13',
'pytest-mock>=3.12.0,<3.13',
'checksumdir>=1.2.0,<1.3'
]
],
'scientific': [
'DVARS @ git+https://[email protected]/bclenet/DVARS'
]
}

setup(
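
Once the new 'scientific' extra is installed, the dependency can be sanity-checked from Python; a minimal sketch, assuming the DVARS package exposes DVARS_Calc as imported in confounds.py above:

# Quick check that the dependency added by the 'scientific' extra resolves.
from DVARS import DVARS_Calc

print(DVARS_Calc)  # prints the function object if the installation succeeded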
