diff --git a/narps_open/core/interfaces.py b/narps_open/core/interfaces/__init__.py
similarity index 100%
rename from narps_open/core/interfaces.py
rename to narps_open/core/interfaces/__init__.py
diff --git a/narps_open/core/interfaces/confounds.py b/narps_open/core/interfaces/confounds.py
new file mode 100644
index 00000000..2fe6b93f
--- /dev/null
+++ b/narps_open/core/interfaces/confounds.py
@@ -0,0 +1,68 @@
+#!/usr/bin/python
+# coding: utf-8
+
+""" This module defines custom interfaces related to confounds computation """
+
+from os.path import abspath
+
+from tabulate import tabulate
+from nipype.interfaces.base import (
+    BaseInterface, BaseInterfaceInputSpec, traits, TraitedSpec, ImageFile, File
+    )
+from DVARS import DVARS_Calc
+
+class ComputeDVARSInputSpec(BaseInterfaceInputSpec):
+    """ Input specifications of a ComputeDVARS interface """
+    in_file = ImageFile(mandatory = True, desc = '4D nifti input file')
+    nb_time_points = traits.Int(mandatory = True, desc = 'Number of time points in the input file')
+    out_file_name = traits.Str(
+        mandatory = True,
+        desc = 'Base name for the output file, without extension'
+        )
+
+class ComputeDVARSOutputSpec(TraitedSpec):
+    """ Output specifications of a ComputeDVARS interface """
+    out_file = File(
+        exists = True,
+        desc = 'Output file containing a regressor identifying corrupted points'
+        )
+
+class ComputeDVARS(BaseInterface):
+    """ Map the MATLAB code from the following article:
+
+        Afyouni, Soroosh & Nichols, Thomas. (2018).
+        Insight and inference for DVARS. NeuroImage. 172.
+        10.1016/j.neuroimage.2017.12.098.
+
+        Code is available here:
+        https://github.com/asoroosh/DVARS
+
+        Returns
+
+        out_file : text file containing a regressor that identifies
+        corrupted time points
+    """
+    input_spec = ComputeDVARSInputSpec
+    output_spec = ComputeDVARSOutputSpec
+
+    def _run_interface(self, runtime):
+        """ Run the DVARS computation and identify corrupted points """
+
+        # Compute DVARS
+        dvars_output = DVARS_Calc(self.inputs.in_file)
+
+        # Identify corrupted points, as in the original MATLAB code:
+        #   find(Stat.pvals < 0.05./(T-1) & Stat.DeltapDvar > 5) % print corrupted DVARS data-points
+        pvalues = [e < (0.05 / (self.inputs.nb_time_points - 1)) for e in dvars_output['Inference']['Pval']]
+        deltapdvar = [e > 5 for e in dvars_output['DVARS']['DeltapDvar']]
+
+        # Write result to file, one row per time point (1 marks a corrupted point)
+        with open(abspath(self.inputs.out_file_name + '.txt'), 'w') as file:
+            file.write(tabulate([[int(a and b)] for a, b in zip(pvalues, deltapdvar)]))
+
+        return runtime
+
+    def _list_outputs(self):
+        outputs = self._outputs().get()
+        outputs['out_file'] = abspath(self.inputs.out_file_name + '.txt')
+        return outputs
diff --git a/narps_open/pipelines/team_0ED6.py b/narps_open/pipelines/team_0ED6.py
index 8b27c5b2..cf6e6b90 100644
--- a/narps_open/pipelines/team_0ED6.py
+++ b/narps_open/pipelines/team_0ED6.py
@@ -21,6 +21,7 @@
 from nipype.algorithms.confounds import ComputeDVARS
 
 from narps_open.pipelines import Pipeline
+from narps_open.core.interfaces.confounds import ComputeDVARS
 from narps_open.data.task import TaskInformation
 from narps_open.data.participants import get_group
 from narps_open.utils.configuration import Configuration
@@ -213,6 +214,7 @@ def get_preprocessing(self):
         select_func.inputs.index = [0] # func file
         preprocessing.connect(smoothing, 'smoothed_files', select_func, 'inlist')
 
+        """
         # MATHS COMMAND - Apply threshold to sbref normalized GM probalility map
         # and binarise result
         # TODO : add wm in the mask ?
@@ -226,12 +228,19 @@ def get_preprocessing(self):
         reslice_mask = Node(Reslice(), name ='reslice_mask')
         preprocessing.connect(threshold, 'out_file', reslice_mask, 'in_file')
         preprocessing.connect(select_func, 'out', reslice_mask, 'space_defining')
+        """
 
         # COMPUTE DVARS - Identify corrupted time-points from func
-        compute_dvars = Node(ComputeDVARS(), name = 'compute_dvars')
+        """compute_dvars = Node(ComputeDVARS(), name = 'compute_dvars')
         compute_dvars.inputs.series_tr = TaskInformation()['RepetitionTime']
         preprocessing.connect(select_func, 'out', compute_dvars, 'in_file')
-        preprocessing.connect(reslice_mask, 'out_file', compute_dvars, 'in_mask')
+        preprocessing.connect(reslice_mask, 'out_file', compute_dvars, 'in_mask')"""
+
+        # COMPUTE DVARS - Identify corrupted time-points from func
+        compute_dvars = Node(ComputeDVARS(), name = 'compute_dvars')
+        compute_dvars.inputs.nb_time_points = 453
+        compute_dvars.inputs.out_file_name = 'corrupted_points'
+        preprocessing.connect(select_func, 'out', compute_dvars, 'in_file')
 
         # DATA SINK - store the wanted results in the wanted repository
         data_sink = Node(DataSink(), name = 'data_sink')
@@ -240,7 +249,7 @@ def get_preprocessing(self):
         preprocessing.connect(
             realign_unwarp, 'realignment_parameters',
             data_sink, 'preprocessing.@realignement_parameters')
-        preprocessing.connect(compute_dvars, 'out_std', data_sink, 'preprocessing.@dvars_file')
+        preprocessing.connect(compute_dvars, 'out_file', data_sink, 'preprocessing.@dvars_file')
 
         # Remove large files, if requested
         if Configuration()['pipelines']['remove_unused_data']:
diff --git a/setup.py b/setup.py
index b17409b6..55c9f131 100644
--- a/setup.py
+++ b/setup.py
@@ -31,7 +31,10 @@
         'pytest-helpers-namespace>=2021.12.29,<2021.13',
         'pytest-mock>=3.12.0,<3.13',
         'checksumdir>=1.2.0,<1.3'
-    ]
+    ],
+    'scientific': [
+        'DVARS @ git+https://git@github.com/bclenet/DVARS'
+    ]
 }
 
 setup(
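Usage note (not part of the patch): a minimal sketch of how the new ComputeDVARS interface could be run on its own, outside the team_0ED6 workflow. It assumes the new 'scientific' extra has been installed so that the DVARS package is available, and it uses an illustrative local 4D NIfTI file named 'func.nii.gz' with 453 volumes; both the file name and the volume count are assumptions, not part of the repository.

    # Standalone run of the custom interface; mirrors the inputs set in
    # team_0ED6.get_preprocessing (nb_time_points and out_file_name).
    from narps_open.core.interfaces.confounds import ComputeDVARS

    compute_dvars = ComputeDVARS()
    compute_dvars.inputs.in_file = 'func.nii.gz'            # hypothetical input file
    compute_dvars.inputs.nb_time_points = 453               # number of volumes in in_file
    compute_dvars.inputs.out_file_name = 'corrupted_points'
    result = compute_dvars.run()

    # out_file is the absolute path of 'corrupted_points.txt', the regressor
    # marking corrupted time points.
    print(result.outputs.out_file)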