From 3a7159b5daa92f2306fc689b2db8175a3d9a50fd Mon Sep 17 00:00:00 2001 From: Gab-D-G Date: Mon, 2 Oct 2023 16:12:35 -0400 Subject: [PATCH 1/9] Solved compatibility issue between autobox and reading workflow graph. --- Dockerfile | 2 +- rabies/preprocess_pkg/main_wf.py | 15 +++++++++------ rabies/preprocess_pkg/utils.py | 11 +++++++++++ 3 files changed, 21 insertions(+), 7 deletions(-) diff --git a/Dockerfile b/Dockerfile index 12f2009..99bc4f9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -18,7 +18,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ # Install a few minimal AFNI components RUN curl -L -O https://afni.nimh.nih.gov/pub/dist/tgz/linux_ubuntu_16_64.tgz && \ mkdir -p /opt/afni && \ - tar xzvf linux_ubuntu_16_64.tgz -C /opt/afni linux_ubuntu_16_64/{libmri.so,libf2c.so,3dDespike,3dTshift} --strip-components=1 && \ + tar xzvf linux_ubuntu_16_64.tgz -C /opt/afni linux_ubuntu_16_64/{libmri.so,libf2c.so,3dDespike,3dTshift,3dAutobox} --strip-components=1 && \ rm -f linux_ubuntu_16_64.tgz ENV PATH=/opt/afni${PATH:+:$PATH} diff --git a/rabies/preprocess_pkg/main_wf.py b/rabies/preprocess_pkg/main_wf.py index fb6f747..95a8675 100644 --- a/rabies/preprocess_pkg/main_wf.py +++ b/rabies/preprocess_pkg/main_wf.py @@ -4,11 +4,10 @@ from nipype.interfaces import utility as niu from nipype.interfaces.io import DataSink from nipype.interfaces.utility import Function -from nipype.interfaces.afni import Autobox from .inho_correction import init_inho_correction_wf from .commonspace_reg import init_commonspace_reg_wf from .bold_main_wf import init_bold_main_wf -from .utils import BIDSDataGraber, prep_bids_iter, convert_to_RAS, resample_template +from .utils import BIDSDataGraber, prep_bids_iter, convert_to_RAS, resample_template, apply_autobox from . 
import preprocess_visual_QC def init_main_wf(data_dir_path, output_folder, opts, name='main_wf'): @@ -175,8 +174,10 @@ def init_main_wf(data_dir_path, output_folder, opts, name='main_wf'): name="format_bold_buffer") if opts.bold_autobox: # apply AFNI's 3dAutobox - bold_autobox = pe.Node(Autobox(padding=1, outputtype='NIFTI_GZ'), - name="bold_autobox") + bold_autobox = pe.Node(Function(input_names=['in_file'], + output_names=['out_file'], + function=apply_autobox), + name='bold_autobox') workflow.connect([ (bold_convert_to_RAS_node, bold_autobox, [ ('RAS_file', 'in_file'), @@ -337,8 +338,10 @@ def init_main_wf(data_dir_path, output_folder, opts, name='main_wf'): name="format_anat_buffer") if opts.anat_autobox: # apply AFNI's 3dAutobox - anat_autobox = pe.Node(Autobox(padding=1, outputtype='NIFTI_GZ'), - name="anat_autobox") + anat_autobox = pe.Node(Function(input_names=['in_file'], + output_names=['out_file'], + function=apply_autobox), + name='anat_autobox') workflow.connect([ (anat_convert_to_RAS_node, anat_autobox, [ ('RAS_file', 'in_file'), diff --git a/rabies/preprocess_pkg/utils.py b/rabies/preprocess_pkg/utils.py index 7eb6c47..2499c8e 100644 --- a/rabies/preprocess_pkg/utils.py +++ b/rabies/preprocess_pkg/utils.py @@ -172,6 +172,17 @@ def _list_outputs(self): return {'out_file': getattr(self, 'out_file')} +def apply_autobox(in_file): + import pathlib + import os + from rabies.utils import run_command + split = pathlib.Path(in_file).name.rsplit(".nii")[0] + out_file = os.path.abspath(f"{split}_autobox.nii.gz") + command = f'3dAutobox -input {in_file} -prefix {out_file} -npad 1' + rc = run_command(command) + return out_file + + def convert_to_RAS(img_file, out_dir=None): # convert the input image to the RAS orientation convention import os From d7acf8feb0c591670ad4370995bc5900e7c4bd3a Mon Sep 17 00:00:00 2001 From: Gab-D-G Date: Mon, 2 Oct 2023 16:22:16 -0400 Subject: [PATCH 2/9] Fixed AFNI's despiking for the same errors as autobox. 
--- rabies/preprocess_pkg/bold_main_wf.py | 11 ++++++----- rabies/preprocess_pkg/utils.py | 12 ++++++++++++ 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/rabies/preprocess_pkg/bold_main_wf.py b/rabies/preprocess_pkg/bold_main_wf.py index 511dc49..ea72d38 100644 --- a/rabies/preprocess_pkg/bold_main_wf.py +++ b/rabies/preprocess_pkg/bold_main_wf.py @@ -1,5 +1,5 @@ from nipype.pipeline import engine as pe -from nipype.interfaces import utility as niu, afni +from nipype.interfaces import utility as niu from .hmc import init_bold_hmc_wf,EstimateMotionParams from .bold_ref import init_bold_reference_wf @@ -8,7 +8,7 @@ from .inho_correction import init_inho_correction_wf from .registration import init_cross_modal_reg_wf from nipype.interfaces.utility import Function - +from .utils import apply_despike def init_bold_main_wf(opts, output_folder, number_functional_scans, inho_cor_only=False, name='bold_main_wf'): """ @@ -193,9 +193,10 @@ def resample_isotropic(bold_file, rabies_data_type): ]) if opts.apply_despiking: - despike = pe.Node( - afni.Despike(outputtype='NIFTI_GZ'), - name='despike') + despike = pe.Node(Function(input_names=['in_file'], + output_names=['out_file'], + function=apply_despike), + name='despike') workflow.connect([ (inputnode, despike, [('bold', 'in_file')]), (despike, boldbuffer, [('out_file', 'bold_file')]), diff --git a/rabies/preprocess_pkg/utils.py b/rabies/preprocess_pkg/utils.py index 2499c8e..a1cfa33 100644 --- a/rabies/preprocess_pkg/utils.py +++ b/rabies/preprocess_pkg/utils.py @@ -172,6 +172,18 @@ def _list_outputs(self): return {'out_file': getattr(self, 'out_file')} +###WRAPPERS FOR AFNI'S FUNCTIONS; NECESSARY TO PREVENT ISSUES WHEN READING INPUTS/OUTPUTS FROM WORKFLOW GRAPH + +def apply_despike(in_file): + import pathlib + import os + from rabies.utils import run_command + split = pathlib.Path(in_file).name.rsplit(".nii")[0] + out_file = os.path.abspath(f"{split}_despike.nii.gz") + command = f'3dDespike -prefix 
{out_file} {in_file}' + rc = run_command(command) + return out_file + def apply_autobox(in_file): import pathlib import os From e9606e08f416e86cc62cce171d4a1d9707fd8192 Mon Sep 17 00:00:00 2001 From: Gab-D-G Date: Mon, 2 Oct 2023 17:06:24 -0400 Subject: [PATCH 3/9] Testing is conducted on all parameters which the token data can handle. --- scripts/error_check_rabies.py | 112 +++++++++++++++++++++++++++------- 1 file changed, 91 insertions(+), 21 deletions(-) diff --git a/scripts/error_check_rabies.py b/scripts/error_check_rabies.py index 3d57625..5addca3 100755 --- a/scripts/error_check_rabies.py +++ b/scripts/error_check_rabies.py @@ -6,6 +6,27 @@ import subprocess from rabies.utils import generate_token_data +'''PARAMETERS NOT TESTED +preprocess: + --bids_filter: requires creating a JSON + --apply_slice_mc: doesn't run on token data + --anat_inho_cor/bold_inho_cor: requires MINC/ANTs, which doesn't run on token data + --anat_robust_inho_cor/bold_robust_inho_cor: requires MINC/ANTs, which doesn't run on token data + --commonspace_reg: requires MINC/ANTs, which doesn't run on token data + --bold2anat_coreg: requires MINC/ANTs, which doesn't run on token data + +confound_correction: + --highpass/lowpass/edge_cutoff; since filtering doesn't work with 3 timepoints + +analysis: + --prior_maps: not available for token data + --ROI_csv: no such file for token data + --ROI_type parcellated: no parcellation for token data + --seed_prior_list: not available for token data + --outlier_threshold: not necessary +''' + + import argparse def get_parser(): """Build parser object""" @@ -65,6 +86,8 @@ def get_parser(): command += f" --anat_inho_cor method=disable,otsu_thresh=2,multiotsu=false --bold_inho_cor method=disable,otsu_thresh=2,multiotsu=false \ --anat_template {tmppath}/inputs/sub-token1_T1w.nii.gz --brain_mask {tmppath}/inputs/token_mask.nii.gz --WM_mask {tmppath}/inputs/token_mask.nii.gz --CSF_mask {tmppath}/inputs/token_mask.nii.gz --vascular_mask 
{tmppath}/inputs/token_mask.nii.gz --labels {tmppath}/inputs/token_mask.nii.gz \ --bold2anat_coreg registration=no_reg,masking=false,brain_extraction=false --commonspace_reg masking=false,brain_extraction=false,fast_commonspace=true,template_registration=no_reg --data_type int16" + command += f" {tmppath}/inputs {tmppath}/outputs" + if 'confound_correction' in command or 'analysis' in command: if not os.path.isfile(f'{tmppath}/outputs/rabies_preprocess_workflow.pkl'): # provide preprocess outputs to run cc stage @@ -81,8 +104,7 @@ def get_parser(): check=True, shell=True, ) - - command += f" {tmppath}/outputs {tmppath}/outputs" + command += f" {tmppath}/outputs {tmppath}/outputs" process = subprocess.run( command, @@ -91,10 +113,10 @@ def get_parser(): ) sys.exit() -command = f"rabies --exclusion_ids {tmppath}/inputs/sub-token2_bold.nii.gz {tmppath}/inputs/sub-token3_bold.nii.gz --verbose 1 preprocess {tmppath}/inputs {tmppath}/outputs --anat_inho_cor method=disable,otsu_thresh=2,multiotsu=false --bold_inho_cor method=disable,otsu_thresh=2,multiotsu=false \ +command = f"rabies --exclusion_ids {tmppath}/inputs/sub-token2_bold.nii.gz {tmppath}/inputs/sub-token3_bold.nii.gz --force --verbose 1 preprocess {tmppath}/inputs {tmppath}/outputs --anat_inho_cor method=disable,otsu_thresh=2,multiotsu=false --bold_inho_cor method=disable,otsu_thresh=2,multiotsu=false \ --anat_template {tmppath}/inputs/sub-token1_T1w.nii.gz --brain_mask {tmppath}/inputs/token_mask.nii.gz --WM_mask {tmppath}/inputs/token_mask.nii.gz --CSF_mask {tmppath}/inputs/token_mask.nii.gz --vascular_mask {tmppath}/inputs/token_mask.nii.gz --labels {tmppath}/inputs/token_mask.nii.gz \ --bold2anat_coreg registration=no_reg,masking=false,brain_extraction=false --commonspace_reg masking=false,brain_extraction=false,fast_commonspace=true,template_registration=no_reg --data_type int16 --bold_only --detect_dummy \ - --tpattern seq-z --apply_STC --voxelwise_motion --isotropic_HMC" + --tpattern seq-z --apply_STC 
--voxelwise_motion --isotropic_HMC --interp_method linear --nativespace_resampling 1x1x1 --commonspace_resampling 1x1x1 --anatomical_resampling 1x1x1" process = subprocess.run( command, check=True, @@ -104,23 +126,14 @@ def get_parser(): command = f"rabies --inclusion_ids {tmppath}/inputs/sub-token1_bold.nii.gz --verbose 1 --force preprocess {tmppath}/inputs {tmppath}/outputs --anat_inho_cor method=disable,otsu_thresh=2,multiotsu=false --bold_inho_cor method=disable,otsu_thresh=2,multiotsu=false \ --anat_template {tmppath}/inputs/sub-token1_T1w.nii.gz --brain_mask {tmppath}/inputs/token_mask.nii.gz --WM_mask {tmppath}/inputs/token_mask.nii.gz --CSF_mask {tmppath}/inputs/token_mask.nii.gz --vascular_mask {tmppath}/inputs/token_mask.nii.gz --labels {tmppath}/inputs/token_mask.nii.gz \ --bold2anat_coreg registration=no_reg,masking=true,brain_extraction=true --commonspace_reg masking=true,brain_extraction=true,fast_commonspace=true,template_registration=no_reg --data_type int16 \ - --HMC_option 0" -process = subprocess.run( - command, - check=True, - shell=True, - ) - -command = f"rabies --verbose 1 confound_correction {tmppath}/outputs {tmppath}/outputs --ica_aroma apply=true,dim=0,random_seed=1 --frame_censoring FD_censoring=true,FD_threshold=0.05,DVARS_censoring=true,minimum_timepoint=3 --nativespace_analysis" + --HMC_option 0 --apply_despiking --anat_autobox --bold_autobox" process = subprocess.run( command, check=True, shell=True, ) -# rerunning confound correction without censoring, which removes all scans -os.remove(f'{tmppath}/outputs/rabies_confound_correction.pkl') -command = f"rabies --verbose 1 confound_correction {tmppath}/outputs {tmppath}/outputs --nativespace_analysis" +command = f"rabies --force --verbose 1 confound_correction {tmppath}/outputs {tmppath}/outputs --conf_list aCompCor_5 --nativespace_analysis" process = subprocess.run( command, check=True, @@ -128,7 +141,7 @@ def get_parser(): ) # testing --data_diagnosis in native space -command = 
f"rabies --verbose 1 analysis {tmppath}/outputs {tmppath}/outputs --data_diagnosis" +command = f"rabies --force --verbose 1 analysis {tmppath}/outputs {tmppath}/outputs --data_diagnosis" process = subprocess.run( command, check=True, @@ -136,9 +149,53 @@ def get_parser(): ) if opts.complete: - ###GROUP LEVEL, RUNNING ALL 3 SCANS - shutil.rmtree(f'{tmppath}/outputs/') - command = f"rabies --verbose 1 preprocess {tmppath}/inputs {tmppath}/outputs --anat_inho_cor method=disable,otsu_thresh=2,multiotsu=false --bold_inho_cor method=disable,otsu_thresh=2,multiotsu=false \ + ####CONFOUND CORRECTION#### + command = f"rabies --force --verbose 1 confound_correction {tmppath}/outputs {tmppath}/outputs \ + --generate_CR_null --timeseries_interval 0,5 --TR 1 --scale_variance_voxelwise \ + --smoothing_filter 0.3 --detrending_order quadratic --image_scaling global_variance " + process = subprocess.run( + command, + check=True, + shell=True, + ) + + # testing censoring on its own, since it removes all scans and prevent further testing + command = f"rabies --force --verbose 1 confound_correction {tmppath}/outputs {tmppath}/outputs --frame_censoring FD_censoring=true,FD_threshold=0.05,DVARS_censoring=true,minimum_timepoint=3" + process = subprocess.run( + command, + check=True, + shell=True, + ) + + # testing AROMA on its own to retain degrees of freedom + command = f"rabies --force --verbose 1 confound_correction {tmppath}/outputs {tmppath}/outputs --ica_aroma apply=true,dim=0,random_seed=1" + process = subprocess.run( + command, + check=True, + shell=True, + ) + + # testing --conf_list on its own to retain degrees of freedom + command = f"rabies --force --verbose 1 confound_correction --read_datasink {tmppath}/outputs {tmppath}/outputs \ + --conf_list mot_24 aCompCor_percent global_signal" + process = subprocess.run( + command, + check=True, + shell=True, + ) + + ####ANALYSIS#### + command = f"rabies --force --verbose 1 analysis {tmppath}/outputs {tmppath}/outputs 
--network_weighting relative \ + --optimize_NPR apply=true,window_size=2,min_prior_corr=0.5,diff_thresh=0.03,max_iter=5,compute_max=false \ + --FC_matrix --ROI_type voxelwise" + process = subprocess.run( + command, + check=True, + shell=True, + ) + + ####GROUP LEVEL, RUNNING ALL 3 SCANS#### + command = f"rabies --force --verbose 1 preprocess {tmppath}/inputs {tmppath}/outputs --anat_inho_cor method=disable,otsu_thresh=2,multiotsu=false --bold_inho_cor method=disable,otsu_thresh=2,multiotsu=false \ --anat_template {tmppath}/inputs/sub-token1_T1w.nii.gz --brain_mask {tmppath}/inputs/token_mask.nii.gz --WM_mask {tmppath}/inputs/token_mask_half.nii.gz --CSF_mask {tmppath}/inputs/token_mask_half.nii.gz --vascular_mask {tmppath}/inputs/token_mask_half.nii.gz --labels {tmppath}/inputs/token_mask.nii.gz \ --bold2anat_coreg registration=no_reg,masking=false,brain_extraction=false --commonspace_reg masking=false,brain_extraction=false,fast_commonspace=true,template_registration=no_reg --data_type int16 \ --HMC_option 0" @@ -148,14 +205,27 @@ def get_parser(): shell=True, ) - command = f"rabies --verbose 1 confound_correction --read_datasink {tmppath}/outputs {tmppath}/outputs --conf_list mot_6 --smoothing_filter 0.3" + command = f"rabies --force --verbose 1 confound_correction {tmppath}/outputs {tmppath}/outputs --conf_list mot_6" + process = subprocess.run( + command, + check=True, + shell=True, + ) + + # testing group level --data_diagnosis + command = f"rabies --force --verbose 1 analysis {tmppath}/outputs {tmppath}/outputs --NPR_temporal_comp 1 \ + --data_diagnosis --extended_QC --DR_ICA --seed_list {tmppath}/inputs/token_mask_half.nii.gz" process = subprocess.run( command, check=True, shell=True, ) - command = f"rabies --force --verbose 1 analysis {tmppath}/outputs {tmppath}/outputs --NPR_temporal_comp 1 --data_diagnosis --DR_ICA --seed_list {tmppath}/inputs/token_mask_half.nii.gz" + # test for the scan QC thresholds + 
scan_QC="'{DR:{Dice:[0.5],Conf:[0.1],Amp:true},SBC:{Dice:[0.3]}}'" + command = f"rabies --force --verbose 1 analysis {tmppath}/outputs {tmppath}/outputs \ + --data_diagnosis --extended_QC --scan_QC_thresholds {scan_QC} \ + --prior_bold_idx 5 --prior_confound_idx 0 2 21 22" process = subprocess.run( command, check=True, From 9a5bb00e6ea94f14d64687397774e612a7f129d7 Mon Sep 17 00:00:00 2001 From: Gab-D-G Date: Mon, 2 Oct 2023 17:09:43 -0400 Subject: [PATCH 4/9] Removed --scan_list which was replaced by --inclusion_ids --- rabies/analysis_pkg/main_wf.py | 21 --------------------- rabies/parser.py | 13 ------------- 2 files changed, 34 deletions(-) diff --git a/rabies/analysis_pkg/main_wf.py b/rabies/analysis_pkg/main_wf.py index ea4d555..bc0b9f3 100644 --- a/rabies/analysis_pkg/main_wf.py +++ b/rabies/analysis_pkg/main_wf.py @@ -393,27 +393,6 @@ def read_confound_workflow(conf_output, nativespace=False): return split_dict, split_name, target_list -def get_iterable_scan_list(scan_list, split_name): - # prep the subset of scans on which the analysis will be run - import numpy as np - import pandas as pd - if os.path.isfile(os.path.abspath(scan_list[0])): - updated_split_name=[] - if '.nii' in pathlib.Path(scan_list[0]).name: - for scan in scan_list: - updated_split_name.append(find_split(scan, split_name)) - else: - # read the file as a .txt - scan_list = np.array(pd.read_csv(os.path.abspath(scan_list[0]), header=None)).flatten() - for scan in scan_list: - updated_split_name.append(find_split(scan, split_name)) - elif scan_list[0]=='all': - updated_split_name = split_name - else: - raise ValueError(f"The --scan_list {scan_list} input had improper format. 
It must the full path to a .txt or .nii files, or 'all' to keep all scans.") - return updated_split_name - - def find_split(scan, split_name): for split in split_name: if split in scan: diff --git a/rabies/parser.py b/rabies/parser.py index 517790b..d0dcc6f 100644 --- a/rabies/parser.py +++ b/rabies/parser.py @@ -763,19 +763,6 @@ def get_parser(): "path for analysis outputs.\n" "\n" ) - analysis.add_argument( - '--scan_list', type=str, - nargs="*", # 0 or more values expected => creates a list - default=['all'], - help= - "This option offers to run the analysis on a subset of the scans. The scans are selected by\n" - "providing the full path to the corresponding EPI file in the input BIDS folder. The list \n" - "of scan can be specified manually as a list of file name '--scan_list scan1.nii.gz \n" - "scan2.nii.gz ...' or the files can be imbedded into a .txt file with one filename per row.\n" - "By default, 'all' will use all the scans previously processed.\n" - "(default: %(default)s)\n" - "\n" - ) analysis.add_argument( '--prior_maps', action='store', type=Path, default=f"{rabies_path}/melodic_IC.nii.gz", From 9d76f2bff2c5790bf115f533e40c186f6acc6e75 Mon Sep 17 00:00:00 2001 From: Gab-D-G Date: Mon, 2 Oct 2023 18:02:18 -0400 Subject: [PATCH 5/9] Token timeseries always simulated network activity for realism and to help MELODIC converge. 
--- rabies/utils.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/rabies/utils.py b/rabies/utils.py index 5190829..e41c440 100644 --- a/rabies/utils.py +++ b/rabies/utils.py @@ -520,6 +520,7 @@ def generate_token_data(tmppath, number_scans): template = f"{rabies_path}/DSURQE_40micron_average.nii.gz" mask = f"{rabies_path}/DSURQE_40micron_mask.nii.gz" + melodic_file = f"{rabies_path}/melodic_IC.nii.gz" spacing = (float(1), float(1), float(1)) # resample to 1mmx1mmx1mm resampled_template = resample_image_spacing(sitk.ReadImage(template), spacing) @@ -539,13 +540,21 @@ def generate_token_data(tmppath, number_scans): # generate fake scans from the template array = sitk.GetArrayFromImage(resampled_template) array_4d = np.repeat(array[np.newaxis, :, :, :], 15, axis=0) + + melodic_img = sitk.ReadImage(melodic_file) + network1_map = sitk.GetArrayFromImage(sitk.Resample(melodic_img[:,:,:,5], resampled_template)) + network2_map = sitk.GetArrayFromImage(sitk.Resample(melodic_img[:,:,:,19], resampled_template)) + time1 = np.random.normal(0, array_4d.mean()/100, array_4d.shape[0]) # network timecourse; scale is 1% of image intensity + time2 = np.random.normal(0, array_4d.mean()/100, array_4d.shape[0]) # network timecourse; scale is 1% of image intensity + # creating fake network timeseries + network1_time = (np.repeat(network1_map[np.newaxis, :, :, :], 15, axis=0).T*time1).T + network2_time = (np.repeat(network2_map[np.newaxis, :, :, :], 15, axis=0).T*time2).T for i in range(number_scans): # generate anatomical scan sitk.WriteImage(resampled_template, tmppath+f'/inputs/sub-token{i+1}_T1w.nii.gz') # generate functional scan - array_4d_ = array_4d + np.random.normal(0, array_4d.mean() - / 100, array_4d.shape) # add gaussian noise + array_4d_ = array_4d + network1_time + network2_time + np.random.normal(0, array_4d.mean()/ 100, array_4d.shape) # add gaussian noise; scale is 1% of the mean intensity of the template 
sitk.WriteImage(sitk.GetImageFromArray(array_4d_, isVector=False), tmppath+f'/inputs/sub-token{i+1}_bold.nii.gz') From d220063948876dde88ae2fcb5ada60c961283d53 Mon Sep 17 00:00:00 2001 From: Gab-D-G Date: Tue, 3 Oct 2023 11:18:47 -0400 Subject: [PATCH 6/9] Added zeropad manually, necessary for AROMA --- scripts/zeropad | 93 +++++++++++++++++++++++++++++++++++++++++++++++++ setup.py | 1 + 2 files changed, 94 insertions(+) create mode 100755 scripts/zeropad diff --git a/scripts/zeropad b/scripts/zeropad new file mode 100755 index 0000000..a969798 --- /dev/null +++ b/scripts/zeropad @@ -0,0 +1,93 @@ +#!/bin/sh + +# ZEROPAD - Pad numbers with zeros +# +# Tem Behrens, FMRIB Image Analysis Group +# +# Copyright (C) 2004 University of Oxford +# +# Part of FSL - FMRIB's Software Library +# http://www.fmrib.ox.ac.uk/fsl +# fsl@fmrib.ox.ac.uk +# +# Developed at FMRIB (Oxford Centre for Functional Magnetic Resonance +# Imaging of the Brain), Department of Clinical Neurology, Oxford +# University, Oxford, UK +# +# +# LICENCE +# +# FMRIB Software Library, Release 6.0 (c) 2018, The University of +# Oxford (the "Software") +# +# The Software remains the property of the Oxford University Innovation +# ("the University"). +# +# The Software is distributed "AS IS" under this Licence solely for +# non-commercial use in the hope that it will be useful, but in order +# that the University as a charitable foundation protects its assets for +# the benefit of its educational and research purposes, the University +# makes clear that no condition is made or to be implied, nor is any +# warranty given or to be implied, as to the accuracy of the Software, +# or that it will be suitable for any particular purpose or for use +# under any specific conditions. Furthermore, the University disclaims +# all responsibility for the use which is made of the Software. It +# further disclaims any liability for the outcomes arising from using +# the Software. 
+# +# The Licensee agrees to indemnify the University and hold the +# University harmless from and against any and all claims, damages and +# liabilities asserted by third parties (including claims for +# negligence) which arise directly or indirectly from the use of the +# Software or the sale of any products based on the Software. +# +# No part of the Software may be reproduced, modified, transmitted or +# transferred in any form or by any means, electronic or mechanical, +# without the express permission of the University. The permission of +# the University is not required if the said reproduction, modification, +# transmission or transference is done without financial return, the +# conditions of this Licence are imposed upon the receiver of the +# product, and all original and amended source code is included in any +# transmitted product. You may be held legally responsible for any +# copyright infringement that is caused or encouraged by your failure to +# abide by these terms and conditions. +# +# You are not permitted under this Licence to use this Software +# commercially. Use for which any financial return is received shall be +# defined as commercial use, and includes (1) integration of all or part +# of the source code or the Software into a product for sale or license +# by or on behalf of Licensee to third parties or (2) use of the +# Software or any derivative of it for research with the final aim of +# developing software products for sale or license to a third party or +# (3) use of the Software or any derivative of it for research with the +# final aim of developing non-software products for sale or license to a +# third party, or (4) use of the Software to provide any service to an +# external organisation for which payment is received. If you are +# interested in using the Software commercially, please contact Oxford +# University Innovation ("OUI"), the technology transfer company of the +# University, to negotiate a licence. 
Contact details are: +# fsl@innovation.ox.ac.uk quoting Reference Project 9564, FSL. +export LC_ALL=C + +Usage() { +echo "" +echo "Usage: zeropad " +echo "e.g. zeropad 1 4 gives 0001" +echo "" +exit 1 +} + +[ "$1" = "" ] && Usage +[ "$2" = "" ] && Usage + + +i=`echo $1 | wc -c`; +j=0; +k=` expr $2 - $i`; +k=` expr $k + 1`; +num=$1; +while [ "$j" -lt "$k" ];do +num=0$num; +j=` expr $j + 1` +done +echo $num diff --git a/setup.py b/setup.py index 12dceb1..88661ce 100644 --- a/setup.py +++ b/setup.py @@ -151,6 +151,7 @@ def run(self): 'scripts/gen_DSURQE_masks.py', 'scripts/install_DSURQE.sh', 'scripts/error_check_rabies.py', + 'scripts/zeropad', 'scripts/preprocess_scripts/multistage_otsu_cor.py', 'scripts/preprocess_scripts/null_nonlin.sh', 'scripts/preprocess_scripts/plot_overlap.sh', From 78169c037de53c0989a8aa1f1efa328d14a24b47 Mon Sep 17 00:00:00 2001 From: Gab-D-G Date: Tue, 3 Oct 2023 11:19:36 -0400 Subject: [PATCH 7/9] 2 components for AROMA --- scripts/error_check_rabies.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/error_check_rabies.py b/scripts/error_check_rabies.py index 5addca3..dd20575 100755 --- a/scripts/error_check_rabies.py +++ b/scripts/error_check_rabies.py @@ -151,7 +151,7 @@ def get_parser(): if opts.complete: ####CONFOUND CORRECTION#### command = f"rabies --force --verbose 1 confound_correction {tmppath}/outputs {tmppath}/outputs \ - --generate_CR_null --timeseries_interval 0,5 --TR 1 --scale_variance_voxelwise \ + --generate_CR_null --timeseries_interval 2,12 --TR 1 --scale_variance_voxelwise \ --smoothing_filter 0.3 --detrending_order quadratic --image_scaling global_variance " process = subprocess.run( command, @@ -168,7 +168,7 @@ def get_parser(): ) # testing AROMA on its own to retain degrees of freedom - command = f"rabies --force --verbose 1 confound_correction {tmppath}/outputs {tmppath}/outputs --ica_aroma apply=true,dim=0,random_seed=1" + command = f"rabies --force --verbose 1 confound_correction 
{tmppath}/outputs {tmppath}/outputs --ica_aroma apply=true,dim=2,random_seed=1" process = subprocess.run( command, check=True, From 1e5c9b894cde7c7accf165b2990886c74f3873d9 Mon Sep 17 00:00:00 2001 From: Gab-D-G Date: Tue, 3 Oct 2023 12:27:12 -0400 Subject: [PATCH 8/9] run_command now outputs stdout --- rabies/analysis_pkg/analysis_functions.py | 4 +-- .../diagnosis_pkg/diagnosis_functions.py | 2 +- rabies/preprocess_pkg/bold_ref.py | 2 +- rabies/preprocess_pkg/commonspace_reg.py | 6 ++-- rabies/preprocess_pkg/hmc.py | 8 ++--- rabies/preprocess_pkg/inho_correction.py | 32 +++++++++---------- rabies/preprocess_pkg/preprocess_visual_QC.py | 2 +- rabies/preprocess_pkg/registration.py | 2 +- rabies/preprocess_pkg/stc.py | 2 +- rabies/preprocess_pkg/utils.py | 6 ++-- rabies/run_main.py | 2 +- rabies/utils.py | 14 ++++---- rabies/visualization.py | 2 +- 13 files changed, 42 insertions(+), 42 deletions(-) diff --git a/rabies/analysis_pkg/analysis_functions.py b/rabies/analysis_pkg/analysis_functions.py index 0828428..13da395 100644 --- a/rabies/analysis_pkg/analysis_functions.py +++ b/rabies/analysis_pkg/analysis_functions.py @@ -26,7 +26,7 @@ def seed_based_FC(dict_file, seed_dict, seed_name): seed_file = seed_dict[seed_name] resampled = os.path.abspath('resampled.nii.gz') command=f'antsApplyTransforms -i {seed_file} -r {mask_file} -o {resampled} -n GenericLabel' - rc = run_command(command) + rc,c_out = run_command(command) roi_mask = sitk.GetArrayFromImage(sitk.ReadImage(resampled))[volume_indices].astype(bool) # extract the voxel timeseries within the mask, and take the mean ROI timeseries @@ -139,7 +139,7 @@ def run_group_ICA(bold_file_list, mask_file, dim, random_seed): from rabies.utils import run_command out_dir = os.path.abspath('group_melodic.ica') command = f'melodic -i {file_path} -m {mask_file} -o {out_dir} -d {dim} --report --seed={str(random_seed)}' - rc = run_command(command) + rc,c_out = run_command(command) IC_file = out_dir+'/melodic_IC.nii.gz' return 
out_dir, IC_file diff --git a/rabies/analysis_pkg/diagnosis_pkg/diagnosis_functions.py b/rabies/analysis_pkg/diagnosis_pkg/diagnosis_functions.py index 27237f6..f72a77a 100644 --- a/rabies/analysis_pkg/diagnosis_pkg/diagnosis_functions.py +++ b/rabies/analysis_pkg/diagnosis_pkg/diagnosis_functions.py @@ -29,7 +29,7 @@ def resample_mask(in_file, ref_file): transform_string += f"-t {transform} " command = f'antsApplyTransforms -i {in_file} {transform_string}-n GenericLabel -r {ref_file} -o {out_file}' - rc = run_command(command) + rc,c_out = run_command(command) return out_file diff --git a/rabies/preprocess_pkg/bold_ref.py b/rabies/preprocess_pkg/bold_ref.py index 9be3c64..7b0b246 100644 --- a/rabies/preprocess_pkg/bold_ref.py +++ b/rabies/preprocess_pkg/bold_ref.py @@ -172,7 +172,7 @@ def _run_interface(self, runtime): # denoise the resulting reference image through non-local mean denoising # Denoising reference image. command = f'DenoiseImage -d 3 -i {out_ref_fname} -o {out_ref_fname}' - rc = run_command(command) + rc,c_out = run_command(command) setattr(self, 'ref_image', out_ref_fname) diff --git a/rabies/preprocess_pkg/commonspace_reg.py b/rabies/preprocess_pkg/commonspace_reg.py index 5314395..129c5f8 100644 --- a/rabies/preprocess_pkg/commonspace_reg.py +++ b/rabies/preprocess_pkg/commonspace_reg.py @@ -515,7 +515,7 @@ def _run_interface(self, runtime): template_folder = self.inputs.output_folder command = f'mkdir -p {template_folder}' - rc = run_command(command) + rc,c_out = run_command(command) merged = flatten_list(list(self.inputs.moving_image_list)) # create a csv file of the input image list @@ -580,13 +580,13 @@ def _run_interface(self, runtime): # for an intermediate modelbuild step and include --close and --initial-transform to the registration. # To avoid this, the template file is renamed to another generic filename. 
command = f'cp {self.inputs.template_anat} {template_folder}/modelbuild_starting_target.nii.gz' - rc = run_command(command) + rc,c_out = run_command(command) log.debug(f"The --starting-target template original file is {self.inputs.template_anat}, and was renamed to {template_folder}/modelbuild_starting_target.nii.gz.") command = f'QBATCH_SYSTEM={cluster_type} QBATCH_CORES={num_threads} modelbuild.sh \ --float --average-type median --gradient-step 0.25 --iterations 2 --starting-target {template_folder}/modelbuild_starting_target.nii.gz --stages rigid,affine,nlin \ --output-dir {template_folder} --sharpen-type unsharp --block --debug {masks} {csv_path}' - rc = run_command(command) + rc,c_out = run_command(command) unbiased_template = template_folder + \ diff --git a/rabies/preprocess_pkg/hmc.py b/rabies/preprocess_pkg/hmc.py index abcbbde..18960a2 100644 --- a/rabies/preprocess_pkg/hmc.py +++ b/rabies/preprocess_pkg/hmc.py @@ -123,7 +123,7 @@ def _run_interface(self, runtime): # change the name of the first iteration directory to prevent overlap of files with second iteration if self.inputs.second: command = 'mv ants_mc_tmp first_ants_mc_tmp' - rc = run_command(command) + rc,c_out = run_command(command) # make a tmp directory to store the files os.makedirs('ants_mc_tmp', exist_ok=True) @@ -212,7 +212,7 @@ def _run_interface(self, runtime): raise ValueError("No smoothing coefficient was found.") else: raise ValueError("Wrong moreaccurate provided.") - rc = run_command(command) + rc,c_out = run_command(command) setattr(self, 'csv_params', os.path.abspath('ants_mc_tmp/motcorrMOCOparams.csv')) setattr(self, 'mc_corrected_bold', os.path.abspath('ants_mc_tmp/motcorr.nii.gz')) @@ -395,14 +395,14 @@ def _run_interface(self, runtime): # first the voxelwise positioning map command = f'antsMotionCorrStats -m {self.inputs.motcorr_params} -o {filename_split[0]}_pos_file.csv -x {self.inputs.raw_brain_mask} \ -d {self.inputs.raw_bold}' - rc = run_command(command) + rc,c_out = 
run_command(command) pos_voxelwise = os.path.abspath( f"{filename_split[0]}_pos_file.nii.gz") # then the voxelwise framewise displacement map command = f'antsMotionCorrStats -m {self.inputs.motcorr_params} -o {filename_split[0]}_FD_file.csv -x {self.inputs.raw_brain_mask} \ -d {self.inputs.raw_bold} -f 1' - rc = run_command(command) + rc,c_out = run_command(command) FD_csv = os.path.abspath(f"{filename_split[0]}_FD_file.csv") FD_voxelwise = os.path.abspath(f"{filename_split[0]}_FD_file.nii.gz") diff --git a/rabies/preprocess_pkg/inho_correction.py b/rabies/preprocess_pkg/inho_correction.py index 740f147..aaf2d70 100644 --- a/rabies/preprocess_pkg/inho_correction.py +++ b/rabies/preprocess_pkg/inho_correction.py @@ -277,7 +277,7 @@ def _run_interface(self, runtime): else: raise ValueError(f"Image type must be 'EPI' or 'structural', {self.inputs.image_type}") command = f'{processing_script} {target_img} {corrected} {self.inputs.anat_ref} {self.inputs.anat_mask} {self.inputs.inho_cor_method} {self.inputs.multistage_otsu} {str(self.inputs.otsu_threshold)}' - rc = run_command(command) + rc,c_out = run_command(command) resampled_mask = corrected.split('.nii.gz')[0]+'_mask.nii.gz' init_denoise = corrected.split('.nii.gz')[0]+'_init_denoise.nii.gz' @@ -297,25 +297,25 @@ def _run_interface(self, runtime): input_anat = target_img command = 'ImageMath 3 null_mask.nii.gz ThresholdAtMean %s 0' % (input_anat) - rc = run_command(command) + rc,c_out = run_command(command) command = 'ImageMath 3 thresh_mask.nii.gz ThresholdAtMean %s 1.2' % (input_anat) - rc = run_command(command) + rc,c_out = run_command(command) command = 'N4BiasFieldCorrection -d 3 -s 4 -i %s -b [20] -c [200x200x200,0.0] -w thresh_mask.nii.gz -x null_mask.nii.gz -o N4.nii.gz' % (input_anat) - rc = run_command(command) + rc,c_out = run_command(command) command = 'DenoiseImage -d 3 -i N4.nii.gz -o denoise.nii.gz' - rc = run_command(command) + rc,c_out = run_command(command) from rabies.preprocess_pkg.registration 
import run_antsRegistration [affine, warp, inverse_warp, warped_image] = run_antsRegistration(reg_method='Affine', moving_image=input_anat, fixed_image=self.inputs.anat_ref, fixed_mask=self.inputs.anat_mask) command = 'antsApplyTransforms -d 3 -i %s -t [%s,1] -r %s -o resampled_mask.nii.gz -n GenericLabel' % (self.inputs.anat_mask, affine, input_anat) - rc = run_command(command) + rc,c_out = run_command(command) command = 'N4BiasFieldCorrection -d 3 -s 2 -i %s -b [20] -c [200x200x200x200,0.0] -w resampled_mask.nii.gz -r 1 -x null_mask.nii.gz -o N4.nii.gz' % (input_anat) - rc = run_command(command) + rc,c_out = run_command(command) command = 'DenoiseImage -d 3 -i N4.nii.gz -o %s' % (corrected) - rc = run_command(command) + rc,c_out = run_command(command) # resample image to specified data format sitk.WriteImage(sitk.ReadImage(corrected, self.inputs.rabies_data_type), corrected) @@ -412,7 +412,7 @@ def _run_interface(self, runtime): [affine, warp, inverse_warp, warped_image] = run_antsRegistration(reg_method='Rigid', moving_image=cwd+'/corrected_iter2.nii.gz', fixed_image=self.inputs.anat, fixed_mask=self.inputs.anat_mask) command = 'antsApplyTransforms -d 3 -i %s -t [%s,1] -r %s -o %s -n GenericLabel' % (self.inputs.anat_mask, affine, cwd+'/corrected_iter2.nii.gz',resampled_mask) - rc = run_command(command) + rc,c_out = run_command(command) otsu_bias_cor(target=bias_cor_input, otsu_ref=cwd+'/corrected_iter2.nii.gz', out_name=cwd+'/final_otsu.nii.gz', b_value=b_value, mask=resampled_mask) @@ -444,9 +444,9 @@ def otsu_bias_cor(target, otsu_ref, out_name, b_value, mask=None, n_iter=200): import SimpleITK as sitk from rabies.utils import run_command command = 'ImageMath 3 null_mask.nii.gz ThresholdAtMean %s 0' % (otsu_ref) - rc = run_command(command) + rc,c_out = run_command(command) command = 'ThresholdImage 3 %s otsu_weight.nii.gz Otsu 4' % (otsu_ref) - rc = run_command(command) + rc,c_out = run_command(command) otsu_img = sitk.ReadImage( 'otsu_weight.nii.gz', 
sitk.sitkUInt8) @@ -485,16 +485,16 @@ def otsu_bias_cor(target, otsu_ref, out_name, b_value, mask=None, n_iter=200): sitk.WriteImage(mask_img, 'mask1234.nii.gz') command = 'N4BiasFieldCorrection -d 3 -i %s -b %s -s 1 -c [%sx%sx%s,1e-4] -w mask12.nii.gz -x null_mask.nii.gz -o corrected1.nii.gz' % (target, str(b_value), str(n_iter),str(n_iter),str(n_iter),) - rc = run_command(command) + rc,c_out = run_command(command) command = 'N4BiasFieldCorrection -d 3 -i corrected1.nii.gz -b %s -s 1 -c [%sx%sx%s,1e-4] -w mask34.nii.gz -x null_mask.nii.gz -o corrected2.nii.gz' % (str(b_value), str(n_iter),str(n_iter),str(n_iter),) - rc = run_command(command) + rc,c_out = run_command(command) command = 'N4BiasFieldCorrection -d 3 -i corrected2.nii.gz -b %s -s 1 -c [%sx%sx%s,1e-4] -w mask123.nii.gz -x null_mask.nii.gz -o corrected3.nii.gz' % (str(b_value), str(n_iter),str(n_iter),str(n_iter),) - rc = run_command(command) + rc,c_out = run_command(command) command = 'N4BiasFieldCorrection -d 3 -i corrected3.nii.gz -b %s -s 1 -c [%sx%sx%s,1e-4] -w mask234.nii.gz -x null_mask.nii.gz -o corrected4.nii.gz' % (str(b_value), str(n_iter),str(n_iter),str(n_iter),) - rc = run_command(command) + rc,c_out = run_command(command) command = 'N4BiasFieldCorrection -d 3 -i corrected4.nii.gz -b %s -s 1 -c [%sx%sx%s,1e-4] -w mask1234.nii.gz -x null_mask.nii.gz -o %s' % (str(b_value), str(n_iter),str(n_iter),str(n_iter),out_name,) - rc = run_command(command) + rc,c_out = run_command(command) diff --git a/rabies/preprocess_pkg/preprocess_visual_QC.py b/rabies/preprocess_pkg/preprocess_visual_QC.py index 2b49ede..3ec6fc2 100644 --- a/rabies/preprocess_pkg/preprocess_visual_QC.py +++ b/rabies/preprocess_pkg/preprocess_visual_QC.py @@ -51,7 +51,7 @@ def _run_interface(self, runtime): filename_template+f'_registration.png' command = f'{script_path} {self.inputs.moving} {self.inputs.fixed} {out_name}' - rc = run_command(command) + rc,c_out = run_command(command) setattr(self, 'out_figure', out_name) return 
runtime diff --git a/rabies/preprocess_pkg/registration.py b/rabies/preprocess_pkg/registration.py index 6befe7f..f70f29b 100644 --- a/rabies/preprocess_pkg/registration.py +++ b/rabies/preprocess_pkg/registration.py @@ -114,7 +114,7 @@ def run_antsRegistration(reg_method, brain_extraction=False, moving_image='NULL' else: command = f'{reg_call} {moving_image} {moving_mask} {fixed_image} {fixed_mask} {filename_split[0]}' from rabies.utils import run_command - rc = run_command(command) + rc,c_out = run_command(command) cwd = os.getcwd() warped_image = f'{cwd}/{filename_split[0]}_output_warped_image.nii.gz' diff --git a/rabies/preprocess_pkg/stc.py b/rabies/preprocess_pkg/stc.py index 3771d7a..912b299 100644 --- a/rabies/preprocess_pkg/stc.py +++ b/rabies/preprocess_pkg/stc.py @@ -102,7 +102,7 @@ def slice_timing_correction(in_file, tr='auto', tpattern='alt-z', stc_axis='Y', command = f'3dTshift -{interp_method} -prefix temp_tshift.nii.gz -tpattern {tpattern} -TR {tr} {target_file}' from rabies.utils import run_command - rc = run_command(command) + rc,c_out = run_command(command) tshift_img = sitk.ReadImage( 'temp_tshift.nii.gz', rabies_data_type) diff --git a/rabies/preprocess_pkg/utils.py b/rabies/preprocess_pkg/utils.py index a1cfa33..5af0613 100644 --- a/rabies/preprocess_pkg/utils.py +++ b/rabies/preprocess_pkg/utils.py @@ -181,7 +181,7 @@ def apply_despike(in_file): split = pathlib.Path(in_file).name.rsplit(".nii")[0] out_file = os.path.abspath(f"{split}_despike.nii.gz") command = f'3dDespike -prefix {out_file} {in_file}' - rc = run_command(command) + rc,c_out = run_command(command) return out_file def apply_autobox(in_file): @@ -191,7 +191,7 @@ def apply_autobox(in_file): split = pathlib.Path(in_file).name.rsplit(".nii")[0] out_file = os.path.abspath(f"{split}_autobox.nii.gz") command = f'3dAutobox -input {in_file} -prefix {out_file} -npad 1' - rc = run_command(command) + rc,c_out = run_command(command) return out_file @@ -252,7 +252,7 @@ def 
resample_template(template_file, mask_file, file_list, spacing='inputs_defin # also resample the brain mask to ensure stable registrations further down resampled_mask = os.path.abspath("resampled_mask.nii.gz") command = f'antsApplyTransforms -d 3 -i {mask_file} -r {resampled_template} -o {resampled_mask} --verbose -n GenericLabel' - rc = run_command(command) + rc,c_out = run_command(command) return resampled_template, resampled_mask diff --git a/rabies/run_main.py b/rabies/run_main.py index 7c83b78..769912b 100644 --- a/rabies/run_main.py +++ b/rabies/run_main.py @@ -326,7 +326,7 @@ def install_DSURQE(log): from rabies.preprocess_pkg.utils import run_command log.info( "SOME FILES FROM THE DEFAULT TEMPLATE ARE MISSING. THEY WILL BE INSTALLED BEFORE FURTHER PROCESSING.") - rc = run_command(f'install_DSURQE.sh {rabies_path}', verbose=True) + rc,c_out = run_command(f'install_DSURQE.sh {rabies_path}', verbose=True) def check_binary_masks(mask): diff --git a/rabies/utils.py b/rabies/utils.py index e41c440..be366fe 100644 --- a/rabies/utils.py +++ b/rabies/utils.py @@ -208,7 +208,7 @@ def _run_interface(self, runtime): if self.inputs.apply_motcorr: command = f'antsMotionCorrStats -m {motcorr_params} -o motcorr_vol{x}.mat -t {x}' - rc = run_command(command) + rc,c_out = run_command(command) transforms = orig_transforms+[f'motcorr_vol{x}.mat'] inverses = orig_inverses+[0] @@ -245,7 +245,7 @@ def exec_applyTransforms(transforms, inverses, input_image, ref_image, output_im interpolation = 'BSpline[5]' command = f'antsApplyTransforms -i {input_image} {transform_string}-n {interpolation} -r {ref_image} -o {output_image}' - rc = run_command(command) + rc,c_out = run_command(command) if not os.path.isfile(output_image): raise ValueError( "Missing output image. 
Transform call failed: "+command) @@ -360,19 +360,19 @@ def run_command(command, verbose = False): log.warning(e.output.decode("utf-8")) raise - out = process.stdout.decode("utf-8") - if not out == '': + c_out = process.stdout.decode("utf-8") + if not c_out == '': if verbose: - log.info(out) + log.info(c_out) else: - log.debug(out) + log.debug(c_out) if process.stderr is not None: if verbose: log.info(process.stderr) else: log.warning(process.stderr) rc = process.returncode - return rc + return rc,c_out def flatten_list(l): diff --git a/rabies/visualization.py b/rabies/visualization.py index 6577606..c325d06 100644 --- a/rabies/visualization.py +++ b/rabies/visualization.py @@ -27,7 +27,7 @@ def otsu_scaling(image_file): # select a smart vmax for the image display to enhance contrast command = f'ThresholdImage 3 {image_file} otsu_weight.nii.gz Otsu 4' - rc = run_command(command) + rc,c_out = run_command(command) # clip off the background mask = sitk.GetArrayFromImage(sitk.ReadImage('otsu_weight.nii.gz')) From e2b788771aee5054e227699e086cb8294dfc233d Mon Sep 17 00:00:00 2001 From: Gab-D-G Date: Tue, 3 Oct 2023 13:47:58 -0400 Subject: [PATCH 9/9] os.system and subprocess was replaced by run_command in AROMA package for better error catching --- .../mod_ICA_AROMA/ICA_AROMA_functions.py | 157 +++++++++++------- rabies/utils.py | 2 + 2 files changed, 103 insertions(+), 56 deletions(-) diff --git a/rabies/confound_correction_pkg/mod_ICA_AROMA/ICA_AROMA_functions.py b/rabies/confound_correction_pkg/mod_ICA_AROMA/ICA_AROMA_functions.py index 493cf4c..afb6db1 100644 --- a/rabies/confound_correction_pkg/mod_ICA_AROMA/ICA_AROMA_functions.py +++ b/rabies/confound_correction_pkg/mod_ICA_AROMA/ICA_AROMA_functions.py @@ -15,7 +15,8 @@ def run_ICA_AROMA(outDir,inFile,mc,TR,mask="",mask_csf="",denType="nonaggr",melDir="",dim=0,overwrite=False, random_seed=1): ###additional function for the execution of ICA_AROMA within RABIES import os - import subprocess + #import subprocess + 
from rabies.utils import run_command import shutil import rabies.confound_correction_pkg.mod_ICA_AROMA.classification_plots as classification_plots import rabies.confound_correction_pkg.mod_ICA_AROMA.ICA_AROMA_functions as aromafunc @@ -86,7 +87,8 @@ def run_ICA_AROMA(outDir,inFile,mc,TR,mask="",mask_csf="",denType="nonaggr",melD cmd = ' '.join([os.path.join(fslDir, 'fslinfo'), inFile, '| grep pixdim4 | awk \'{print $2}\'']) - TR = float(subprocess.getoutput(cmd)) + rc,c_out = run_command(cmd) + TR = float(c_out) # Check TR if TR == 0: @@ -170,7 +172,8 @@ def runICA(fslDir, inFile, outDir, melDirIn, mask, dim, TR, random_seed): # Import needed modules import os - import subprocess + #import subprocess + from rabies.utils import run_command # Define the 'new' MELODIC directory and predefine some associated files melDir = os.path.join(outDir, 'melodic.ica') @@ -201,7 +204,7 @@ def runICA(fslDir, inFile, outDir, melDirIn, mask, dim, TR, random_seed): os.path.join(melDir, item)) # Run mixture modeling - os.system(' '.join([os.path.join(fslDir, 'melodic'), + run_command(' '.join([os.path.join(fslDir, 'melodic'), '--in=' + melIC, '--ICs=' + melIC, '--mix=' + melICmix, @@ -217,7 +220,7 @@ def runICA(fslDir, inFile, outDir, melDirIn, mask, dim, TR, random_seed): print(' - The specified MELODIC directory does not contain the required files to run ICA-AROMA. MELODIC will be run seperately.') # Run MELODIC - os.system(' '.join([os.path.join(fslDir, 'melodic'), + run_command(' '.join([os.path.join(fslDir, 'melodic'), '--in=' + inFile, '--outdir=' + melDir, '--mask=' + mask, @@ -229,7 +232,8 @@ def runICA(fslDir, inFile, outDir, melDirIn, mask, dim, TR, random_seed): cmd = ' '.join([os.path.join(fslDir, 'fslinfo'), melIC, '| grep dim4 | head -n1 | awk \'{print $2}\'']) - nrICs = int(float(subprocess.getoutput(cmd))) + rc,c_out = run_command(cmd) + nrICs = int(float(c_out)) # Merge mixture modeled thresholded spatial maps. Note! 
In case that mixture modeling did not converge, the file will contain two spatial maps. The latter being the results from a simple null hypothesis test. In that case, this map will have to be used (first one will be empty). for i in range(1, nrICs + 1): @@ -238,32 +242,34 @@ def runICA(fslDir, inFile, outDir, melDirIn, mask, dim, TR, random_seed): cmd = ' '.join([os.path.join(fslDir, 'fslinfo'), zTemp, '| grep dim4 | head -n1 | awk \'{print $2}\'']) - lenIC = int(float(subprocess.getoutput(cmd))) + rc,c_out = run_command(cmd) + lenIC = int(float(c_out)) # Define zeropad for this IC-number and new zstat file cmd = ' '.join([os.path.join(fslDir, 'zeropad'), str(i), '4']) - ICnum = subprocess.getoutput(cmd) + rc,c_out = run_command(cmd) + ICnum = c_out zstat = os.path.join(outDir, 'thr_zstat' + ICnum) # Extract last spatial map within the thresh_zstat file - os.system(' '.join([os.path.join(fslDir, 'fslroi'), + run_command(' '.join([os.path.join(fslDir, 'fslroi'), zTemp, # input zstat, # output str(lenIC - 1), # first frame '1'])) # number of frames # Merge and subsequently remove all mixture modeled Z-maps within the output directory - os.system(' '.join([os.path.join(fslDir, 'fslmerge'), + run_command(' '.join([os.path.join(fslDir, 'fslmerge'), '-t', # concatenate in time melICthr, # output os.path.join(outDir, 'thr_zstat????.nii.gz')])) # inputs - os.system('rm ' + os.path.join(outDir, 'thr_zstat????.nii.gz')) + run_command('rm ' + os.path.join(outDir, 'thr_zstat????.nii.gz')) # Apply the mask to the merged file (in case a melodic-directory was predefined and run with a different mask) - os.system(' '.join([os.path.join(fslDir, 'fslmaths'), + run_command(' '.join([os.path.join(fslDir, 'fslmaths'), melICthr, '-mas ' + mask, melICthr])) @@ -531,10 +537,13 @@ def mod_feature_spatial(fslDir, tempDir, melIC, mask_csf, mask_edge, mask_out): # Import required modules import numpy as np import os - import subprocess + #import subprocess + from rabies.utils import 
run_command # Get the number of ICs - numICs = int(subprocess.getoutput('%sfslinfo %s | grep dim4 | head -n1 | awk \'{print $2}\'' % (fslDir, melIC) )) + cmd = '%sfslinfo %s | grep dim4 | head -n1 | awk \'{print $2}\'' % (fslDir, melIC) + rc,c_out = run_command(cmd) + numICs = int(c_out) # Loop over ICs edgeFract = np.zeros(numICs) @@ -544,27 +553,31 @@ def mod_feature_spatial(fslDir, tempDir, melIC, mask_csf, mask_edge, mask_out): tempIC = os.path.join(tempDir, 'temp_IC.nii.gz') # Extract IC from the merged melodic_IC_thr2MNI2mm file - os.system(' '.join([os.path.join(fslDir, 'fslroi'), + run_command(' '.join([os.path.join(fslDir, 'fslroi'), melIC, tempIC, str(i), '1'])) # Change to absolute Z-values - os.system(' '.join([os.path.join(fslDir, 'fslmaths'), + run_command(' '.join([os.path.join(fslDir, 'fslmaths'), tempIC, '-abs', tempIC])) # Get sum of Z-values within the total Z-map (calculate via the mean and number of non-zero voxels) - totVox = int(subprocess.getoutput(' '.join([os.path.join(fslDir, 'fslstats'), + cmd = ' '.join([os.path.join(fslDir, 'fslstats'), tempIC, - '-V | awk \'{print $1}\'']))) + '-V | awk \'{print $1}\'']) + rc,c_out = run_command(cmd) + totVox = int(c_out) if not (totVox == 0): - totMean = float(subprocess.getoutput(' '.join([os.path.join(fslDir, 'fslstats'), + cmd = ' '.join([os.path.join(fslDir, 'fslstats'), tempIC, - '-M']))) + '-M']) + rc,c_out = run_command(cmd) + totMean = float(c_out) else: print(' - The spatial map of component ' + str(i + 1) + ' is empty. 
Please check!') totMean = 0 @@ -572,46 +585,58 @@ def mod_feature_spatial(fslDir, tempDir, melIC, mask_csf, mask_edge, mask_out): totSum = totMean * totVox # Get sum of Z-values of the voxels located within the CSF (calculate via the mean and number of non-zero voxels) - csfVox = int(subprocess.getoutput(' '.join([os.path.join(fslDir, 'fslstats'), + cmd = ' '.join([os.path.join(fslDir, 'fslstats'), tempIC, '-k ', mask_csf, - '-V | awk \'{print $1}\'']))) + '-V | awk \'{print $1}\'']) + rc,c_out = run_command(cmd) + csfVox = int(c_out) if not (csfVox == 0): - csfMean = float(subprocess.getoutput(' '.join([os.path.join(fslDir, 'fslstats'), + cmd = ' '.join([os.path.join(fslDir, 'fslstats'), tempIC, '-k ', mask_csf, - '-M']))) + '-M']) + rc,c_out = run_command(cmd) + csfMean = float(c_out) else: csfMean = 0 csfSum = csfMean * csfVox # Get sum of Z-values of the voxels located within the Edge (calculate via the mean and number of non-zero voxels) - edgeVox = int(subprocess.getoutput(' '.join([os.path.join(fslDir, 'fslstats'), + cmd = ' '.join([os.path.join(fslDir, 'fslstats'), tempIC, '-k ', mask_edge, - '-V | awk \'{print $1}\'']))) + '-V | awk \'{print $1}\'']) + rc,c_out = run_command(cmd) + edgeVox = int(c_out) if not (edgeVox == 0): - edgeMean = float(subprocess.getoutput(' '.join([os.path.join(fslDir, 'fslstats'), + cmd = ' '.join([os.path.join(fslDir, 'fslstats'), tempIC, '-k ', mask_edge, - '-M']))) + '-M']) + rc,c_out = run_command(cmd) + edgeMean = float(c_out) else: edgeMean = 0 edgeSum = edgeMean * edgeVox # Get sum of Z-values of the voxels located outside the brain (calculate via the mean and number of non-zero voxels) - outVox = int(subprocess.getoutput(' '.join([os.path.join(fslDir, 'fslstats'), + cmd = ' '.join([os.path.join(fslDir, 'fslstats'), tempIC, '-k ', mask_out, - '-V | awk \'{print $1}\'']))) + '-V | awk \'{print $1}\'']) + rc,c_out = run_command(cmd) + outVox = int(c_out) if not (outVox == 0): - outMean = float(subprocess.getoutput(' 
'.join([os.path.join(fslDir, 'fslstats'), + cmd = ' '.join([os.path.join(fslDir, 'fslstats'), tempIC, '-k ', mask_out, - '-M']))) + '-M']) + rc,c_out = run_command(cmd) + outMean = float(c_out) else: outMean = 0 @@ -654,10 +679,13 @@ def feature_spatial(fslDir, tempDir, aromaDir, melIC): # Import required modules import numpy as np import os - import subprocess + #import subprocess + from rabies.utils import run_command # Get the number of ICs - numICs = int(subprocess.getoutput('%sfslinfo %s | grep dim4 | head -n1 | awk \'{print $2}\'' % (fslDir, melIC) )) + cmd = '%sfslinfo %s | grep dim4 | head -n1 | awk \'{print $2}\'' % (fslDir, melIC) + rc,c_out = run_command(cmd) + numICs = int(c_out) # Loop over ICs edgeFract = np.zeros(numICs) @@ -667,27 +695,31 @@ def feature_spatial(fslDir, tempDir, aromaDir, melIC): tempIC = os.path.join(tempDir, 'temp_IC.nii.gz') # Extract IC from the merged melodic_IC_thr2MNI2mm file - os.system(' '.join([os.path.join(fslDir, 'fslroi'), + run_command(' '.join([os.path.join(fslDir, 'fslroi'), melIC, tempIC, str(i), '1'])) # Change to absolute Z-values - os.system(' '.join([os.path.join(fslDir, 'fslmaths'), + run_command(' '.join([os.path.join(fslDir, 'fslmaths'), tempIC, '-abs', tempIC])) # Get sum of Z-values within the total Z-map (calculate via the mean and number of non-zero voxels) - totVox = int(subprocess.getoutput(' '.join([os.path.join(fslDir, 'fslstats'), + cmd = ' '.join([os.path.join(fslDir, 'fslstats'), tempIC, - '-V | awk \'{print $1}\'']))) + '-V | awk \'{print $1}\'']) + rc,c_out = run_command(cmd) + totVox = int(c_out) if not (totVox == 0): - totMean = float(subprocess.getoutput(' '.join([os.path.join(fslDir, 'fslstats'), + cmd = ' '.join([os.path.join(fslDir, 'fslstats'), tempIC, - '-M']))) + '-M']) + rc,c_out = run_command(cmd) + totMean = float(c_out) else: print(' - The spatial map of component ' + str(i + 1) + ' is empty. 
Please check!') totMean = 0 @@ -695,46 +727,58 @@ def feature_spatial(fslDir, tempDir, aromaDir, melIC): totSum = totMean * totVox # Get sum of Z-values of the voxels located within the CSF (calculate via the mean and number of non-zero voxels) - csfVox = int(subprocess.getoutput(' '.join([os.path.join(fslDir, 'fslstats'), - tempIC, - '-k mask_csf.nii.gz', - '-V | awk \'{print $1}\'']))) + cmd = ' '.join([os.path.join(fslDir, 'fslstats'), + tempIC, + '-k mask_csf.nii.gz', + '-V | awk \'{print $1}\'']) + rc,c_out = run_command(cmd) + csfVox = int(c_out) if not (csfVox == 0): - csfMean = float(subprocess.getoutput(' '.join([os.path.join(fslDir, 'fslstats'), + cmd = ' '.join([os.path.join(fslDir, 'fslstats'), tempIC, '-k mask_csf.nii.gz', - '-M']))) + '-M']) + rc,c_out = run_command(cmd) + csfMean = float(c_out) else: csfMean = 0 csfSum = csfMean * csfVox # Get sum of Z-values of the voxels located within the Edge (calculate via the mean and number of non-zero voxels) - edgeVox = int(subprocess.getoutput(' '.join([os.path.join(fslDir, 'fslstats'), + cmd = ' '.join([os.path.join(fslDir, 'fslstats'), tempIC, '-k mask_edge.nii.gz', - '-V | awk \'{print $1}\'']))) + '-V | awk \'{print $1}\'']) + rc,c_out = run_command(cmd) + edgeVox = int(c_out) if not (edgeVox == 0): - edgeMean = float(subprocess.getoutput(' '.join([os.path.join(fslDir, 'fslstats'), + cmd = ' '.join([os.path.join(fslDir, 'fslstats'), tempIC, '-k mask_edge.nii.gz', - '-M']))) + '-M']) + rc,c_out = run_command(cmd) + edgeMean = float(c_out) else: edgeMean = 0 edgeSum = edgeMean * edgeVox # Get sum of Z-values of the voxels located outside the brain (calculate via the mean and number of non-zero voxels) - outVox = int(subprocess.getoutput(' '.join([os.path.join(fslDir, 'fslstats'), + cmd = ' '.join([os.path.join(fslDir, 'fslstats'), tempIC, '-k mask_out.nii.gz', - '-V | awk \'{print $1}\'']))) + '-V | awk \'{print $1}\'']) + rc,c_out = run_command(cmd) + outVox = int(c_out) if not (outVox == 0): - outMean =
float(subprocess.getoutput(' '.join([os.path.join(fslDir, 'fslstats'), + cmd = ' '.join([os.path.join(fslDir, 'fslstats'), tempIC, '-k mask_out.nii.gz', - '-M']))) + '-M']) + rc,c_out = run_command(cmd) + outMean = float(c_out) else: outMean = 0 @@ -852,6 +896,7 @@ def denoising(fslDir, inFile, outDir, melmix, denType, denIdx): # Import required modules import os import numpy as np + from rabies.utils import run_command # Check if denoising is needed (i.e. are there components classified as motion) check = denIdx.size > 0 @@ -866,7 +911,7 @@ def denoising(fslDir, inFile, outDir, melmix, denType, denIdx): # Non-aggressive denoising of the data using fsl_regfilt (partial regression), if requested if (denType == 'nonaggr') or (denType == 'both'): - os.system(' '.join([os.path.join(fslDir, 'fsl_regfilt'), + run_command(' '.join([os.path.join(fslDir, 'fsl_regfilt'), '--in=' + inFile, '--design=' + melmix, '--filter="' + denIdxStrJoin + '"', @@ -874,7 +919,7 @@ def denoising(fslDir, inFile, outDir, melmix, denType, denIdx): # Aggressive denoising of the data using fsl_regfilt (full regression) if (denType == 'aggr') or (denType == 'both'): - os.system(' '.join([os.path.join(fslDir, 'fsl_regfilt'), + run_command(' '.join([os.path.join(fslDir, 'fsl_regfilt'), '--in=' + inFile, '--design=' + melmix, '--filter="' + denIdxStrJoin + '"', diff --git a/rabies/utils.py b/rabies/utils.py index be366fe..4c1d8c6 100644 --- a/rabies/utils.py +++ b/rabies/utils.py @@ -372,6 +372,8 @@ def run_command(command, verbose = False): else: log.warning(process.stderr) rc = process.returncode + if len(c_out)>0 and c_out[-1]=='\n': # remove the extra break point, can affect string construction + c_out = c_out[:-1] return rc,c_out