From df3e6519ce01a55188d6653511aa4f03fd43bd22 Mon Sep 17 00:00:00 2001 From: gr21783 Date: Wed, 28 Sep 2016 08:23:47 -0400 Subject: [PATCH 01/30] return runs using task name not a number eg use task-emosent not task=5 --- subject_level/fmri_ants_bids.py | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index 371d560..9fc43bd 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -553,7 +553,7 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id, session_id=None): import os import numpy as np import re - + condition_info = [] cond_file = os.path.join(base_dir, 'code', 'model', 'model%03d' % model_id, 'condition_key.txt') @@ -564,16 +564,15 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id, session_id=None): if len(condition_info) == 0: raise ValueError('No condition info found in %s' % cond_file) taskinfo = np.array(condition_info) - #n_tasks = np.unique(taskinfo[:, 0]) - n_tasks = [] - for x in taskinfo[:, 0]: - if x not in n_tasks: - n_tasks.append(x) + + task_list = list(set(taskinfo[:, 0])) + n_tasks = len(task_list) + conds = [] run_ids = [] - if task_id > len(n_tasks): - raise ValueError('Task id %s does not exist' % task_id) - for idx,task in enumerate(n_tasks): + + + for idx,task in enumerate(task_list): taskidx = np.where(taskinfo[:, 0] == '%s'%(task)) conds.append([condition.replace(' ', '_') for condition in taskinfo[taskidx[0], 2]]) # if 'junk' not in condition]) @@ -588,7 +587,7 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id, session_id=None): subject_id, 'func', '*%s*.nii.gz'%(task)))) - + try: runs = [int(re.search('(?<=run-)\d+',os.path.basename(val)).group(0)) for val in files] except AttributeError: @@ -597,7 +596,7 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id, session_id=None): # TR should be same across runs if session_id: json_info = glob(os.path.join(base_dir, subject_id, session_id, - 'func','*%s*.json'%(n_tasks[task_id-1])))[0] + 'func','*%s*.json'%(task)))[0] else: json_info = glob(os.path.join(base_dir, subject_id, 'func', '*%s*.json'%(n_tasks[task_id-1])))[0] @@ -612,7 +611,7 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id, session_id=None): TR = np.genfromtxt(task_scan_key)[1] else: TR = np.genfromtxt(os.path.join(base_dir, 'scan_key.txt'))[1] - return run_ids[task_id - 1], conds[task_id - 1], TR + return run_ids[0], conds[0], TR """ From de8e002148389d7e27f2700a1faa409d4f4c7215 Mon Sep 17 00:00:00 2001 From: gr21783 Date: Thu, 29 Sep 2016 07:40:58 -0400 Subject: [PATCH 02/30] process arbitrary runs per subject --- subject_level/fmri_ants_bids.py | 29 ++++++++++++++++++++++------- 1 file changed, 22 insertions(+), 7 deletions(-) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index 9fc43bd..19010b1 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -528,7 +528,7 @@ def create_fs_reg_workflow(name='registration'): Get info for a given subject """ -def get_subjectinfo(subject_id, base_dir, task_id, model_id, session_id=None): +def get_subjectinfo(subject_id, base_dir, task_id, model_id, session_id=None, run_id=None): """Get info for a given subject Parameters ---------- @@ -611,7 +611,15 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id, session_id=None): TR = np.genfromtxt(task_scan_key)[1] else: TR = np.genfromtxt(os.path.join(base_dir, 'scan_key.txt'))[1] - return run_ids[0], 
conds[0], TR + + if run_id==None: + run_list_process = run_ids[0] + else: + run_list_process = run_id + + print '==================================== process run list ======================' + print run_list_process + return run_list_process, conds[0], TR """ @@ -637,7 +645,8 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, task_id=None, output_dir=None, subj_prefix='*', hpcutoff=120., use_derivatives=True, fwhm=6.0, subjects_dir=None, target=None, - session_id=None): + session_id=None, + run_id=None): """Analyzes an open fmri dataset Parameters @@ -692,13 +701,14 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, ('task_id', task_id)] subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir', - 'task_id', 'model_id', 'session_id'], + 'task_id', 'model_id', 'session_id', 'run_id'], output_names=['run_id', 'conds', 'TR'], function=get_subjectinfo), name='subjectinfo') #subjinfo.inputs.base_dir = None ## update with argumentparse eventually subjinfo.inputs.base_dir = data_dir subjinfo.inputs.session_id = session_id + subjinfo.inputs.run_id = run_id """ Get task name (BIDS) @@ -755,13 +765,13 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, else: datasource.inputs.field_template = {'anat': '%s/%s/anat/*T1w.nii.gz', - 'bold': '%s/%s/func/*task-%s_*bold.nii.gz', + 'bold': '%s/%s/func/*task-%s_run-%02d_bold.nii.gz', 'behav': ('code/model/model%03d/onsets/%s/task%03d_' 'run%03d/cond*.txt'), 'contrasts': ('code/model/model%03d/' 'task_contrasts.txt')} datasource.inputs.template_args = {'anat': [['subject_id', 'session_id']], - 'bold': [['subject_id', 'session_id','task_name']], + 'bold': [['subject_id', 'session_id','task_name', 'run_id']], 'behav': [['model_id', 'subject_id', 'task_id', 'run_id']], 'contrasts': [['model_id']]} @@ -1070,7 +1080,9 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): subs.append(('/model%03d/task%03d_' % (model_id, task_id), '/')) subs.append(('_bold_dtype_mcf_bet_thresh_dil', '_mask')) subs.append(('mask/model%03d/task%03d/' % (model_id, task_id), 'mask/')) + subs.append(('art/model%03d/task%03d/' % (model_id, task_id), 'art/')) subs.append(('tsnr/model%03d/task%03d/' % (model_id, task_id), 'tsnr/')) + subs.append(('model/model%03d/task%03d/' % (model_id, task_id), 'model/')) subs.append(('_output_warped_image', '_anat2target')) subs.append(('median_flirt_brain_mask', 'median_brain_mask')) subs.append(('median_bbreg_brain_mask', 'median_brain_mask')) @@ -1188,6 +1200,8 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): "OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz")) parser.add_argument("--session_id", dest="session_id", default=None, help="Session id, ses-1") + parser.add_argument("--run_id", dest="run_id", default=None, + help="Run id list: 1,2 or 1 or 2") parser.add_argument("--crashdump_dir", dest="crashdump_dir", help="Crashdump dir", default=None) @@ -1216,7 +1230,8 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): fwhm=args.fwhm, subjects_dir=args.subjects_dir, target=args.target_file, - session_id=args.session_id) + session_id=args.session_id, + run_id=[int(i) for i in args.run_id.split(',')]) #wf.config['execution']['remove_unnecessary_outputs'] = False wf.config['execution']['poll_sleep_duration'] = 2 wf.base_dir = work_dir From b893fa2db01f3daef69611fcf76fcfcdb54b2b5a Mon Sep 17 00:00:00 2001 From: gr21783 Date: Thu, 29 Sep 2016 07:41:55 -0400 Subject: [PATCH 03/30] added comments to group script --- group_level/group_multregress_bids.py | 32 
+++++++++++++++++++++++---- 1 file changed, 28 insertions(+), 4 deletions(-) diff --git a/group_level/group_multregress_bids.py b/group_level/group_multregress_bids.py index add4112..9453900 100755 --- a/group_level/group_multregress_bids.py +++ b/group_level/group_multregress_bids.py @@ -3,6 +3,30 @@ for help: contact: annepark@mit.edu + + +Example call: + +python /git/gopenfmri/group_level/group_multregress_bids.py \ +-m 124 \ +-t 5 \ +-d /om/project/voice/bids/data/ \ +-l1 /om/project/voice/processedData/l1analysis/l1output_20160901a/ \ +-o /om/project/voice/processedData/groupAnalysis/l2output_20160901a/ \ +-w /om/scratch/Fri/user/group_voice`date +%Y%m%d%H%M%S`/ \ +-p 'SLURM' --plugin_args "{'sbatch_args':'-p om_all_nodes'}" \ +-s /om/project/voice/processedData/openfmri/groups/user/20160904/participant_key.txt \ +-b /om/project/voice/processedData/openfmri/groups/user/20160904/behav.txt \ +-g /om/project/voice/processedData/openfmri/groups/user/20160904/contrasts.txt + +## Required files +# participant_key.txt +# behav.txt +# contrasts.txt + +## Additional documentation +https://github.mit.edu/MGHPCC/OpenMind/wiki/Lab-Specific-Cookbook:-Gabrieli-lab#grouplevelscripts + """ import os @@ -51,11 +75,11 @@ def get_sub_vars(dataset_dir, task_name, model_id, sub_list_file, behav_file, gr import numpy as np import os import pandas as pd - import re - #sub_list_file = os.path.join(dataset_dir, 'code', 'groups', 'participant_key.txt') - #behav_file = os.path.join(dataset_dir, 'code', 'groups', 'behav.txt') - #group_contrast_file = os.path.join(dataset_dir, 'code', 'groups', 'contrasts.txt') + import re + # Read in all subjects in participant_key ("sub_list_file") + # Process only subjects with a nonzero number + # Check to make sure every subject to be processed has a line in behav.txt ("behav_file") subs_list = pd.read_table(sub_list_file, index_col=0)['task-%s' % task_name] subs_needed = subs_list.index[np.nonzero(subs_list)[0]] behav_info = pd.read_table(behav_file, delim_whitespace=True, index_col=0) From 23b687950aef9a430af2c5046207a027f88d104c Mon Sep 17 00:00:00 2001 From: gr21783 Date: Sun, 2 Oct 2016 17:19:31 -0400 Subject: [PATCH 04/30] datasink renaming of qa motion to remove the duplicate model task subfolders in the path --- subject_level/fmri_ants_bids.py | 1 + 1 file changed, 1 insertion(+) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index 19010b1..fa76dea 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -1083,6 +1083,7 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): subs.append(('art/model%03d/task%03d/' % (model_id, task_id), 'art/')) subs.append(('tsnr/model%03d/task%03d/' % (model_id, task_id), 'tsnr/')) subs.append(('model/model%03d/task%03d/' % (model_id, task_id), 'model/')) + subs.append(('motion/model%03d/task%03d/' % (model_id, task_id), 'motion/')) subs.append(('_output_warped_image', '_anat2target')) subs.append(('median_flirt_brain_mask', 'median_brain_mask')) subs.append(('median_bbreg_brain_mask', 'median_brain_mask')) From 95c5c5620760b2e5ed636cf2904187c8ef4c53e0 Mon Sep 17 00:00:00 2001 From: gr21783 Date: Tue, 4 Oct 2016 16:37:32 -0400 Subject: [PATCH 05/30] tsnr std output and mean output --- subject_level/fmri_ants_bids.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index fa76dea..a0faa0c 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -1127,6 +1127,8 @@ def 
get_subs(subject_id, conds, run_id, model_id, task_id): wf.connect(art, 'outlier_files', datasink, 'qa.art.@outlier_files') wf.connect(registration, 'outputspec.anat2target', datasink, 'qa.anat2target') wf.connect(tsnr, 'tsnr_file', datasink, 'qa.tsnr.@map') + wf.connect(tsnr, 'stddev_file', datasink, 'qa.tsnr.@stddev') + wf.connect(tsnr, 'mean_file', datasink, 'qa.tsnr.@mean') if subjects_dir: wf.connect(registration, 'outputspec.min_cost_file', datasink, 'qa.mincost') wf.connect([(get_roi_tsnr, datasink, [('avgwf_txt_file', 'qa.tsnr'), From 24e670701738b88d8233917b1aa21304e839cc4e Mon Sep 17 00:00:00 2001 From: gr21783 Date: Fri, 7 Oct 2016 08:23:11 -0400 Subject: [PATCH 06/30] conjunction analysis stub --- group_level/conjunction.ipynb | 515 ++++++++++++++++++++++++++++++++++ 1 file changed, 515 insertions(+) create mode 100644 group_level/conjunction.ipynb diff --git a/group_level/conjunction.ipynb b/group_level/conjunction.ipynb new file mode 100644 index 0000000..f9f1aae --- /dev/null +++ b/group_level/conjunction.ipynb @@ -0,0 +1,515 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "%reset -f\n", + "import numpy as np\n", + "import scipy\n", + "from scipy import stats\n", + "from glob import glob\n", + "import nibabel as nib\n", + "import os\n", + "# Perform a conjunction analysis\n", + "\n", + "## Reference\n", + "# Satrajit Ghosh\n", + "# 2005_Nichols_ValidConjunctionInferenceMinimumStatistic.pdf\n", + "# http://stattrek.com/regression/slope-test.aspx?Tutorial=AP\n", + "# http://nilearn.github.io/manipulating_images/manipulating_images.html\n", + "\n", + "\n", + "## About\n", + "# Greg Ciccarelli\n", + "# October 6, 2016\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "def get_t_min(t1,t2):\n", + " # t1 and t2 are t_maps for the two contrasts\n", + " # Informally, in words: \n", + " # If activations have opposite sign, return 0\n", + " # If activations have the same sign, t_min is the t value closest to zero (keeping the sign)\n", + "\n", + "\n", + " # First term: assume both positive, otherwise 0\n", + " # Second term: assume both negative, otherwise 0\n", + " t_min = np.maximum(np.minimum(t1,t2), 0) + np.minimum(np.maximum(t1,t2), 0)\n", + "\n", + " return t_min" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# For a single subject, use the t-map for contrast 1 and contrast 2 to make a t_min map\n", + "# Perform significance testing on this map, using multiple hypothesis corrections" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Test data\n", + "c1 = np.array([[1,2,3.1], [4, -2, 5]])\n", + "c2 = np.array([[3,-1, 10], [3.8, -8, 0]])\n", + "\n", + "c1 = np.random.randn(2,2,3)\n", + "c2 = np.random.randn(2,2,3)\n", + "print c1\n", + "print c2" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "file_path_sub_list1 = glob(\n", + " os.path.join('/om/project/voice/processedData/l1analysis/l1output_2016100414/model126/task005', \n", + " 'sub-voice8*/tstats/tstat01.nii.gz'))\n", + "file_path_sub_list2 = glob(\n", + " os.path.join('/om/project/voice/processedData/l1analysis/l1output_2016100414/model126/task005', \n", + " 'sub-voice8*/tstats/tstat02.nii.gz'))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + 
"collapsed": false + }, + "outputs": [], + "source": [ + "file_path_sub_list1 = glob(\n", + " os.path.join('/om/project/voice/processedData/l1analysis/l1output_2016100414/model126/task005', \n", + " 'sub-voice8*/zstats/mni/zstat01.nii.gz'))\n", + "file_path_sub_list2 = glob(\n", + " os.path.join('/om/project/voice/processedData/l1analysis/l1output_2016100414/model126/task005', \n", + " 'sub-voice8*/zstats/mni/zstat02.nii.gz'))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "\n", + "\n", + "\n", + "# CRUCIAL: sort to allow pairwise matching\n", + "file_path_sub_list1 = sorted(file_path_sub_list1)\n", + "file_path_sub_list2 = sorted(file_path_sub_list2)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "print file_path_sub_list1" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "/om/project/voice/processedData/l1analysis/l1output_2016100414/model126/task005/sub-voice852/tstats/tstat01.nii.gz\n", + "/om/project/voice/processedData/l1analysis/l1output_2016100414/model126/task005/sub-voice852/tstats/tstat02.nii.gz" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Assume that both contrasts exist for each subject\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "img = nib.load(file_path_sub_list1[0])\n", + "# get data as numpy array\n", + "t1 = img.get_data()\n", + "\n", + "t_min_all = np.empty((np.shape(t1)[0],np.shape(t1)[1],np.shape(t1)[2],0))\n", + "\n", + "print np.shape(t_min_all)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "for f1, f2 in zip(file_path_sub_list1, file_path_sub_list2)[:3]:\n", + " img = nib.load(f1)\n", + " # get data as numpy array\n", + " t1 = img.get_data()\n", + " \n", + " img = nib.load(f2)\n", + " # get data as numpy array\n", + " t2 = img.get_data() \n", + " \n", + " t_min = get_t_min(t1,t2)\n", + " t_min_all = np.concatenate((t_min_all, t_min[:, :, :, np.newaxis]), axis = 3)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "print np.shape(t_min)\n", + "print np.shape(t_min_all)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# The nichol's paper is organized around maps that already have been thresholded\n", + "# into binary form to either declare an effect present for a voxel (H=1) or not (H=0)\n", + "\n", + "# At the group level, a t-test isn't valid because the inputs to the\n", + "# test are 0 or 1 from each subject, not a continuous value \n", + "# The null hypothesis then should be derived from a binomial distribution\n", + "# Unfortunately, an arbitrary value must be chosen to be used for the \n", + "# probability of a 1 when no effect is present. 
Otherwise, zero probability is \n", + "# assigned to presence of one or more \"1\"s returned from the subjects.\n", + "# Actually the probability of a false positive for a subject should be the error rate e.g. 0.05!\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Can't perform a t-test conjunction on one subject\n", + "#scipy.stats.ttest_1samp(np.array([[1]]), 0, axis=0)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# t_min_all = np.random.randn(2,2,3,10)\n", + "# Convert t values to p values" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "\n", + "t1samp, p1samp = scipy.stats.ttest_1samp(t_min_all, 0, axis=3)\n", + "\n", + "print t1samp\n", + "print p1samp[np.logical_not(np.isnan(p1samp))]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Using Nichols family wise error correction (Sidak procedure)\n", + "# https://en.wikipedia.org/wiki/Family-wise_error_rate#The_.C5.A0id.C3.A1k_procedure\n", + "\n", + "# Too conservative as hypotheses can only be tested when the tmap is not NaN\n", + "V = np.size(t_min)\n", + "print V\n", + "V = np.sum(np.logical_not(np.isnan(t1samp)))\n", + "print V\n", + "# This is a p value\n", + "alpha0 = 0.05\n", + "alpha_c_fwe = 1 - (1 - alpha0)**(1./V)\n", + "\n", + "print alpha_c_fwe\n", + "# Family wise error correction to p value" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# To analyze multiple subjects, for each subject contrast 1 and contrast 2, create a t_min map\n", + "# Then, perform a 1 sample t-test (for every voxel independently) across all the t_min maps from all the subjects\n", + "# Null hypothesis in all cases is that the value is 0" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# http://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.t.html\n", + "\n", + "df = np.shape(t_min_all)[3] - 1\n", + "t_thresh_c_fwe = scipy.stats.t.ppf(alpha_c_fwe/2, df, loc=0, scale=1)\n", + "print t_thresh_c_fwe" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "\n", + "# Write out the nifti brain with values for visualization\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "\n", + "# Write out a new image using that numpy data and the original affine transformation matrix\n", + "imgN = nib.Nifti1Image(t1samp, img.affine)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": 
true + }, + "outputs": [], + "source": [ + "nib.save(img, 'test3d.nii.gz')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Scratch Work" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "idx = np.where(np.logical_not(np.isnan(p1samp)))\n", + "\n", + "print np.shape(idx)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "idx = np.asarray(idx)\n", + "print np.shape(idx)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "idx[:,0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "i = 2323\n", + "\n", + "print p1samp[idx[:,i][0], idx[:,i][1], idx[:,i][2]]\n", + "print t1samp[idx[:,i][0], idx[:,i][1], idx[:,i][2]]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "df = np.shape(t_min_all)[3] - 1\n", + "scipy.stats.t.ppf(0.630136084224/2, df, loc=0, scale=1)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.12" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} From a7eb16a67f03304ed4e965211b826642e741f001 Mon Sep 17 00:00:00 2001 From: gr21783 Date: Wed, 19 Oct 2016 07:58:53 -0400 Subject: [PATCH 07/30] fix: group contrast vector truncation --- group_level/group_multregress_bids.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/group_level/group_multregress_bids.py b/group_level/group_multregress_bids.py index 9453900..35195b3 100755 --- a/group_level/group_multregress_bids.py +++ b/group_level/group_multregress_bids.py @@ -104,7 +104,7 @@ def get_sub_vars(dataset_dir, task_name, model_id, sub_list_file, behav_file, gr if val not in behav_info.keys(): raise ValueError('Regressor %s not in behav.txt file' % val) contrast_name = row.split()[1] - contrast_vector = np.array(re.search("\]([\s\d]+)", row).group(1).split()).astype(float).tolist() + contrast_vector = np.array(re.search("\]([\s\d.-]+)", row).group(1).split()).astype(float).tolist() con = [tuple([contrast_name, 'T', regressor_names, contrast_vector])] contrasts.append(con) From 17b9dbcefdbbfdccdc522e711e1662e120be7068 Mon Sep 17 00:00:00 2001 From: gr21783 Date: Wed, 19 Oct 2016 13:16:08 -0400 Subject: [PATCH 08/30] Fix: Remove model/task duplicate in data synch for roi --- subject_level/fmri_ants_bids.py | 1 + 1 file changed, 1 insertion(+) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index a0faa0c..f30fd70 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -1084,6 +1084,7 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): subs.append(('tsnr/model%03d/task%03d/' % (model_id, task_id), 'tsnr/')) 
subs.append(('model/model%03d/task%03d/' % (model_id, task_id), 'model/')) subs.append(('motion/model%03d/task%03d/' % (model_id, task_id), 'motion/')) + subs.append(('copes/roi/model%03d/task%03d/' % (model_id, task_id), 'copes/roi/')) subs.append(('_output_warped_image', '_anat2target')) subs.append(('median_flirt_brain_mask', 'median_brain_mask')) subs.append(('median_bbreg_brain_mask', 'median_brain_mask')) From d7bf4d9786aac75a72a208be4cd53c1bdf123ef6 Mon Sep 17 00:00:00 2001 From: gr21783 Date: Wed, 19 Oct 2016 16:09:15 -0400 Subject: [PATCH 09/30] Enh: Sink registration.dat to qa/bbregister --- subject_level/fmri_ants_bids.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index f30fd70..baaae67 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -1138,6 +1138,8 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): ('summary_file', 'copes.roi.@summary')])]) wf.connect(sampleaparc, 'summary_file', datasink, 'timeseries.aparc.@summary') wf.connect(sampleaparc, 'avgwf_txt_file', datasink, 'timeseries.aparc') + wf.connect(registration, 'outputspec.out_reg_file', datasink, 'qa.bbregister') + wf.connect([(splitfunc, datasink, [('copes', 'copes.mni'), ('varcopes', 'varcopes.mni'), From 9d1d189ad7c8d96ab7a954698505357353fe778a Mon Sep 17 00:00:00 2001 From: gr21783 Date: Mon, 24 Oct 2016 07:35:05 -0400 Subject: [PATCH 10/30] Fix: default to all run processing if runs not specified --- subject_level/fmri_ants_bids.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index baaae67..2f26435 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -1224,7 +1224,13 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): 'task%03d' % int(args.task)) derivatives = args.derivatives if derivatives is None: - derivatives = False + derivatives = False + + if args.run_id == 'None': + args.run_id = None + elif args.run_id is not None: + args.run_id = [int(i) for i in args.run_id.split(',')] + wf = analyze_openfmri_dataset(data_dir=os.path.abspath(args.datasetdir), subject=args.subject, model_id=int(args.model), @@ -1237,7 +1243,7 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): subjects_dir=args.subjects_dir, target=args.target_file, session_id=args.session_id, - run_id=[int(i) for i in args.run_id.split(',')]) + run_id=args.run_id) #wf.config['execution']['remove_unnecessary_outputs'] = False wf.config['execution']['poll_sleep_duration'] = 2 wf.base_dir = work_dir From 03a4b1007b14022524f65a7c125280cca21ee7a1 Mon Sep 17 00:00:00 2001 From: gr21783 Date: Mon, 24 Oct 2016 13:06:50 -0400 Subject: [PATCH 11/30] Enh: remove extraneous input argument to multiregress internal func --- group_level/group_multregress_bids.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/group_level/group_multregress_bids.py b/group_level/group_multregress_bids.py index 35195b3..9b369b5 100755 --- a/group_level/group_multregress_bids.py +++ b/group_level/group_multregress_bids.py @@ -71,7 +71,7 @@ def get_taskname(base_dir, task_id): if 'task%03d'%(task_id) in info: return info[1] -def get_sub_vars(dataset_dir, task_name, model_id, sub_list_file, behav_file, group_contrast_file): +def get_sub_vars(task_name, model_id, sub_list_file, behav_file, group_contrast_file): import numpy as np import os import pandas as pd @@ -145,7 +145,7 @@ def 
group_multregress_openfmri(dataset_dir, model_id=None, task_id=None, l1outpu for task in task_id: task_name = get_taskname(dataset_dir, task) cope_ids = l1_contrasts_num(model_id, task_name, dataset_dir) - regressors_needed, contrasts, groups, subj_list = get_sub_vars(dataset_dir, task_name, model_id, + regressors_needed, contrasts, groups, subj_list = get_sub_vars(task_name, model_id, sub_list_file, behav_file, group_contrast_file) for idx, contrast in enumerate(contrasts): wk = Workflow(name='model_%03d_task_%03d_contrast_%s' % (model_id, task, contrast[0][0])) From 230ce790a6e15ff2755b3fe18267777b1695148c Mon Sep 17 00:00:00 2001 From: gr21783 Date: Fri, 28 Oct 2016 20:34:33 -0400 Subject: [PATCH 12/30] Enh: port K. Sitek ppi code. Untested. --- subject_level/fmri_ants_bids.py | 133 ++++++++++++++++++++++++++++---- 1 file changed, 116 insertions(+), 17 deletions(-) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index 2f26435..63b1183 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -646,7 +646,8 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, hpcutoff=120., use_derivatives=True, fwhm=6.0, subjects_dir=None, target=None, session_id=None, - run_id=None): + run_id=None, + ppi_flag=False): """Analyzes an open fmri dataset Parameters @@ -862,6 +863,76 @@ def get_contrasts(contrast_file, task_name, conds): name="modelspec") modelspec.inputs.input_units = 'secs' +############ + if ppi_flag: + print "setting up all the ppi code" + # Ported from K. Sitek mit openfmri, obidssparse branch + # subject_id sub-voice854 + def subtract_1(run_id): + run_id_0index = [] + [run_id_0index.append(run-1) for run in run_id] + return run_id_0index + sub_1 = pe.Node(niu.Function(input_names=['run_id'], + output_names=['run_id_0index'], + function=subtract_1), + name='sub_1') + wf.connect(subjinfo, 'run_id', sub_1, 'run_id') # but will it be 1 off? indexed by 0 + + datasource_timeseries = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id', + 'task_id', 'model_id'], + outfields=['aparc_timeseries_file']), + name='datasource_timeseries') + datasource_timeseries.inputs.base_directory = '/om/project/voice/processedData/l1analysis/l1output_2016102214' + datasource_timeseries.inputs.template = '*' + datasource_timeseries.inputs.sort_filelist=True + datasource_timeseries.inputs.field_template = {'aparc_timeseries_file': ('model%02d/task%03d/%s/timeseries/' + 'aparc/_aparc_ts%d/aparc+aseg_warped_avgwf.txt')} + datasource_timeseries.inputs.template_args = {'aparc_timeseries_file': [['model_id', + 'task_id','subject_id','run_id']]} + wf.connect(infosource, 'subject_id', datasource_timeseries, 'subject_id') + wf.connect(infosource, 'model_id', datasource_timeseries, 'model_id') + wf.connect(infosource, 'task_id', datasource_timeseries, 'task_id') + wf.connect(sub_1, 'run_id_0index', datasource_timeseries, 'run_id') # but will it be 1 off? indexed by 0 + + def model_ppi_func(session_info,ppi_aparc_timeseries_file): + # Assumes that previous L1 run had three conditions (e.g., emo happy,sad,neutral) + # These get summed together to create the single task regressor + # The ppi_aparc_timeseries file contains the physiological regressors. 
Pick one for the seed region + # The column labels for the regions in the aparc+aseg_warped_avgwf.txt file are in the summary.stats file + print "model ppi function" + print session_info + + import numpy as np + from copy import copy + session_info_ppi = copy(session_info) + for idx,info in enumerate(session_info): + conds = np.zeros((len(info['regress'][0]['val']),3)) + for c in range(3): + conds[:,c]=np.array(info['regress'][c]['val']) + regress_task_raw = np.sum(conds,1) # 3 conditions in this task + regress_task = regress_task_raw/np.max(regress_task_raw) # rescaled to 0:1 + + ppi_aparc_timeseries = np.genfromtxt(ppi_aparc_timeseries_file[idx]) + ppi_timeseries = ppi_aparc_timeseries[:,14] # 14= right amygdala roi_list.index('ctx-lh-medialorbitofrontal') + regress_phys = ppi_timeseries + + regress_interact = regress_task * regress_phys + + session_info_ppi[idx]['regress'][0]['val'] = regress_task + session_info_ppi[idx]['regress'][1]['val'] = regress_phys + session_info_ppi[idx]['regress'][2]['val'] = regress_interact + + return session_info_ppi + + model_ppi = pe.Node(niu.Function(input_names=['session_info','ppi_aparc_timeseries_file'], + output_names=['session_info_ppi'], + function=model_ppi_func), + name='model_ppi') + wf.connect(datasource_timeseries, 'aparc_timeseries_file', model_ppi, 'ppi_aparc_timeseries_file') + else: + print "not setting up ppi code + +############ def check_behav_list(behav, run_id, conds): import six import numpy as np @@ -892,21 +963,46 @@ def check_behav_list(behav, run_id, conds): wf.connect(taskname, 'task_name', contrastgen, 'task_name') wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts') - wf.connect([(preproc, art, [('outputspec.motion_parameters', - 'realignment_parameters'), - ('outputspec.realigned_files', - 'realigned_files'), - ('outputspec.mask', 'mask_file')]), - (preproc, modelspec, [('outputspec.highpassed_files', - 'functional_runs'), - ('outputspec.motion_parameters', - 'realignment_parameters')]), - (art, modelspec, [('outlier_files', 'outlier_files')]), - (modelspec, modelfit, [('session_info', - 'inputspec.session_info')]), - (preproc, modelfit, [('outputspec.highpassed_files', - 'inputspec.functional_data')]) - ]) + if ppi_flag: + print "Connecting ppi blocks" + wf.connect([(preproc, art, [('outputspec.motion_parameters', + 'realignment_parameters'), + ('outputspec.realigned_files', + 'realigned_files'), + ('outputspec.mask', 'mask_file')]), + (preproc, modelspec, [('outputspec.highpassed_files', + 'functional_runs'), + ('outputspec.motion_parameters', + 'realignment_parameters')]), + (art, modelspec, [('outlier_files', 'outlier_files')]), + (modelspec, model_ppi, [('session_info', + 'session_info')]), + (model_ppi, modelfit, [('session_info_ppi', + 'inputspec.session_info')]), + (preproc, modelfit, [('outputspec.highpassed_files', + 'inputspec.functional_data')]) + ]) + + else: + print "Not connecting ppi nodes" + wf.connect([(preproc, art, [('outputspec.motion_parameters', + 'realignment_parameters'), + ('outputspec.realigned_files', + 'realigned_files'), + ('outputspec.mask', 'mask_file')]), + (preproc, modelspec, [('outputspec.highpassed_files', + 'functional_runs'), + ('outputspec.motion_parameters', + 'realignment_parameters')]), + (art, modelspec, [('outlier_files', 'outlier_files')]), + (modelspec, modelfit, [('session_info', + 'inputspec.session_info')]), + (preproc, modelfit, [('outputspec.highpassed_files', + 'inputspec.functional_data')]) + ]) + + + # Comute TSNR on realigned data regressing polynomials 
upto order 2 tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr') @@ -1210,6 +1306,8 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): help="Run id list: 1,2 or 1 or 2") parser.add_argument("--crashdump_dir", dest="crashdump_dir", help="Crashdump dir", default=None) + parser.add_argument("--ppi_flag", dest="ppi_flag", + help="Perform ppi for voice project", default=False) args = parser.parse_args() outdir = args.outdir @@ -1243,7 +1341,8 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): subjects_dir=args.subjects_dir, target=args.target_file, session_id=args.session_id, - run_id=args.run_id) + run_id=args.run_id, + ppi_flag=args.ppi_flag) #wf.config['execution']['remove_unnecessary_outputs'] = False wf.config['execution']['poll_sleep_duration'] = 2 wf.base_dir = work_dir From 2055e24c35852b7da74a10c5e1eb85758b812923 Mon Sep 17 00:00:00 2001 From: gr21783 Date: Sat, 29 Oct 2016 09:15:25 -0400 Subject: [PATCH 13/30] Enh: add SpecifySparseModel option --- subject_level/fmri_ants_bids.py | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index 2f26435..3d32818 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -605,6 +605,8 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id, session_id=None, ru with open(json_info, 'rt') as fp: data = json.load(fp) TR = data['RepetitionTime'] + delay = data['global']['const']['CsaSeries.MrPhoenixProtocol.lDelayTimeInTR']/1000000. + TA = TR - delay else: task_scan_key = os.path.join(base_dir, 'code', 'scan_key.txt') if os.path.exists(task_scan_key): @@ -619,7 +621,7 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id, session_id=None, ru print '==================================== process run list ======================' print run_list_process - return run_list_process, conds[0], TR + return run_list_process, conds[0], TR, TA """ @@ -646,7 +648,7 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, hpcutoff=120., use_derivatives=True, fwhm=6.0, subjects_dir=None, target=None, session_id=None, - run_id=None): + run_id=None, sparse_flag=False): """Analyzes an open fmri dataset Parameters @@ -702,7 +704,7 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir', 'task_id', 'model_id', 'session_id', 'run_id'], - output_names=['run_id', 'conds', 'TR'], + output_names=['run_id', 'conds', 'TR', 'TA'], function=get_subjectinfo), name='subjectinfo') #subjinfo.inputs.base_dir = None ## update with argumentparse eventually @@ -857,9 +859,15 @@ def get_contrasts(contrast_file, task_name, conds): iterfield=['realigned_files', 'realignment_parameters', 'mask_file'], name="art") + if sparse_flag: + modelspec = pe.Node(interface=model.SpecifySparseModel(), + name="modelspec") + modelspec.inputs.stimuli_as_impulses=False #GAC, added but not sure if this is relevant + modelspec.inputs.model_hrf=True # GAC added 2/8/2015 + else: + modelspec = pe.Node(interface=model.SpecifyModel(), + name="modelspec") - modelspec = pe.Node(interface=model.SpecifyModel(), - name="modelspec") modelspec.inputs.input_units = 'secs' def check_behav_list(behav, run_id, conds): @@ -878,6 +886,8 @@ def check_behav_list(behav, run_id, conds): name='reshape_behav') wf.connect(subjinfo, 'TR', modelspec, 'time_repetition') + if sparse_flag: + wf.connect(subjinfo, 'TA', modelspec, 'time_acquisition') 
wf.connect(datasource, 'behav', reshape_behav, 'behav') wf.connect(subjinfo, 'run_id', reshape_behav, 'run_id') wf.connect(subjinfo, 'conds', reshape_behav, 'conds') @@ -1208,6 +1218,8 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): help="Session id, ses-1") parser.add_argument("--run_id", dest="run_id", default=None, help="Run id list: 1,2 or 1 or 2") + parser.add_argument("--sparse_flag", dest="sparse_flag", default=False, + help="Default False, use nipype.algorithms.modelgen.SpecifyModel. If True, use SpecifySparseModel instead.") parser.add_argument("--crashdump_dir", dest="crashdump_dir", help="Crashdump dir", default=None) @@ -1243,7 +1255,8 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): subjects_dir=args.subjects_dir, target=args.target_file, session_id=args.session_id, - run_id=args.run_id) + run_id=args.run_id, + sparse_flag=args.sparse_flag) #wf.config['execution']['remove_unnecessary_outputs'] = False wf.config['execution']['poll_sleep_duration'] = 2 wf.base_dir = work_dir From b7b359fa9fd2779a28bdfe145bca472855e164e6 Mon Sep 17 00:00:00 2001 From: Greg Ciccarelli Date: Sat, 29 Oct 2016 10:46:18 -0400 Subject: [PATCH 14/30] Enh: add sparse model check if using ppi --- subject_level/fmri_ants_bids.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index 6e24499..5ba7c98 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -873,7 +873,7 @@ def get_contrasts(contrast_file, task_name, conds): modelspec.inputs.input_units = 'secs' ############ - if ppi_flag: + if ppi_flag and sparse_flag: print "setting up all the ppi code" # Ported from K. Sitek mit openfmri, obidssparse branch # subject_id sub-voice854 @@ -891,7 +891,7 @@ def subtract_1(run_id): 'task_id', 'model_id'], outfields=['aparc_timeseries_file']), name='datasource_timeseries') - datasource_timeseries.inputs.base_directory = '/om/project/voice/processedData/l1analysis/l1output_2016102214' + datasource_timeseries.inputs.base_directory = '/om/project/voice/processedData/l1analysis/l1output_2016102908' datasource_timeseries.inputs.template = '*' datasource_timeseries.inputs.sort_filelist=True datasource_timeseries.inputs.field_template = {'aparc_timeseries_file': ('model%02d/task%03d/%s/timeseries/' @@ -919,11 +919,11 @@ def model_ppi_func(session_info,ppi_aparc_timeseries_file): for c in range(3): conds[:,c]=np.array(info['regress'][c]['val']) regress_task_raw = np.sum(conds,1) # 3 conditions in this task - regress_task = regress_task_raw/np.max(regress_task_raw) # rescaled to 0:1 + regress_task = regress_task_raw/np.max(np.abs(regress_task_raw)) # rescaled to 0:1 ppi_aparc_timeseries = np.genfromtxt(ppi_aparc_timeseries_file[idx]) ppi_timeseries = ppi_aparc_timeseries[:,14] # 14= right amygdala roi_list.index('ctx-lh-medialorbitofrontal') - regress_phys = ppi_timeseries + regress_phys = ppi_timeseries/np.max(np.abs(ppi_timeseries)) regress_interact = regress_task * regress_phys @@ -938,6 +938,8 @@ def model_ppi_func(session_info,ppi_aparc_timeseries_file): function=model_ppi_func), name='model_ppi') wf.connect(datasource_timeseries, 'aparc_timeseries_file', model_ppi, 'ppi_aparc_timeseries_file') + elif ppi_flag and not sparse_flag: + print "PPI code requires session_info format as returned by SpecifySparseModel else: print "not setting up ppi code From 0846aba0689d22ea92a974ae23f85e47c7efe0e3 Mon Sep 17 00:00:00 2001 From: Greg Ciccarelli Date: Sun, 30 Oct 2016 
09:00:45 -0400 Subject: [PATCH 15/30] Fix: correct sparse model and ppi flag arg parsing --- subject_level/fmri_ants_bids.py | 27 +++++++++++++++++++++------ 1 file changed, 21 insertions(+), 6 deletions(-) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index 5ba7c98..f802267 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -648,6 +648,7 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, hpcutoff=120., use_derivatives=True, fwhm=6.0, subjects_dir=None, target=None, session_id=None, + run_id=None, ppi_flag=False, sparse_flag=False): @@ -862,6 +863,7 @@ def get_contrasts(contrast_file, task_name, conds): 'mask_file'], name="art") if sparse_flag: + print "Using sparse model" modelspec = pe.Node(interface=model.SpecifySparseModel(), name="modelspec") modelspec.inputs.stimuli_as_impulses=False #GAC, added but not sure if this is relevant @@ -939,9 +941,9 @@ def model_ppi_func(session_info,ppi_aparc_timeseries_file): name='model_ppi') wf.connect(datasource_timeseries, 'aparc_timeseries_file', model_ppi, 'ppi_aparc_timeseries_file') elif ppi_flag and not sparse_flag: - print "PPI code requires session_info format as returned by SpecifySparseModel + print "PPI code requires session_info format as returned by SpecifySparseModel" else: - print "not setting up ppi code + print "not setting up ppi code" ############ def check_behav_list(behav, run_id, conds): @@ -1245,8 +1247,9 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): ('summary_file', 'qa.tsnr.@summary')])]) wf.connect([(get_roi_mean, datasink, [('avgwf_txt_file', 'copes.roi'), ('summary_file', 'copes.roi.@summary')])]) - wf.connect(sampleaparc, 'summary_file', datasink, 'timeseries.aparc.@summary') - wf.connect(sampleaparc, 'avgwf_txt_file', datasink, 'timeseries.aparc') + if ppi_flag: + wf.connect(sampleaparc, 'summary_file', datasink, 'timeseries.aparc.@summary') + wf.connect(sampleaparc, 'avgwf_txt_file', datasink, 'timeseries.aparc') wf.connect(registration, 'outputspec.out_reg_file', datasink, 'qa.bbregister') wf.connect([(splitfunc, datasink, @@ -1343,6 +1346,18 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): args.run_id = None elif args.run_id is not None: args.run_id = [int(i) for i in args.run_id.split(',')] + + if args.sparse_flag: + if args.sparse_flag=='True': + sparse_flag = True + else: + sparse_flag = False + if args.ppi_flag: + if args.ppi_flag=='True': + ppi_flag = True + else: + ppi_flag = False + wf = analyze_openfmri_dataset(data_dir=os.path.abspath(args.datasetdir), subject=args.subject, @@ -1357,8 +1372,8 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): target=args.target_file, session_id=args.session_id, run_id=args.run_id, - ppi_flag=args.ppi_flag, - sparse_flag=args.sparse_flag) + ppi_flag=ppi_flag, + sparse_flag=sparse_flag) #wf.config['execution']['remove_unnecessary_outputs'] = False wf.config['execution']['poll_sleep_duration'] = 2 From d9ee8a7a8ac4cc9fb9333d94e1b6df32ba8c2c34 Mon Sep 17 00:00:00 2001 From: Greg Ciccarelli Date: Wed, 2 Nov 2016 08:34:11 -0400 Subject: [PATCH 16/30] Fix: add ppi directory and roi flags. Fix timeseries writeout which had been accidentally removed. 
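For readers following the PPI port, the regressor construction that this patch parameterizes reduces to the sketch below. It is a minimal illustration only, assuming the condition regressors and the seed-region timeseries have already been extracted; build_ppi_regressors is an illustrative name, not a function in this script.

    import numpy as np

    def build_ppi_regressors(cond_vals, seed_ts):
        # cond_vals: (n_timepoints, n_conditions) task regressors from session_info
        # seed_ts:   (n_timepoints,) seed-region timeseries, i.e. the
        #            aparc+aseg_warped_avgwf.txt column selected via ppi_roi
        #            (looked up by name in summary.stats)
        task_raw = cond_vals.sum(axis=1)                  # collapse conditions into one task regressor
        regress_task = task_raw / np.max(np.abs(task_raw))    # rescale (as of PATCH 14)
        regress_phys = seed_ts / np.max(np.abs(seed_ts))
        regress_interact = regress_task * regress_phys    # the PPI interaction term
        return regress_task, regress_phys, regress_interact

    # e.g.: t, p, i = build_ppi_regressors(np.random.randn(240, 3), np.random.randn(240))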
--- subject_level/fmri_ants_bids.py | 54 ++++++++++++++++++++++++--------- 1 file changed, 40 insertions(+), 14 deletions(-) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index f802267..b3d8789 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -649,8 +649,10 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, fwhm=6.0, subjects_dir=None, target=None, session_id=None, run_id=None, - ppi_flag=False, - sparse_flag=False): + sparse_flag=False, + ppi_flag=False, + ppi_roi=None, + ppi_base_directory=None): """Analyzes an open fmri dataset @@ -893,7 +895,7 @@ def subtract_1(run_id): 'task_id', 'model_id'], outfields=['aparc_timeseries_file']), name='datasource_timeseries') - datasource_timeseries.inputs.base_directory = '/om/project/voice/processedData/l1analysis/l1output_2016102908' + datasource_timeseries.inputs.base_directory = ppi_base_directory datasource_timeseries.inputs.template = '*' datasource_timeseries.inputs.sort_filelist=True datasource_timeseries.inputs.field_template = {'aparc_timeseries_file': ('model%02d/task%03d/%s/timeseries/' @@ -905,18 +907,33 @@ def subtract_1(run_id): wf.connect(infosource, 'task_id', datasource_timeseries, 'task_id') wf.connect(sub_1, 'run_id_0index', datasource_timeseries, 'run_id') # but will it be 1 off? indexed by 0 - def model_ppi_func(session_info,ppi_aparc_timeseries_file): + def model_ppi_func(session_info,ppi_aparc_timeseries_file, ppi_roi, ppi_base_directory): # Assumes that previous L1 run had three conditions (e.g., emo happy,sad,neutral) # These get summed together to create the single task regressor # The ppi_aparc_timeseries file contains the physiological regressors. Pick one for the seed region # The column labels for the regions in the aparc+aseg_warped_avgwf.txt file are in the summary.stats file + # ppi_roi (string) 'ctx-lh-medialorbitofrontal'. Seed region. 
+ # ppi_base_directory (string) path to L1 analysis from which seed waveforms will be pulled + print "model ppi function" print session_info import numpy as np from copy import copy + import pandas as pd + import os + session_info_ppi = copy(session_info) for idx,info in enumerate(session_info): + + # Read in lookup table to map string roi to index in waveform table + # Assumes that the lookup table is the same for all the aparc timeseries + df = pd.read_csv(os.path.join(os.path.split(ppi_aparc_timeseries_file[idx])[0], + 'summary.stats'), skiprows=52, delim_whitespace=True) + df_clean = df.iloc[:,:-2] + df_clean.columns = df.columns[2:] # Fix headers + ppi_idx_roi = df_clean[df_clean['StructName']==ppi_roi].index[0] + conds = np.zeros((len(info['regress'][0]['val']),3)) for c in range(3): conds[:,c]=np.array(info['regress'][c]['val']) @@ -924,7 +941,7 @@ def model_ppi_func(session_info,ppi_aparc_timeseries_file): regress_task = regress_task_raw/np.max(np.abs(regress_task_raw)) # rescaled to 0:1 ppi_aparc_timeseries = np.genfromtxt(ppi_aparc_timeseries_file[idx]) - ppi_timeseries = ppi_aparc_timeseries[:,14] # 14= right amygdala roi_list.index('ctx-lh-medialorbitofrontal') + ppi_timeseries = ppi_aparc_timeseries[:,ppi_idx_roi] # 14= right amygdala roi_list.index('ctx-lh-medialorbitofrontal') regress_phys = ppi_timeseries/np.max(np.abs(ppi_timeseries)) regress_interact = regress_task * regress_phys @@ -935,15 +952,18 @@ def model_ppi_func(session_info,ppi_aparc_timeseries_file): return session_info_ppi - model_ppi = pe.Node(niu.Function(input_names=['session_info','ppi_aparc_timeseries_file'], - output_names=['session_info_ppi'], - function=model_ppi_func), - name='model_ppi') + model_ppi = pe.Node(niu.Function(input_names=['session_info','ppi_aparc_timeseries_file', + 'ppi_roi', 'ppi_base_directory'], + output_names=['session_info_ppi'], + function=model_ppi_func), + name='model_ppi') + model_ppi.inputs.ppi_roi = ppi_roi + model_ppi.inputs.ppi_base_directory = ppi_base_directory wf.connect(datasource_timeseries, 'aparc_timeseries_file', model_ppi, 'ppi_aparc_timeseries_file') elif ppi_flag and not sparse_flag: print "PPI code requires session_info format as returned by SpecifySparseModel" else: - print "not setting up ppi code" + print "Not setting up PPI code" ############ def check_behav_list(behav, run_id, conds): @@ -1247,9 +1267,8 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): ('summary_file', 'qa.tsnr.@summary')])]) wf.connect([(get_roi_mean, datasink, [('avgwf_txt_file', 'copes.roi'), ('summary_file', 'copes.roi.@summary')])]) - if ppi_flag: - wf.connect(sampleaparc, 'summary_file', datasink, 'timeseries.aparc.@summary') - wf.connect(sampleaparc, 'avgwf_txt_file', datasink, 'timeseries.aparc') + wf.connect(sampleaparc, 'summary_file', datasink, 'timeseries.aparc.@summary') + wf.connect(sampleaparc, 'avgwf_txt_file', datasink, 'timeseries.aparc') wf.connect(registration, 'outputspec.out_reg_file', datasink, 'qa.bbregister') wf.connect([(splitfunc, datasink, @@ -1326,6 +1345,11 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): help="Crashdump dir", default=None) parser.add_argument("--ppi_flag", dest="ppi_flag", help="Perform ppi for voice project", default=False) + parser.add_argument("--ppi_roi", dest="ppi_roi", + help="PPI roi seed region string from summary.txt\n Right-Thalamus-Proper", default=None) + parser.add_argument("--ppi_base_directory", dest="ppi_base_directory", + help="L1 analysis base directory from which seed waveform will be taken for 
PPI\n" \ + + "/om/project/voice/processedData/l1analysis/l1output_2016102908", default=None) args = parser.parse_args() outdir = args.outdir @@ -1372,8 +1396,10 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): target=args.target_file, session_id=args.session_id, run_id=args.run_id, + sparse_flag=sparse_flag, ppi_flag=ppi_flag, - sparse_flag=sparse_flag) + ppi_roi=args.ppi_roi, + ppi_base_directory=args.ppi_base_directory) #wf.config['execution']['remove_unnecessary_outputs'] = False wf.config['execution']['poll_sleep_duration'] = 2 From 26700bb8d73f5295510219c76083fc4d34648b9b Mon Sep 17 00:00:00 2001 From: Greg Ciccarelli Date: Tue, 15 Nov 2016 08:51:20 -0500 Subject: [PATCH 17/30] Enh: sink realigned, smoothed, highpassed and just realigned files for dynamic causal modeling. --- subject_level/fmri_ants_bids.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index b3d8789..ca5a39a 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -1280,6 +1280,8 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): wf.connect(registration, 'outputspec.transformed_mean', datasink, 'mean.mni') wf.connect(registration, 'outputspec.func2anat_transform', datasink, 'xfm.mean2anat') wf.connect(registration, 'outputspec.anat2target_transform', datasink, 'xfm.anat2target') + wf.connect(preproc, 'outputspec.highpassed_files', datasink, 'qa.highpass') + wf.connect(preproc, 'outputspec.realigned_files', datasink, 'qa.realigned') """ Set processing parameters From 3384d41b19488fad182566dcea0d86c4ea055cc3 Mon Sep 17 00:00:00 2001 From: Greg Ciccarelli Date: Tue, 15 Nov 2016 14:13:03 -0500 Subject: [PATCH 18/30] Fix: set modelfit inputspec.bases to None for sparse model. --- subject_level/fmri_ants_bids.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index ca5a39a..0856398 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -1290,7 +1290,12 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): preproc.inputs.inputspec.fwhm = fwhm gethighpass.inputs.hpcutoff = hpcutoff modelspec.inputs.high_pass_filter_cutoff = hpcutoff - modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivatives}} + if sparse_flag: + print("Setting modelfit.inputs.inputspec.bases to None.") + modelfit.inputs.inputspec.bases = {'none': None} + else: + print("Setting modelfit.inputs.inputspec.bases to dgamma.") + modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivatives}} modelfit.inputs.inputspec.model_serial_correlations = True modelfit.inputs.inputspec.film_threshold = 1000 From f0f2d6341be1fc2185fd546cad871fab6776d306 Mon Sep 17 00:00:00 2001 From: Greg Ciccarelli Date: Mon, 12 Dec 2016 07:13:58 -0500 Subject: [PATCH 19/30] Fix: update print statement as python 3.x format for nipype. Add explicit permutation test number to gp palm. Update TSNR import module source for latest nipype. 
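As context for the PALM change below: with the explicit permutation count added here, the group-level command assembled by run_palm is equivalent to this sketch. The file names and cluster threshold are placeholders, not paths used by the script.

    from nipype.interfaces.base import CommandLine

    # Placeholders stand in for the cope/design/contrast/group files that
    # run_palm receives; the -n 10000 permutation count is what this patch adds.
    cmd = ("palm -i {cope_file} -m {mask_file} -d {design_file} -t {contrast_file} "
           "-eb {group_file} -T -C {cluster_threshold} -Cstat extent -fdr "
           "-noniiclass -twotail -logp -zstat -n 10000")
    cl = CommandLine(cmd.format(cope_file='cope1.nii.gz', mask_file='mask.nii.gz',
                                design_file='design.mat', contrast_file='design.con',
                                group_file='design.grp', cluster_threshold=3.09))
    # cl.run()  # requires PALM on the PATH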
--- group_level/group_multregress_bids.py | 10 +++++----- subject_level/fmri_ants_bids.py | 24 ++++++++++++------------ 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/group_level/group_multregress_bids.py b/group_level/group_multregress_bids.py index 9b369b5..690cf19 100755 --- a/group_level/group_multregress_bids.py +++ b/group_level/group_multregress_bids.py @@ -31,7 +31,7 @@ import os from nipype import config -config.enable_provenance() +#config.enable_provenance() from nipype import Workflow, Node, MapNode, Function from nipype import DataGrabber, DataSink @@ -127,7 +127,7 @@ def run_palm(cope_file, design_file, contrast_file, group_file, mask_file, from glob import glob from nipype.interfaces.base import CommandLine cmd = ("palm -i {cope_file} -m {mask_file} -d {design_file} -t {contrast_file} -eb {group_file} -T " - "-C {cluster_threshold} -Cstat extent -fdr -noniiclass -twotail -logp -zstat") + "-C {cluster_threshold} -Cstat extent -fdr -noniiclass -twotail -logp -zstat -n 10000") cl = CommandLine(cmd.format(cope_file=cope_file, mask_file=mask_file, design_file=design_file, contrast_file=contrast_file, group_file=group_file, cluster_threshold=cluster_threshold)) @@ -176,8 +176,8 @@ def group_multregress_openfmri(dataset_dir, model_id=None, task_id=None, l1outpu wk.connect(info, 'model_id', dg, 'model_id') wk.connect(info, 'task_id', dg, 'task_id') - print '------------' - print dg + print('------------') + print(dg) model = Node(MultipleRegressDesign(), name='l2model') model.inputs.groups = groups @@ -343,7 +343,7 @@ def group_multregress_openfmri(dataset_dir, model_id=None, task_id=None, l1outpu behav_file=args.behav_file, group_contrast_file=args.group_contrast_file) wf.config['execution']['poll_sleep_duration'] = args.sleep - + wf.config['execution']['job_finished_timeout'] = 5 if not (args.crashdump_dir is None): wf.config['execution']['crashdump_dir'] = args.crashdump_dir diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index 0856398..a946874 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -13,7 +13,7 @@ """ from nipype import config -config.enable_provenance() +#config.enable_provenance() import six @@ -25,7 +25,7 @@ import nipype.algorithms.rapidart as ra import nipype.interfaces.fsl as fsl import nipype.interfaces.ants as ants -from nipype.algorithms.misc import TSNR +from nipype.algorithms.confounds import TSNR from nipype.interfaces.c3 import C3dAffineTool import nipype.interfaces.io as nio import nipype.interfaces.utility as niu @@ -619,8 +619,8 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id, session_id=None, ru else: run_list_process = run_id - print '==================================== process run list ======================' - print run_list_process + print('==================================== process run list ======================') + print(run_list_process) return run_list_process, conds[0], TR, TA @@ -865,7 +865,7 @@ def get_contrasts(contrast_file, task_name, conds): 'mask_file'], name="art") if sparse_flag: - print "Using sparse model" + print("Using sparse model") modelspec = pe.Node(interface=model.SpecifySparseModel(), name="modelspec") modelspec.inputs.stimuli_as_impulses=False #GAC, added but not sure if this is relevant @@ -878,7 +878,7 @@ def get_contrasts(contrast_file, task_name, conds): ############ if ppi_flag and sparse_flag: - print "setting up all the ppi code" + print("setting up all the ppi code") # Ported from K. 
Sitek mit openfmri, obidssparse branch # subject_id sub-voice854 def subtract_1(run_id): @@ -915,8 +915,8 @@ def model_ppi_func(session_info,ppi_aparc_timeseries_file, ppi_roi, ppi_base_dir # ppi_roi (string) 'ctx-lh-medialorbitofrontal'. Seed region. # ppi_base_directory (string) path to L1 analysis from which seed waveforms will be pulled - print "model ppi function" - print session_info + print("model ppi function") + print(session_info) import numpy as np from copy import copy @@ -961,9 +961,9 @@ def model_ppi_func(session_info,ppi_aparc_timeseries_file, ppi_roi, ppi_base_dir model_ppi.inputs.ppi_base_directory = ppi_base_directory wf.connect(datasource_timeseries, 'aparc_timeseries_file', model_ppi, 'ppi_aparc_timeseries_file') elif ppi_flag and not sparse_flag: - print "PPI code requires session_info format as returned by SpecifySparseModel" + print("PPI code requires session_info format as returned by SpecifySparseModel") else: - print "Not setting up PPI code" + print("Not setting up PPI code") ############ def check_behav_list(behav, run_id, conds): @@ -999,7 +999,7 @@ def check_behav_list(behav, run_id, conds): wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts') if ppi_flag: - print "Connecting ppi blocks" + print("Connecting ppi blocks") wf.connect([(preproc, art, [('outputspec.motion_parameters', 'realignment_parameters'), ('outputspec.realigned_files', @@ -1019,7 +1019,7 @@ def check_behav_list(behav, run_id, conds): ]) else: - print "Not connecting ppi nodes" + print("Not connecting ppi nodes") wf.connect([(preproc, art, [('outputspec.motion_parameters', 'realignment_parameters'), ('outputspec.realigned_files', From 0e586888d2b96e265dfec36e001b8c07af4d69c2 Mon Sep 17 00:00:00 2001 From: Greg Ciccarelli Date: Sun, 15 Jan 2017 08:54:10 -0500 Subject: [PATCH 20/30] Enh: add datsink sub for realigned folder. --- subject_level/fmri_ants_bids.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index a946874..8203924 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -1215,7 +1215,8 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): subs.append(('tsnr/model%03d/task%03d/' % (model_id, task_id), 'tsnr/')) subs.append(('model/model%03d/task%03d/' % (model_id, task_id), 'model/')) subs.append(('motion/model%03d/task%03d/' % (model_id, task_id), 'motion/')) - subs.append(('copes/roi/model%03d/task%03d/' % (model_id, task_id), 'copes/roi/')) + subs.append(('copes/roi/model%03d/task%03d/' % (model_id, task_id), 'copes/roi/')) + subs.append(('realigned/model%03d/task%03d/' % (model_id, task_id), 'realigned/')) subs.append(('_output_warped_image', '_anat2target')) subs.append(('median_flirt_brain_mask', 'median_brain_mask')) subs.append(('median_bbreg_brain_mask', 'median_brain_mask')) From 164b0efbf360c9af1b4e45f572953525929c7794 Mon Sep 17 00:00:00 2001 From: gr21783 Date: Wed, 15 Feb 2017 20:12:12 -0500 Subject: [PATCH 21/30] Fix: correct sparse model None option syntax. 
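Background for the one-line fix in this patch: nipype's FSL Level1Design expects its bases input to map a basis-function name to a dictionary of options, so the flat {'none': None} form introduced in PATCH 18 fails validation. A minimal sketch of the selection logic as it stands after this fix (a sketch, not a listing verified against every nipype release):

    if sparse_flag:
        # sparse acquisition: SpecifySparseModel already builds the regressors,
        # so no extra convolution basis is applied at the FSL level
        modelfit.inputs.inputspec.bases = {'none': {'none': None}}
    else:
        # continuous acquisition: double-gamma HRF, optionally with
        # temporal derivatives
        modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivatives}}
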
--- subject_level/fmri_ants_bids.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index 8203924..3958bee 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -1293,7 +1293,7 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): modelspec.inputs.high_pass_filter_cutoff = hpcutoff if sparse_flag: print("Setting modelfit.inputs.inputspec.bases to None.") - modelfit.inputs.inputspec.bases = {'none': None} + modelfit.inputs.inputspec.bases = {'none': {'none': None}} else: print("Setting modelfit.inputs.inputspec.bases to dgamma.") modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivatives}} From 8188658f6f39c24c330cbfed9d5423dcbe24aadb Mon Sep 17 00:00:00 2001 From: gr21783 Date: Wed, 15 Feb 2017 20:14:01 -0500 Subject: [PATCH 22/30] Fix: cast float as int for reshape. --- subject_level/fmri_ants_bids.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index 3958bee..c36a805 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -974,7 +974,7 @@ def check_behav_list(behav, run_id, conds): behav = [behav] behav_array = np.array(behav).flatten() num_elements = behav_array.shape[0] - return behav_array.reshape(num_elements/num_conds, num_conds).tolist() + return behav_array.reshape(int(num_elements/num_conds), num_conds).tolist() reshape_behav = pe.Node(niu.Function(input_names=['behav', 'run_id', 'conds'], output_names=['behav'], @@ -1066,8 +1066,8 @@ def sort_copes(copes, varcopes, contrasts): n_runs = len(copes) all_copes = np.array(copes).flatten() all_varcopes = np.array(varcopes).flatten() - outcopes = all_copes.reshape(len(all_copes)/num_copes, num_copes).T.tolist() - outvarcopes = all_varcopes.reshape(len(all_varcopes)/num_copes, num_copes).T.tolist() + outcopes = all_copes.reshape(int(len(all_copes)/num_copes), num_copes).T.tolist() + outvarcopes = all_varcopes.reshape(int(len(all_varcopes)/num_copes), num_copes).T.tolist() return outcopes, outvarcopes, n_runs cope_sorter = pe.Node(niu.Function(input_names=['copes', 'varcopes', From 4494665c2437d12ebe0c080e38123f7eef150b80 Mon Sep 17 00:00:00 2001 From: Greg Ciccarelli Date: Wed, 15 Feb 2017 20:16:46 -0500 Subject: [PATCH 23/30] Fix: handle int cast and sparse model none syntax. Duplicate relative to stand alone fixes. Also, a non functioning model for registration warping. 
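The int casts in the hunks below address Python 3 true division: len(x)/n yields a float even when it divides evenly, and numpy rejects float reshape dimensions. A minimal illustration (array contents are hypothetical):

    import numpy as np

    behav_array = np.array(['c1r1', 'c2r1', 'c1r2', 'c2r2'])  # 2 conds x 2 runs
    num_conds = 2
    # behav_array.reshape(len(behav_array) / num_conds, num_conds) raises
    # "TypeError: 'float' object cannot be interpreted as an integer" on Python 3
    behav = behav_array.reshape(int(len(behav_array) / num_conds), num_conds).tolist()
    # floor division, len(behav_array) // num_conds, would work equally well
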
--- subject_level/fmri_ants_bids.py | 30 ++++++++++++++++++++++-------- 1 file changed, 22 insertions(+), 8 deletions(-) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index 8203924..b79027f 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -969,12 +969,13 @@ def model_ppi_func(session_info,ppi_aparc_timeseries_file, ppi_roi, ppi_base_dir def check_behav_list(behav, run_id, conds): import six import numpy as np + num_conds = len(conds) if isinstance(behav, six.string_types): behav = [behav] behav_array = np.array(behav).flatten() num_elements = behav_array.shape[0] - return behav_array.reshape(num_elements/num_conds, num_conds).tolist() + return behav_array.reshape(int(num_elements/num_conds), num_conds).tolist() reshape_behav = pe.Node(niu.Function(input_names=['behav', 'run_id', 'conds'], output_names=['behav'], @@ -1066,8 +1067,8 @@ def sort_copes(copes, varcopes, contrasts): n_runs = len(copes) all_copes = np.array(copes).flatten() all_varcopes = np.array(varcopes).flatten() - outcopes = all_copes.reshape(len(all_copes)/num_copes, num_copes).T.tolist() - outvarcopes = all_varcopes.reshape(len(all_varcopes)/num_copes, num_copes).T.tolist() + outcopes = all_copes.reshape(int(len(all_copes)/num_copes), num_copes).T.tolist() + outvarcopes = all_varcopes.reshape(int(len(all_varcopes)/num_copes), num_copes).T.tolist() return outcopes, outvarcopes, n_runs cope_sorter = pe.Node(niu.Function(input_names=['copes', 'varcopes', @@ -1127,6 +1128,15 @@ def merge_files(copes, varcopes, zstats): ('zstats', 'zstats'), ])]) wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files') + + reg_func_flag = False + if reg_func_flag: + #Transform the realigned, hpf functionanl data from preproc into MNI152 space + registration_func=registration.clone(name='registration_func') + wf.connect(calc_median, 'median_file', registration_func, 'inputspec.mean_image') + wf.connect(infosource, 'subject_id', registration_func, 'inputspec.subject_id') + wf.connect(preproc, 'outputspec.highpassed_files', registration_func, + 'inputspec.source_files') def split_files(in_files, splits): copes = in_files[:splits[0]] @@ -1283,7 +1293,8 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): wf.connect(registration, 'outputspec.anat2target_transform', datasink, 'xfm.anat2target') wf.connect(preproc, 'outputspec.highpassed_files', datasink, 'qa.highpass') wf.connect(preproc, 'outputspec.realigned_files', datasink, 'qa.realigned') - + #wf.connect(registration_func, 'outputspec.transformed_files', datasink, 'xfm_func') + """ Set processing parameters """ @@ -1293,7 +1304,7 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): modelspec.inputs.high_pass_filter_cutoff = hpcutoff if sparse_flag: print("Setting modelfit.inputs.inputspec.bases to None.") - modelfit.inputs.inputspec.bases = {'none': None} + modelfit.inputs.inputspec.bases = {'none': {'none': None}} else: print("Setting modelfit.inputs.inputspec.bases to dgamma.") modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivatives}} @@ -1410,7 +1421,9 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): ppi_base_directory=args.ppi_base_directory) #wf.config['execution']['remove_unnecessary_outputs'] = False - wf.config['execution']['poll_sleep_duration'] = 2 + wf.config['execution']['poll_sleep_duration'] = 20 #args.sleep + wf.config['execution']['job_finished_timeout'] = 60 + wf.base_dir = work_dir if not (args.crashdump_dir is None): @@ -1419,8 +1432,9 @@ 
def get_subs(subject_id, conds, run_id, model_id, task_id): if args.plugin_args: wf.run(args.plugin, plugin_args=eval(args.plugin_args)) else: - #wf.run('SLURM', plugin_args={'sbatch_args': '-p om_interactive -N1 -c1','max_jobs':40}) - wf.run(args.plugin) + print('-- no sbatch args --') + wf.run('SLURM', plugin_args={'sbatch_args': '-N1 -c1','max_jobs':10}) + #wf.run(args.plugin) From 8bea62ed4d0841ab661934f9f22b9305d8754c15 Mon Sep 17 00:00:00 2001 From: Greg Ciccarelli Date: Sat, 18 Feb 2017 12:23:49 -0500 Subject: [PATCH 24/30] Enh: create L1 proc graph. --- subject_level/fmri_ants_bids.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index b79027f..344cd96 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -1436,6 +1436,6 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): wf.run('SLURM', plugin_args={'sbatch_args': '-N1 -c1','max_jobs':10}) #wf.run(args.plugin) - + wf.write_graph(graph2use='colored', format='svg', simple_form=True) From 4946b8a2c5ff1ffbc434d45309946c2693e04554 Mon Sep 17 00:00:00 2001 From: Greg Ciccarelli Date: Wed, 1 Mar 2017 16:30:40 -0500 Subject: [PATCH 25/30] WIP: update palm. --- group_level/group_multregress_bids.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/group_level/group_multregress_bids.py b/group_level/group_multregress_bids.py index 690cf19..246fbb8 100755 --- a/group_level/group_multregress_bids.py +++ b/group_level/group_multregress_bids.py @@ -127,7 +127,7 @@ def run_palm(cope_file, design_file, contrast_file, group_file, mask_file, from glob import glob from nipype.interfaces.base import CommandLine cmd = ("palm -i {cope_file} -m {mask_file} -d {design_file} -t {contrast_file} -eb {group_file} -T " - "-C {cluster_threshold} -Cstat extent -fdr -noniiclass -twotail -logp -zstat -n 10000") + "-C {cluster_threshold} -Cstat extent -fdr -noniiclass -twotail -zstat -n 1000 -logp -ee -ise -nouncorrected") cl = CommandLine(cmd.format(cope_file=cope_file, mask_file=mask_file, design_file=design_file, contrast_file=contrast_file, group_file=group_file, cluster_threshold=cluster_threshold)) @@ -342,12 +342,16 @@ def group_multregress_openfmri(dataset_dir, model_id=None, task_id=None, l1outpu sub_list_file=args.sub_list_file, behav_file=args.behav_file, group_contrast_file=args.group_contrast_file) - wf.config['execution']['poll_sleep_duration'] = args.sleep - wf.config['execution']['job_finished_timeout'] = 5 + wf.config['execution']['poll_sleep_duration'] = 20 #args.sleep + wf.config['execution']['job_finished_timeout'] = 1000 if not (args.crashdump_dir is None): wf.config['execution']['crashdump_dir'] = args.crashdump_dir if args.plugin_args: + print('-unlimited jobs-') wf.run(args.plugin, plugin_args=eval(args.plugin_args)) else: - wf.run(args.plugin) + #wf.run(args.plugin) + print('--limiting jobs') + #wf.run('SLURM', plugin_args={'sbatch_args': '-N1 -c1','max_jobs':10}) + wf.run(plugin='MultiProc', plugin_args={'n_procs' : 10}) From 93446942c10fdf9cbdc2512865f6f7e73d7fa4c7 Mon Sep 17 00:00:00 2001 From: Greg Ciccarelli Date: Wed, 1 Mar 2017 16:31:11 -0500 Subject: [PATCH 26/30] WIP: try smooth by surface, not working. 
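One plausible route to surface-constrained smoothing would be FreeSurfer's mris_volsmooth via nipype's freesurfer.Smooth interface, which smooths cortical voxels along the surface and everything else in the volume. A minimal sketch under that assumption; the file names are illustrative only, with the bbregister .dat following the pattern in the commented-out lines further down:

    import nipype.interfaces.freesurfer as fs

    smooth = fs.Smooth()
    smooth.inputs.in_file = 'bold_dtype_mcf.nii.gz'     # realigned functional run (hypothetical)
    smooth.inputs.reg_file = 'median_bbreg_sub-01.dat'  # functional-to-anatomical bbregister file
    smooth.inputs.surface_fwhm = 6.0                    # mm, smoothing along the cortical surface
    smooth.inputs.vol_fwhm = 6.0                        # mm, volume smoothing for non-cortical voxels
    res = smooth.run()
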
--- subject_level/fmri_ants_bids.py | 27 ++++++++++++++++++++++----- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index 344cd96..b92f77f 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -33,6 +33,8 @@ create_modelfit_workflow, create_fixed_effects_flow) +from nipype.workflows.fmri.fsl import create_featreg_preproc_smooth # GAC + from nipype import LooseVersion from nipype import Workflow, Node, MapNode from nipype.interfaces import (fsl, Function, ants, freesurfer) @@ -670,7 +672,9 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, Load nipype workflows """ - preproc = create_featreg_preproc(whichvol='first') + #preproc = create_featreg_preproc(whichvol='first') # GAC + preproc = create_featreg_preproc_smooth.create_featreg_preproc_smooth(whichvol='first') #GAC + modelfit = create_modelfit_workflow() fixed_fx = create_fixed_effects_flow() if subjects_dir: @@ -1129,9 +1133,10 @@ def merge_files(copes, varcopes, zstats): ])]) wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files') + reg_func_flag = False if reg_func_flag: - #Transform the realigned, hpf functionanl data from preproc into MNI152 space + #Transform the realigned, hpf functional data from preproc into MNI152 space registration_func=registration.clone(name='registration_func') wf.connect(calc_median, 'median_file', registration_func, 'inputspec.mean_image') wf.connect(infosource, 'subject_id', registration_func, 'inputspec.subject_id') @@ -1299,7 +1304,17 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): Set processing parameters """ - preproc.inputs.inputspec.fwhm = fwhm + #preproc.inputs.inputspec.fwhm = fwhm #GAC + #preproc.inputs.inputspec.vol_fwhm = 6.1234 #GAC + + + #wf.connect(registration, 'outputspec.out_reg_file', preproc, 'inputspec.reg_file') #GAC, creates a nonDAG graph + #preproc.inputs.inputspec.reg_file= os.path.join('/om/scratch/Fri/gr21783/voice/20170214_smooth', + # '05cf281c050be3da4eecf3bc6e8aac1b/model457/task005/854/bids', + # 'registration/_model_id_457_subject_id_sub-voice854_task_id_5', + # 'bbregister/median_bbreg_sub-voice854.dat') + + gethighpass.inputs.hpcutoff = hpcutoff modelspec.inputs.high_pass_filter_cutoff = hpcutoff if sparse_flag: @@ -1429,13 +1444,15 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): if not (args.crashdump_dir is None): wf.config['execution']['crashdump_dir'] = args.crashdump_dir + wf.write_graph(graph2use='flat', format='svg', simple_form=True) + if args.plugin_args: wf.run(args.plugin, plugin_args=eval(args.plugin_args)) else: print('-- no sbatch args --') - wf.run('SLURM', plugin_args={'sbatch_args': '-N1 -c1','max_jobs':10}) + wf.run('SLURM', plugin_args={'sbatch_args': '-N1 -c1 --qos=gablab','max_jobs':10}) #wf.run(args.plugin) - wf.write_graph(graph2use='colored', format='svg', simple_form=True) + wf.write_graph(graph2use='flat', format='svg', simple_form=True) From e63ad8ab549b5ee668b61649228693046a3ba7dc Mon Sep 17 00:00:00 2001 From: Greg Ciccarelli Date: Mon, 20 Mar 2017 13:11:21 -0400 Subject: [PATCH 27/30] Enh: remove nouncorrected option in palm. Need to do per Anderson email in order to get valid p values for fdr. 
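Because the PALM invocation is assembled by string formatting, the flag set is easy to misread; the sketch below annotates the options as they stand after this patch (meanings paraphrased from the PALM documentation, offered as a reading aid rather than an authoritative reference):

    # palm -i cope -m mask -d design -t contrasts -eb blocks ...
    palm_flags = [
        '-T',             # TFCE statistic
        '-C 3.09',        # cluster-forming threshold, filled from cluster_threshold
        '-Cstat extent',  # cluster statistic: spatial extent
        '-fdr',           # also write FDR-adjusted p-values
        '-noniiclass',    # bypass the NIFTI class when reading/writing images
        '-twotail',       # two-tailed tests
        '-zstat',         # convert test statistics to z scores
        '-n 10000',       # explicit number of permutations
        '-logp',          # save p-values as -log10(p)
        '-ee', '-ise',    # allow both permutations and sign flips
    ]
    # '-nouncorrected' is dropped in this patch: per the commit message, it must
    # go for the FDR-adjusted p-values to be valid
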
--- group_level/group_multregress_bids.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/group_level/group_multregress_bids.py b/group_level/group_multregress_bids.py index 246fbb8..340f114 100755 --- a/group_level/group_multregress_bids.py +++ b/group_level/group_multregress_bids.py @@ -127,7 +127,7 @@ def run_palm(cope_file, design_file, contrast_file, group_file, mask_file, from glob import glob from nipype.interfaces.base import CommandLine cmd = ("palm -i {cope_file} -m {mask_file} -d {design_file} -t {contrast_file} -eb {group_file} -T " - "-C {cluster_threshold} -Cstat extent -fdr -noniiclass -twotail -zstat -n 1000 -logp -ee -ise -nouncorrected") + "-C {cluster_threshold} -Cstat extent -fdr -noniiclass -twotail -zstat -n 10000 -logp -ee -ise") cl = CommandLine(cmd.format(cope_file=cope_file, mask_file=mask_file, design_file=design_file, contrast_file=contrast_file, group_file=group_file, cluster_threshold=cluster_threshold)) From c7c203d0d48612f5f302d9e50636660eb6a4567a Mon Sep 17 00:00:00 2001 From: Greg Ciccarelli Date: Mon, 20 Mar 2017 13:19:59 -0400 Subject: [PATCH 28/30] Enh: revert by hand back to a pre smooth workflow that should work and write out correct graph picture. --- subject_level/fmri_ants_bids.py | 26 ++++---------------------- 1 file changed, 4 insertions(+), 22 deletions(-) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index b92f77f..43fba92 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -33,8 +33,6 @@ create_modelfit_workflow, create_fixed_effects_flow) -from nipype.workflows.fmri.fsl import create_featreg_preproc_smooth # GAC - from nipype import LooseVersion from nipype import Workflow, Node, MapNode from nipype.interfaces import (fsl, Function, ants, freesurfer) @@ -672,9 +670,7 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, Load nipype workflows """ - #preproc = create_featreg_preproc(whichvol='first') # GAC - preproc = create_featreg_preproc_smooth.create_featreg_preproc_smooth(whichvol='first') #GAC - + preproc = create_featreg_preproc(whichvol='first') modelfit = create_modelfit_workflow() fixed_fx = create_fixed_effects_flow() if subjects_dir: @@ -1133,10 +1129,9 @@ def merge_files(copes, varcopes, zstats): ])]) wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files') - reg_func_flag = False if reg_func_flag: - #Transform the realigned, hpf functional data from preproc into MNI152 space + #Transform the realigned, hpf functionanl data from preproc into MNI152 space registration_func=registration.clone(name='registration_func') wf.connect(calc_median, 'median_file', registration_func, 'inputspec.mean_image') wf.connect(infosource, 'subject_id', registration_func, 'inputspec.subject_id') @@ -1304,17 +1299,7 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): Set processing parameters """ - #preproc.inputs.inputspec.fwhm = fwhm #GAC - #preproc.inputs.inputspec.vol_fwhm = 6.1234 #GAC - - - #wf.connect(registration, 'outputspec.out_reg_file', preproc, 'inputspec.reg_file') #GAC, creates a nonDAG graph - #preproc.inputs.inputspec.reg_file= os.path.join('/om/scratch/Fri/gr21783/voice/20170214_smooth', - # '05cf281c050be3da4eecf3bc6e8aac1b/model457/task005/854/bids', - # 'registration/_model_id_457_subject_id_sub-voice854_task_id_5', - # 'bbregister/median_bbreg_sub-voice854.dat') - - + preproc.inputs.inputspec.fwhm = fwhm gethighpass.inputs.hpcutoff = hpcutoff modelspec.inputs.high_pass_filter_cutoff = hpcutoff if 
sparse_flag: @@ -1444,15 +1429,12 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): if not (args.crashdump_dir is None): wf.config['execution']['crashdump_dir'] = args.crashdump_dir - wf.write_graph(graph2use='flat', format='svg', simple_form=True) - if args.plugin_args: wf.run(args.plugin, plugin_args=eval(args.plugin_args)) else: print('-- no sbatch args --') - wf.run('SLURM', plugin_args={'sbatch_args': '-N1 -c1 --qos=gablab','max_jobs':10}) + wf.run('SLURM', plugin_args={'sbatch_args': '-N1 -c1','max_jobs':10}) #wf.run(args.plugin) wf.write_graph(graph2use='flat', format='svg', simple_form=True) - From 6f51f52046e3cfa4f141bc464b8e251b460e1c1d Mon Sep 17 00:00:00 2001 From: Greg Ciccarelli Date: Mon, 20 Mar 2017 19:13:25 -0400 Subject: [PATCH 29/30] Enh: run locally on a node. --- subject_level/fmri_ants_bids.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/subject_level/fmri_ants_bids.py b/subject_level/fmri_ants_bids.py index 43fba92..5ec7fac 100755 --- a/subject_level/fmri_ants_bids.py +++ b/subject_level/fmri_ants_bids.py @@ -1433,8 +1433,8 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): wf.run(args.plugin, plugin_args=eval(args.plugin_args)) else: print('-- no sbatch args --') - wf.run('SLURM', plugin_args={'sbatch_args': '-N1 -c1','max_jobs':10}) - #wf.run(args.plugin) + #wf.run('SLURM', plugin_args={'sbatch_args': '-N1 -c1','max_jobs':10}) + wf.run(args.plugin) wf.write_graph(graph2use='flat', format='svg', simple_form=True) From 11c43dd2494a296ef258a56f405a322d13d6c99b Mon Sep 17 00:00:00 2001 From: Greg Ciccarelli Date: Wed, 29 Mar 2017 10:03:54 -0400 Subject: [PATCH 30/30] Enh: increase palm slurm timelimit. --- group_level/group_multregress_bids.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/group_level/group_multregress_bids.py b/group_level/group_multregress_bids.py index 340f114..879bfa1 100755 --- a/group_level/group_multregress_bids.py +++ b/group_level/group_multregress_bids.py @@ -212,7 +212,7 @@ def group_multregress_openfmri(dataset_dir, model_id=None, task_id=None, l1outpu name='palm') palm.inputs.cluster_threshold = 3.09 palm.inputs.mask_file = mask_file - palm.plugin_args = {'sbatch_args': '-p om_all_nodes -N1 -c2 --mem=10G', 'overwrite': True} + palm.plugin_args = {'sbatch_args': '-p om_all_nodes -N1 -c2 --mem=10G --time=23:00:00', 'overwrite': True} wk.connect(model, 'design_mat', palm, 'design_file') wk.connect(model, 'design_con', palm, 'contrast_file') wk.connect(mergecopes, 'merged_file', palm, 'cope_file') @@ -343,15 +343,17 @@ def group_multregress_openfmri(dataset_dir, model_id=None, task_id=None, l1outpu behav_file=args.behav_file, group_contrast_file=args.group_contrast_file) wf.config['execution']['poll_sleep_duration'] = 20 #args.sleep - wf.config['execution']['job_finished_timeout'] = 1000 + wf.config['execution']['job_finished_timeout'] = 300 if not (args.crashdump_dir is None): wf.config['execution']['crashdump_dir'] = args.crashdump_dir + wf.write_graph(graph2use='flat', format='svg', simple_form=True) + if args.plugin_args: print('-unlimited jobs-') wf.run(args.plugin, plugin_args=eval(args.plugin_args)) else: #wf.run(args.plugin) print('--limiting jobs') - #wf.run('SLURM', plugin_args={'sbatch_args': '-N1 -c1','max_jobs':10}) - wf.run(plugin='MultiProc', plugin_args={'n_procs' : 10}) + wf.run('SLURM', plugin_args={'sbatch_args': '-N1 -c1 --time=23:00:00','max_jobs':150}) + #wf.run(plugin='MultiProc', plugin_args={'n_procs' : 10})
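
Two execution settings recur across these final patches and are best read together with the longer --time limit above: a minimal sketch of the resulting configuration (values as committed; semantics paraphrased from nipype's execution config):

    # seconds between scheduler status polls
    wf.config['execution']['poll_sleep_duration'] = 20
    # seconds to keep waiting for a finished job's output files to appear,
    # which absorbs latency on slow shared filesystems
    wf.config['execution']['job_finished_timeout'] = 300
    wf.run('SLURM', plugin_args={
        'sbatch_args': '-N1 -c1 --time=23:00:00',  # one node, one core, 23 h walltime per job
        'max_jobs': 150,                           # cap on concurrently queued jobs
    })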