diff --git a/docs/source/Install.rst b/docs/source/Install.rst index f3fbe1b..fa44173 100644 --- a/docs/source/Install.rst +++ b/docs/source/Install.rst @@ -13,7 +13,6 @@ To get started with the pipeline, please install the following requirements: * Brain Connectivity Toolbox for Matlab (https://sites.google.com/site/bctnet/Home/functions/BCT.zip?attredirects=0) * xcpEngine dsn files (https://github.com/PennBBL/xcpEngine/tree/master/designs) * ASHS (https://sites.google.com/site/hipposubfields/) - * LAYNII (https://github.com/layerfMRI/LAYNII) * bidsphysio (https://github.com/cbinyu/bidsphysio) You will need to install git, to clone the build recipe files from this repository. @@ -31,17 +30,14 @@ This following commands can be used to build these required images for the pipel singularity build mriqc-0.16.1.sif docker://poldracklab/mriqc:0.16.1 singularity build heudiconv-0.9.sif docker://nipy/heudiconv:0.9.0 - singularity build fmriprep-v20.2.6.sif docker://nipreps/fmriprep:21.0.0 - singularity build xcpengine-1.2.3.sif docker://pennbbl/xcpengine:1.2.3 - singularity build qsiprep-v0.14.3.sif docker://pennbbl/qsiprep:0.14.3 - #for reorient_fslstd to prepare for SCFSL_GPU - singularity build qsiprep-v0.14.3.sif docker://pennbbl/qsiprep:0.15.1 + singularity build fmriprep-v21.0.1.sif docker://nipreps/fmriprep:21.0.1 + singularity build xcpengine-1.2.4.sif docker://pennbbl/xcpengine:1.2.4 + singularity build qsiprep-v0.15.1.sif docker://pennbbl/qsiprep:0.15.1 # See README.md for more information on # provided def files for ubuntu-jq, python3 - SINGULARITY_NOHTTPS=1 singularity build ubuntu-jq-0.1.sif defjq + SINGULARITY_NOHTTPS=1 singularity build ubuntu-jqjo.sif jqjo.def SINGULARITY_NOHTTPS=1 singularity build python3.sif defpy3 - SINGULARITY_NOHTTPS=1 singularity build laynii-2.0.0.sif layniidef SINGULARITY_NOHTTPS=1 singularity build ashs-1.0.0.sif ashsdef #Start Docker registry for localhost diff --git a/docs/source/Sub-Pipelines.rst b/docs/source/Sub-Pipelines.rst 
index 0d43d1d..e4012c1 100644 --- a/docs/source/Sub-Pipelines.rst +++ b/docs/source/Sub-Pipelines.rst @@ -110,11 +110,11 @@ Using the structural images and fieldmaps, we perform diffusion-weighted-image p .. code-block:: bash - docker run -v ${IMAGEDIR}:/imgdir -v ${stmpdir}:/paulscratch -v ${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.3.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives /data/bids/ --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} + docker run -v ${IMAGEDIR}:/imgdir -v ${stmpdir}:/paulscratch -v ${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.3.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives /data/bids/ --freesurfer-input /data/bids/derivatives/fmriprep/freesurfer --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} .. code-block:: bash - singularity run --cleanenv --bind ${IMAGEDIR}:/imgdir,${stmpdir}:/paulscratch,${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.3.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} + singularity run --cleanenv --bind ${IMAGEDIR}:/imgdir,${stmpdir}:/paulscratch,${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.3.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --freesurfer-input /data/bids/derivatives/fmriprep/freesurfer --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} *Reconstruction* @@ -123,7 +123,7 @@ Constrained Spherical Deconvolution-based multi-shell multi-tissue w/ SIFT2 via .. 
code-block:: bash #run reconstruction workflow in QSIPrep - docker run -v ${IMAGEDIR}:/imgdir -v ${stmpdir}:/paulscratch -v ${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.3.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec mrtrix_multishell_msmt --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} + docker run -v ${IMAGEDIR}:/imgdir -v ${stmpdir}:/paulscratch -v ${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.3.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec mrtrix_multishell_msmt_ACT-hsvs --freesurfer-input /data/bids/derivatives/fmriprep/freesurfer --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} #calculate network-based statistics and save NxN matrices from .net docker run -v ${scripts}/matlab:/work -v ${scripts}/2019_03_03_BCT:/bctoolbox -v ${projDir}/bids/derivatives/qsirecon:/data ${IMAGEDIR}/matlab-R2019a.sif /work/qsinbs.sh "$subject" "$sesname" @@ -131,7 +131,7 @@ Constrained Spherical Deconvolution-based multi-shell multi-tissue w/ SIFT2 via .. 
code-block:: bash #run reconstruction workflow in QSIPrep - singularity run --cleanenv --bind ${IMAGEDIR}:/imgdir,${stmpdir}:/paulscratch,${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.3.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec mrtrix_multishell_msmt --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} + singularity run --cleanenv --bind ${IMAGEDIR}:/imgdir,${stmpdir}:/paulscratch,${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.3.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec mrtrix_multishell_msmt_ACT-hsvs --freesurfer-input /data/bids/derivatives/fmriprep/freesurfer --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} #calculate network-based statistics and save NxN matrices from .net singularity run --cleanenv --bind ${scripts}/matlab:/work,${scripts}/2019_03_03_BCT:/bctoolbox,${projDir}/bids/derivatives/qsirecon:/data ${IMAGEDIR}/matlab-R2019a.sif /work/qsinbs.sh "$subject" "$sesname" @@ -141,7 +141,7 @@ Generalized q-Sampling imaging via DSI Studio .. 
code-block:: bash #run reconstruction workflow in QSIPrep - docker run -v ${IMAGEDIR}:/imgdir -v ${stmpdir}:/paulscratch -v ${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.3.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec dsi_studio_gqi --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} + docker run -v ${IMAGEDIR}:/imgdir -v ${stmpdir}:/paulscratch -v ${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.3.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec dsi_studio_gqi --freesurfer-input /data/bids/derivatives/fmriprep/freesurfer --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} #get network-based statistics to a csv from .mat docker run -v ${scripts}:/scripts -v ${projDir}/bids/derivatives/qsirecon/${subject}/${sesname}/dwi:/datain -W /datain ${IMAGEDIR}/pylearn.sif /scripts/gqimetrics.py @@ -150,7 +150,7 @@ Generalized q-Sampling imaging via DSI Studio .. 
code-block:: bash #run reconstruction workflow in QSIPrep - singularity run --cleanenv --bind ${IMAGEDIR}:/imgdir,${stmpdir}:/paulscratch,${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.3.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec dsi_studio_gqi --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} + singularity run --cleanenv --bind ${IMAGEDIR}:/imgdir,${stmpdir}:/paulscratch,${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.3.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec dsi_studio_gqi --freesurfer-input /data/bids/derivatives/fmriprep/freesurfer --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} #get network-based statistics to a csv from .mat singularity run --cleanenv --bind ${scripts}:/scripts,${projDir}/bids/derivatives/qsirecon/${subject}/${sesname}/dwi:/datain -W /datain ${IMAGEDIR}/pylearn.sif /scripts/gqimetrics.py @@ -160,7 +160,7 @@ NODDI via AMICO python implementation .. 
code-block:: bash #run reconstruction workflow in QSIPrep - docker run -v ${IMAGEDIR}:/imgdir -v ${stmpdir}:/paulscratch -v ${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.3.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec amico_noddi --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} + docker run -v ${IMAGEDIR}:/imgdir -v ${stmpdir}:/paulscratch -v ${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.3.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec amico_noddi --freesurfer-input /data/bids/derivatives/fmriprep/freesurfer --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} #ROI-wise stats docker run -v ${scripts}:/scripts -v ${projDir}/bids/derivatives/qsirecon/${subject}/${sesname}/dwi:/datanoddi ${IMAGEDIR}/neurodoc.sif /scripts/noddi_stats.sh "$subject" "$sesname" @@ -168,7 +168,7 @@ NODDI via AMICO python implementation .. 
code-block:: bash #run reconstruction workflow in QSIPrep - singularity run --cleanenv --bind ${IMAGEDIR}:/imgdir,${stmpdir}:/paulscratch,${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.3.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec amico_noddi --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} + singularity run --cleanenv --bind ${IMAGEDIR}:/imgdir,${stmpdir}:/paulscratch,${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.3.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec amico_noddi --freesurfer-input /data/bids/derivatives/fmriprep/freesurfer --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} #ROI-wise stats singularity run --cleanenv --bind ${scripts}:/scripts,${projDir}/bids/derivatives/qsirecon/${subject}/${sesname}/dwi:/datanoddi ${IMAGEDIR}/neurodoc.sif /scripts/noddi_stats.sh "$subject" "$sesname" @@ -187,12 +187,12 @@ QSIPrep preprocessing reorient to FSL space: .. code-block:: bash #run reconstruction workflow in QSIPrep - docker run -v ${IMAGEDIR}:/imgdir -v ${stmpdir}:/paulscratch -v ${projDir}:/data ${IMAGEDIR}/qsiprep-v0.15.1.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec reorient_fslstd --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} + docker run -v ${IMAGEDIR}:/imgdir -v ${stmpdir}:/paulscratch -v ${projDir}:/data ${IMAGEDIR}/qsiprep-v0.15.1.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec reorient_fslstd --freesurfer-input /data/bids/derivatives/fmriprep/freesurfer --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} .. 
code-block:: bash #run reconstruction workflow in QSIPrep - singularity run --cleanenv --bind ${IMAGEDIR}:/imgdir,${stmpdir}:/paulscratch,${projDir}:/data ${IMAGEDIR}/qsiprep-v0.15.1.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec reorient_fslstd --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} + singularity run --cleanenv --bind ${IMAGEDIR}:/imgdir,${stmpdir}:/paulscratch,${projDir}:/data ${IMAGEDIR}/qsiprep-v0.15.1.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec reorient_fslstd --freesurfer-input /data/bids/derivatives/fmriprep/freesurfer --output-resolution 1.6 -w /paulscratch participant --participant-label ${subject} CUDA 10.2-accelerated FDT pipeline diff --git a/docs/source/Usage.rst b/docs/source/Usage.rst index 4156ec4..360c611 100644 --- a/docs/source/Usage.rst +++ b/docs/source/Usage.rst @@ -44,6 +44,7 @@ Running FreeSurfer-informed FSL DTI tractography on GPU *Docker* .. code-block:: bash + # Running SCFSL GPU tractography docker exec --gpus all -e LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/cuda-10.2/lib64 \ -v /path/to/freesurfer/license.txt:/opt/freesurfer/license.txt \ @@ -52,6 +53,7 @@ Running FreeSurfer-informed FSL DTI tractography on GPU *Singularity* .. 
code-block:: bash + # Running SCFSL GPU tractography SINGULARITY_ENVLD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/cuda-10.2/lib64 \ singularity exec --nv -B /path/to/freesurfer/license.txt:/opt/freesurfer/license.txt,/path/project/bids:/data \ diff --git a/docs/source/conf.py b/docs/source/conf.py index 7436f56..153dd95 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -21,7 +21,7 @@ author = 'Paul B Camacho, Evan D Anderson, Nishant Bhamidipati, Aaron T Anderson, Benjamin Zimmerman, Matthew S Moore, Ezra Paul Winter-Nelson, Maximillian K Egan, Brad P Sutton' # The full version, including alpha/beta/rc tags -release = '0.1.0' +release = '0.3.0' # -- General configuration --------------------------------------------------- @@ -53,4 +53,4 @@ # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] -pygments_style = 'sphinx' \ No newline at end of file +pygments_style = 'sphinx' diff --git a/jsoncrawler.sh b/jsoncrawler.sh index a107e6b..0098468 100644 --- a/jsoncrawler.sh +++ b/jsoncrawler.sh @@ -1,7 +1,7 @@ #!/bin/bash # #single-sub single-session script for adding intended for statements to fmap jsons before running BIDS apps -# +# requires JQ #usage: jsoncrawler.sh {bids directory} {session} {subject} # #pbc @@ -10,7 +10,6 @@ sesname=$2 seslen=${#sesname} sub=$3 -intend='"IntendedFor": ' seshs=($1/${sub}/${sesname}) for sesh in ${seshs[@]}; do @@ -20,21 +19,15 @@ restfuncs=(./func/*rest*bold*nii.gz) for restfuncmap in ${restfuncmaps[@]}; do img=${restfuncs:1} - jq .IntendedFor $restfuncmap > tmpj + jq '.IntendedFor' $restfuncmap > tmpj if [[ $(cat tmpj) == "${sesname}${img}" ]]; then echo "IntendedFor found as "${sesname}$img"" exit 0 else - printf " " > test.txt - printf $intend >> test.txt - printf " " >> test.txt - printf '"' >> test.txt - printf "${sesname}""$img" >> test.txt - printf '",' >> test.txt - echo "" >> test.txt - echo "" > tmp - awk 'NR==1{a=$0}NR==FNR{next}FNR==32{print a}1' test.txt 
$restfuncmap >> tmp && mv tmp $restfuncmap + # replace the following with jq or jo + jq --arg intended "${sesname}${img}" '.IntendedFor |= $intended' "$restfuncmap" > tmp + mv tmp $restfuncmap fi done @@ -48,22 +41,8 @@ taskfuncs=`find ./func -maxdepth 2 -type f \( -iname "*nback*bold*nii.gz" -a -no task="*nback*" if [[ "$img" == "$task" ]]; then -# jq .IntendedFor $taskfuncmap > tmpj -# if [[ "`cat tmpj`" == "${sesname}$img" ]]; -# then -# echo "IntendedFor found as "${sesname}$img"" -# exit 0 -# else - printf " " > test.txt - printf $intend >> test.txt - printf " " >> test.txt - printf '"' >> test.txt - printf "${sesname}""$img" >> test.txt - printf '",' >> test.txt - echo "" >> test.txt - echo "" > tmp - awk 'NR==1{a=$0}NR==FNR{next}FNR==32{print a}1' test.txt $taskfuncmap >> tmp && mv tmp $taskfuncmap - + jq --arg intended "${sesname}${img}" '.IntendedFor |= $intended' "$taskfuncmap" > tmp + mv tmp $taskfuncmap fi done @@ -72,25 +51,16 @@ dwis=(./dwi/*dwi*nii.gz) for dwimap in ${dwimaps[@]}; do img=${dwis:1} - jq .IntendedFor $dwimap > tmpj + jq '.IntendedFor' $dwimap > tmpj if [[ $(cat tmpj) == "${sesname}$img" ]] then echo "IntendedFor found as "${sesname}$img"" exit 0 else - printf " " > test.txt - printf $intend >> test.txt - printf " " >> test.txt - printf '"' >> test.txt - printf "${sesname}""$img" >> test.txt - printf '",' >> test.txt - echo "" >> test.txt - echo "" > tmp - awk 'NR==1{a=$0}NR==FNR{next}FNR==32{print a}1' test.txt $dwimap >> tmp && mv tmp $dwimap + jq --arg intended "${sesname}${img}" '.IntendedFor |= $intended' "$dwimap" > tmp + mv tmp $dwimap fi done done -rm -f test.txt -rm -f tmp rm -f tmpj diff --git a/project_doc.sh b/project_doc.sh index 53f80c9..63439dd 100644 --- a/project_doc.sh +++ b/project_doc.sh @@ -1,112 +1,63 @@ #!/bin/bash # -#script for generating a project xml file +#script for generating a project json file # -# usage project_doc.sh {project} {subject} {session} {process called (heudiconv, mriqc, fmriprep, xcpengine, mridti, scfsl)} {is this a new file?
(yes/no)} +# usage project_doc.sh {project} {subject} {session} {process called (heudiconv, mriqc, fmriprep, xcpengine, mridti, scfsl)} {is this a new file? (yes/no)} {app version} # # to be called as part of a pipeline for documentation of processing tool calls -# -# maybe set up to run on batches with reading in the txt files from batchprocd.sh to the project xml project=$1 subject=$2 session=$3 proc=$4 newFile=$5 +version=$6 + +#pull in versions from sif names var in script? #Get date and time NOW=$(date +"%m-%d-%Y-%T") -xmlfile="${project}_${subject}_${session}.xml" +jsonfile="${project}_${subject}_${session}.json" if [ "${newFile}" == "yes" ]; then - echo '' > "$xmlfile" - echo '' >> "$xmlfile" - + jo log="pipeline processing" project="${project}" > $jsonfile fi #if statements for each process (heudiconv, mriqc, fmriprep, xcpengine, mridti, scfsl) if [ "${proc}" == "heudiconv" ]; then - echo '' >> "$xmlfile" - echo 'nipy/heudiconv' >> "$xmlfile" - echo 'HeuDiConv' >> "$xmlfile" - echo '0.9.0' >> "$xmlfile" - echo "${NOW}" >> "$xmlfile" - echo ' Heuristic conversion from DICOMs to BIDS-compatible nii.gz images with JSON sidecars.' 
>> "$xmlfile" - echo 'Chris Rordens dcm2niix' >> "$xmlfile" - echo '' >> "$xmlfile" + cat $jsonfile | jq '.step1["process"] |= "HeuDiConv"' | jq '.step1["Docker"] |= "nipy/heudiconv:'${version}'"' | jq '.step1["date"] |= '"${NOW}"'' | jq '.step1["description"] |= "Heuristic-based conversion from DICOMs to BIDS-compatible nii.gz images with JSON sidecars with dcm2niix"' > tmp + mv tmp $jsonfile elif [ "${proc}" == "mriqc" ]; then - head -n -1 "$xmlfile" > temp.txt ; mv temp.txt "$xmlfile" - echo '' >> "$xmlfile" - echo 'poldracklab/mriqc' >> "$xmlfile" - echo 'MRIQC' >> "$xmlfile" - echo '0.16.1' >> "$xmlfile" - echo "${NOW}" >> "$xmlfile" - echo ' MRIQC extracts no-reference IQMs (image quality metrics) from structural (T1w and T2w) and functional MRI (magnetic resonance imaging) data ' >> "$xmlfile" - echo '' >> "$xmlfile" + cat $jsonfile | jq '.step2["process"] |= "MRIQC"' | jq '.step2["Docker"] |= "poldracklab/mriqc:'${version}'"' | jq '.step2["date"] |= '"${NOW}"'' | jq '.step2["description"] |= "MRIQC extracts no-reference IQMs (image quality metrics) from structural (T1w and T2w) and functional MRI (magnetic resonance imaging) data"' > tmp + mv tmp "$jsonfile" elif [ "${proc}" == "fmriprep" ]; then - head -n -1 "$xmlfile" > temp.txt ; mv temp.txt "$xmlfile" - echo '' >> "$xmlfile" - echo 'niprep/fmriprep' >> "$xmlfile" - echo 'fMRIPrep' >> "$xmlfile" - echo '20.2.1 LTS' >> "$xmlfile" - echo "${NOW}" >> "$xmlfile" - echo 'fMRIPrep is a functional magnetic resonance imaging (fMRI) data preprocessing pipeline that is designed to provide an easily accessible, state-of-the-art interface that is robust to variations in scan acquisition protocols and that requires minimal user input, while providing easily interpretable and comprehensive error and output reporting. It performs basic processing steps (coregistration, normalization, unwarping, noise component extraction, segmentation, skullstripping etc.) 
providing outputs that can be easily submitted to a variety of group level analyses, including task-based or resting-state fMRI, graph theory measures, surface or volume-based statistics, etc.' >> "$xmlfile" - echo '' >> "$xmlfile" + cat $jsonfile | jq '.step3["process"] |= "fMRIPrep"' | jq '.step3["Docker"] |= "nipreps/fmriprep:'${version}'"' | jq '.step3["date"] |= '"${NOW}"'' | jq '.step3["description"] |= "fMRIPrep is a functional magnetic resonance imaging (fMRI) data preprocessing pipeline that is designed to provide an easily accessible, state-of-the-art interface that is robust to variations in scan acquisition protocols and that requires minimal user input, while providing easily interpretable and comprehensive error and output reporting. It performs basic processing steps (coregistration, normalization, unwarping, noise component extraction, segmentation, skullstripping etc.) providing outputs that can be easily submitted to a variety of group level analyses, including task-based or resting-state fMRI, graph theory measures, surface or volume-based statistics, etc."' > tmp + mv tmp "$jsonfile" elif [ "${proc}" == "xcpengine" ]; then - head -n -1 "$xmlfile" > temp.txt ; mv temp.txt "$xmlfile" - echo '' >> "$xmlfile" - echo 'PennBBL/xcpEngine' >> "$xmlfile" - echo 'xcpEngine fc-36p, fc-36p_scrub, fc-aroma' >> "$xmlfile" - echo '1.2.3' >> "$xmlfile" - echo "${NOW}" >> "$xmlfile" - echo ' The XCP imaging pipeline (XCP system) is a free, open-source software package for processing of multimodal neuroimages. The XCP system uses a modular design to deploy analytic routines from leading MRI analysis platforms, including FSL, AFNI, and ANTs.' 
>> "$xmlfile" - echo '' >> "$xmlfile" + cat $jsonfile | jq '.step4["process"] |= "xcpEngine"' | jq '.step4["Docker"] |= "pennbbl/xcpengine:'${version}'"' | jq '.step4["date"] |= '"${NOW}"'' | jq '.step4["description"] |= "The XCP imaging pipeline (XCP system) is a free, open-source software package for processing of multimodal neuroimages. The XCP system uses a modular design to deploy analytic routines from leading MRI analysis platforms, including FSL, AFNI, and ANTs"' > tmp + mv tmp "$jsonfile" elif [ "${proc}" == "mridti" ]; then - head -n -1 "$xmlfile" > temp.txt ; mv temp.txt "$xmlfile" - echo '' >> "$xmlfile" - echo 'mrfil/mridti' >> "$xmlfile" - echo 'MRI DTI with FSL' >> "$xmlfile" - echo '1.0.0' >> "$xmlfile" - echo "${NOW}" >> "$xmlfile" - echo 'Diffusion tensor processing pipeline with FSL bedpostx, probtrackx2.0' >> "$xmlfile" - echo '' >> "$xmlfile" -elif [ "${proc}" == "scfsl" ]; -then - head -n -1 "$xmlfile" > temp.txt ; mv temp.txt "$xmlfile" - echo '' >> "$xmlfile" - echo 'mrfil/scfsl' >> "$xmlfile" - echo 'Structural Connectivity with FSL ' >> "$xmlfile" - echo '1.0.0' >> "$xmlfile" - echo "${NOW}" >> "$xmlfile" - echo ' FSL-based structural connectivity analysis with fibre-tracking between 68 cortical regions, 14 subcortical regions, and the left and right cerebellar cortices defined by the Freesurfer recon-all parcellation using the Desikan-Killiany Atlas' >> "$xmlfile" - echo '' >> "$xmlfile" + cat $jsonfile | jq '.step5["process"] |= "MRIDTI FSL"' | jq '.step5["Docker"] |= "mrfilbi/neurodoc:'${version}'"' | jq '.step5["date"] |= '"${NOW}"'' | jq '.step5["description"] |= "Diffusion tensor processing pipeline with FSL "' > tmp + mv tmp "$jsonfile" elif [ "${proc}" == "qsiprep" ]; then - head -n -1 "$xmlfile" > temp.txt ; mv temp.txt "$xmlfile" - echo '' >> "$xmlfile" - echo 'pennbbl/qsiprep' >> "$xmlfile" - echo 'Diffusion Preprocessing with QSIprep' >> "$xmlfile" - echo '0.12.2' >> "$xmlfile" - echo "${NOW}" >> "$xmlfile" - echo 'The 
preprocessing pipelines are built based on the available BIDS inputs, ensuring that fieldmaps are handled correctly. The preprocessing workflow performs head motion correction, susceptibility distortion correction, MP-PCA denoising, coregistration to T1w images, spatial normalization using ANTs_ and tissue segmentation.' >> "$xmlfile" - echo '' >> "$xmlfile" + #QSIPrep is preferred over mridti due to more complete preprocessing + cat $jsonfile | jq '.step5["process"] |= "QSIPrep preprocessing"' | jq '.step5["Docker"] |= "pennbbl/qsiprep:'${version}'"' | jq '.step5["date"] |= '"${NOW}"'' | jq '.step5["description"] |= "The preprocessing pipelines are built based on the available BIDS inputs, ensuring that fieldmaps are handled correctly. The preprocessing workflow performs head motion correction, susceptibility distortion correction, MP-PCA denoising, coregistration to T1w images, spatial normalization using ANTs_ and tissue segmentation. This requires step 6 to include the reorient_fslstd reconstruction method to use outputs in FSL space!"' > tmp + mv tmp "$jsonfile" elif [ "${proc}" == "qsirecon" ]; then - head -n -1 "$xmlfile" > temp.txt ; mv temp.txt "$xmlfile" - echo '' >> "$xmlfile" - echo 'pennbbl/qsiprep' >> "$xmlfile" - echo 'Tractography with Anatomical Constrains using QSIprep MRtrix implementation ' >> "$xmlfile" - echo '0.13.0RC2' >> "$xmlfile" - echo "${NOW}" >> "$xmlfile" - echo ' This workflow uses the msmt_csd algorithm [Jeurissen2014] to estimate FODs for white matter, gray matter and cerebrospinal fluid using multi-shell acquisitions. The white matter FODs are used for tractography and the T1w segmentation is used for anatomical constraints [Smith2012]. 
' >> "$xmlfile" - echo '' >> "$xmlfile" + cat $jsonfile | jq '.step6["process"] |= "QSIPrep reconstruction"' | jq '.step6["Docker"] |= "pennbbl/qsiprep:'${version}'"' | jq --arg now "${NOW}" '.step6["date"] |= $now' | jq '.step6["description"] |= "QSIPrep reconstructions performed include: mrtrix_multishell_msmt_ACT-hsvs, dsi_studio_gqi, amico_noddi, and reorient_fslstd. More details at https://qsiprep.readthedocs.io/en/latest/reconstruction.html#"' > tmp + mv tmp "$jsonfile" +elif [ "${proc}" == "scfsl" ]; +then + cat $jsonfile | jq '.step7["process"] |= "SCFSL GPU"' | jq '.step7["Docker"] |= "mrfilbi/scfsl_gpu:'${version}'"' | jq --arg now "${NOW}" '.step7["date"] |= $now' | jq '.step7["description"] |= "CUDA-accelerated FSL-based structural connectivity analysis with fibre-tracking between 68 cortical regions, 14 subcortical regions, and the left and right cerebellar cortices defined by the Freesurfer recon-all parcellation using the Desikan-Killiany Atlas"' > tmp + mv tmp "$jsonfile" fi -echo "" >> "$xmlfile" diff --git a/setup/jqjo.def b/setup/jqjo.def new file mode 100644 index 0000000..db7617a --- /dev/null +++ b/setup/jqjo.def @@ -0,0 +1,6 @@ +Bootstrap: docker +From: ubuntu:18.04 + +%post + # Update Ubuntu Software repository + apt update && apt-get update -y && apt-get install -y jq jo nano diff --git a/setup/singularity_image_gen.sh b/setup/singularity_image_gen.sh index d10faee..a8f8aff 100644 --- a/setup/singularity_image_gen.sh +++ b/setup/singularity_image_gen.sh @@ -13,17 +13,16 @@ cd ./singularity_images singularity build mriqc-0.16.1.sif docker://poldracklab/mriqc:0.16.1 singularity build heudiconv-0.9.0.sif docker://nipy/heudiconv:0.9.0 -singularity build fmriprep-20.2.6.sif docker://nipreps/fmriprep:20.2.6 -singularity build xcpengine-1.2.3.sif docker://pennbbl/xcpengine:1.2.3 -singularity build qsiprep-v0.14.3.sif docker://pennbbl/qsiprep:0.14.3 +singularity build fmriprep-21.0.1.sif docker://nipreps/fmriprep:21.0.1 +singularity build xcpengine-1.2.4.sif
docker://pennbbl/xcpengine:1.2.4 singularity build bidsphysio.sif docker://cbinyu/bidsphysio #for reorient_fslstd to prepare for SCFSL_GPU -singularity build qsiprep-v0.14.3.sif docker://pennbbl/qsiprep:0.15.1 +singularity build qsiprep-v0.15.1.sif docker://pennbbl/qsiprep:0.15.1 # See README.md for more information on #provide def files for ubuntu-jq, python3 -sudo SINGULARITY_NOHTTPS=1 singularity build ubuntu-jq-0.1.sif defjq +sudo SINGULARITY_NOHTTPS=1 singularity build ubuntu-jqjo.sif jqjo.def sudo SINGULARITY_NOHTTPS=1 singularity build python3.sif defpy3 sudo SINGULARITY_NOHTTPS=1 singularity build bidscoin.sif bidscoindef sudo SINGULARITY_NOHTTPS=1 singularity build laynii-2.0.0.sif layniidef diff --git a/slurm_proc_latest.sh b/slurm_proc_latest.sh index a80ad82..f62893e 100644 --- a/slurm_proc_latest.sh +++ b/slurm_proc_latest.sh @@ -3,9 +3,9 @@ while getopts :p:s:z:m:f:l:b:t:e: option; do case ${option} in - p) export CLEANPROJECT=$OPTARG ;; - s) export CLEANSESSION=$OPTARG ;; - z) export CLEANSUBJECT=$OPTARG ;; + p) export CLEANPROJECT=$OPTARG ;; + s) export CLEANSESSION=$OPTARG ;; + z) export CLEANSUBJECT=$OPTARG ;; m) export MINQC=$OPTARG ;; f) export fieldmaps=$OPTARG ;; l) export longitudinal=$OPTARG ;; @@ -129,7 +129,7 @@ else #heudiconv echo "Running heudiconv" - ${scripts}/project_doc.sh ${project} ${subject} ${sesname} "heudiconv" "yes" + SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity exec --bind ${projDir}:/data,${scripts}:/scripts ${IMAGEDIR}/ubuntu-jqjo.sif /scripts/project_doc.sh ${project} ${subject} ${sesname} "heudiconv" "yes" "0.9.0" ses=${sesname:4} sub=${subject:4} SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity exec --cleanenv --bind ${projDir}:/datain ${IMAGEDIR}/heudiconv-0.9.0.sif heudiconv -d /datain/{subject}/{session}/dir/SCANS/*/DICOM/*dcm -f /datain/${project}_heuristic.py -o /datain/bids -s ${sub} -ss ${ses} -c dcm2niix -b --overwrite --minmeta @@ -144,7 +144,7 @@ else 
#jsoncrawler.sh runs once, for all sessions if [ "${fieldmaps}" == "yes" ]; then - SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity exec --bind ${projDir}:/data,${scripts}:/scripts ${IMAGEDIR}/ubuntu-jq-0.1.sif /scripts/jsoncrawler.sh /data/bids ${sesname} ${subject} + SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity exec --bind ${projDir}:/data,${scripts}:/scripts ${IMAGEDIR}/ubuntu-jqjo.sif /scripts/jsoncrawler.sh /data/bids ${sesname} ${subject} fi rm ${projDir}/bids/derivatives/${subject}/${sesname}/tmp @@ -172,12 +172,12 @@ else cd $projDir echo "Running mriqc" - ${scripts}/project_doc.sh ${project} ${subject} ${sesname} "mriqc" "no" + SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity exec --bind ${projDir}:/data,${scripts}:/scripts ${IMAGEDIR}/ubuntu-jqjo.sif /scripts/project_doc.sh ${project} ${subject} ${sesname} "mriqc" "no" "0.16.1" NOW=$(date +"%m-%d-%Y-%T") echo "MRIQC started $NOW" >> ${scripts}/fulltimer.txt TEMPLATEFLOW_HOST_HOME=$IMAGEDIR/templateflow export SINGULARITYENV_TEMPLATEFLOW_HOME="/templateflow" - SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity run --bind ${TEMPLATEFLOW_HOST_HOME}:${SINGULARITYENV_TEMPLATEFLOW_HOME},${projDir}/bids:/data,${projDir}/bids/derivatives/mriqc:/out $IMAGEDIR/mriqc-0.16.1.sif /data /out participant --participant-label ${sub} --session-id ${ses} -v --fft-spikes-detector --despike --no-sub + SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity run --bind ${TEMPLATEFLOW_HOST_HOME}:${SINGULARITYENV_TEMPLATEFLOW_HOME},${projDir}/bids:/data,${projDir}/bids/derivatives/mriqc:/out $IMAGEDIR/mriqc-0.16.1.sif /data /out participant --participant-label ${sub} --session-id ${ses} -v --no-sub chmod 2777 -R ${projDir}/bids/derivatives/mriqc NOW=$(date +"%m-%d-%Y-%T") @@ -189,7 +189,6 @@ else #rm -rf ${CACHESING}/* mkdir ${dataqc}/${project} -# cp -R ${projDir}/bids/derivatives/mriqc ${dataqc}/${project}/ NOW=$(date 
+"%m-%d-%Y-%T") echo "fMRIPrep started $NOW" >> ${scripts}/fulltimer.txt @@ -198,13 +197,13 @@ else echo "Running fmriprep on $subject $sesname" #add more details of fMRIPrep arguments if necessary - ${scripts}/project_doc.sh ${project} ${subject} ${sesname} "fmriprep" "no" + SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity exec --bind ${projDir}:/data,${scripts}:/scripts ${IMAGEDIR}/ubuntu-jqjo.sif /scripts/project_doc.sh ${project} ${subject} ${sesname} "fmriprep" "no" "21.0.1" if [ "${longitudinal}" == "yes" ]; then - SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity exec --cleanenv --bind ${TEMPLATEFLOW_HOST_HOME}:${SINGULARITYENV_TEMPLATEFLOW_HOME},$IMAGEDIR/license.txt:/opt/freesurfer/license.txt,$TMPSING:/paulscratch,${projDir}:/datain $IMAGEDIR/fmriprep-20.2.6.sif fmriprep /datain/bids /datain/bids/derivatives participant --participant-label ${subject} --longitudinal --output-spaces {MNI152NLin2009cAsym,T1w,fsnative} --use-aroma -w /paulscratch --fs-license-file /opt/freesurfer/license.txt + SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity exec --cleanenv --bind ${TEMPLATEFLOW_HOST_HOME}:${SINGULARITYENV_TEMPLATEFLOW_HOME},$IMAGEDIR/license.txt:/opt/freesurfer/license.txt,$TMPSING:/paulscratch,${projDir}:/datain $IMAGEDIR/fmriprep-21.0.1.sif fmriprep /datain/bids /datain/bids/derivatives participant --participant-label ${subject} --longitudinal --output-spaces {MNI152NLin2009cAsym,T1w,fsnative} --use-aroma -w /paulscratch --fs-license-file /opt/freesurfer/license.txt elif [ "${longitudinal}" == "no" ]; then - SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity exec --cleanenv --bind ${TEMPLATEFLOW_HOST_HOME}:${SINGULARITYENV_TEMPLATEFLOW_HOME},$IMAGEDIR/license.txt:/opt/freesurfer/license.txt,$TMPSING:/paulscratch,${projDir}:/datain $IMAGEDIR/fmriprep-20.2.6.sif fmriprep /datain/bids /datain/bids/derivatives participant --participant-label ${subject} --output-spaces 
{MNI152NLin2009cAsym,T1w,fsnative} --use-aroma -w /paulscratch --fs-license-file /opt/freesurfer/license.txt + SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity exec --cleanenv --bind ${TEMPLATEFLOW_HOST_HOME}:${SINGULARITYENV_TEMPLATEFLOW_HOME},$IMAGEDIR/license.txt:/opt/freesurfer/license.txt,$TMPSING:/paulscratch,${projDir}:/datain $IMAGEDIR/fmriprep-21.0.1.sif fmriprep /datain/bids /datain/bids/derivatives participant --participant-label ${subject} --output-spaces {MNI152NLin2009cAsym,T1w,fsnative} --use-aroma -w /paulscratch --fs-license-file /opt/freesurfer/license.txt fi @@ -241,10 +240,10 @@ else #xcpEngine 36p - $scripts/project_doc.sh ${project} ${subject} ${sesname} "xcpengine" "no" + SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity exec --bind ${projDir}:/data,${scripts}:/scripts ${IMAGEDIR}/ubuntu-jqjo.sif /scripts/project_doc.sh ${project} ${subject} ${sesname} "xcpengine" "no" "1.2.4" cp ${scripts}/xcpEngineDesigns/*_gh.dsn ${projDir}/ cd ${projDir} - SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity run --cleanenv -B ${projDir}:/data,$TMPSING:/tmpdir $IMAGEDIR/xcpengine-1.2.3.sif -d /data/fc-36p_gh.dsn -c /data/cohort_func_${subject}_${sesname}.csv -o /data/bids/derivatives/xcp/${sesname}/xcp_minimal_func -r /data/bids -i /tmpdir + SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity run --cleanenv -B ${projDir}:/data,$TMPSING:/tmpdir $IMAGEDIR/xcpengine-1.2.4.sif -d /data/fc-36p_gh.dsn -c /data/cohort_func_${subject}_${sesname}.csv -o /data/bids/derivatives/xcp/${sesname}/xcp_minimal_func -r /data/bids -i /tmpdir chmod 2777 -R ${projDir}/bids/derivatives/xcp* mv ${projDir}/bids/derivatives/xcp/${sesname}/xcp_minimal_func/${subject}/*quality.csv ${projDir}/bids/derivatives/xcp/${sesname}/xcp_minimal_func/${subject}/${subject}_${sesname}_quality_fc36p.csv ${scripts}/procd.sh ${project} xcp no ${subject} ${based} @@ -268,9 +267,8 @@ else echo "xcpEngine fc-36p 
despike started $NOW" >> ${scripts}/fulltimer.txt #xcpEngine 36p despike - $scripts/project_doc.sh ${project} ${subject} ${sesname} "xcpengine" "no" cd ${projDir} - SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity run --cleanenv -B ${projDir}:/data,$TMPSING:/tmpdir $IMAGEDIR/xcpengine-1.2.3.sif -d /data/fc-36p_despike_gh.dsn -c /data/cohort_func_${subject}_${sesname}.csv -o /data/bids/derivatives/xcp/${sesname}/xcp_despike -r /data/bids -i /tmpdir + SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity run --cleanenv -B ${projDir}:/data,$TMPSING:/tmpdir $IMAGEDIR/xcpengine-1.2.4.sif -d /data/fc-36p_despike_gh.dsn -c /data/cohort_func_${subject}_${sesname}.csv -o /data/bids/derivatives/xcp/${sesname}/xcp_despike -r /data/bids -i /tmpdir chmod 2777 -R ${projDir}/bids/derivatives/xcp* mv ${projDir}/bids/derivatives/xcp/${sesname}/xcp_despike/${subject}/*quality.csv ${projDir}/bids/derivatives/xcp/${sesname}/xcp_minimal_func/${subject}/${subject}_${sesname}_quality_despike.csv @@ -290,7 +288,7 @@ else #xcpEngine 36p_scrub cd ${projDir} - SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity run --cleanenv -B ${projDir}:/data,$TMPSING:/tmpdir $IMAGEDIR/xcpengine-1.2.3.sif -d /data/fc-36p_scrub_gh.dsn -c /data/cohort_func_${subject}_${sesname}.csv -o /data/bids/derivatives/xcp/${sesname}/xcp_scrub -r /data/bids -i /tmpdir + SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity run --cleanenv -B ${projDir}:/data,$TMPSING:/tmpdir $IMAGEDIR/xcpengine-1.2.4.sif -d /data/fc-36p_scrub_gh.dsn -c /data/cohort_func_${subject}_${sesname}.csv -o /data/bids/derivatives/xcp/${sesname}/xcp_scrub -r /data/bids -i /tmpdir chmod 2777 -R /projects/BICpipeline/Pipeline_Pilot/TestingFork/${project}/bids/derivatives/xcp* mv ${projDir}/bids/derivatives/xcp/${sesname}/xcp_scrub/${subject}/*quality.csv ${projDir}/bids/derivatives/xcp/${sesname}/xcp_minimal_func/${subject}/${subject}_${sesname}_quality_scrub.csv @@ 
-310,9 +308,8 @@ else echo "xcpEngine fc-aroma started $NOW" >> ${scripts}/fulltimer.txt #xcpEngine aroma - $scripts/project_doc.sh ${project} ${subject} ${sesname} "xcpengine" "no" cd ${projDir} - SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity run --cleanenv -B ${projDir}:/data,$TMPSING:/tmpdir $IMAGEDIR/xcpengine-1.2.3.sif -d /data/fc-aroma_gh.dsn -c /data/cohort_func_${subject}_${sesname}.csv -o /data/bids/derivatives/xcp/${sesname}/xcp_minimal_aroma -r /data/bids -i /tmpdir + SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity run --cleanenv -B ${projDir}:/data,$TMPSING:/tmpdir $IMAGEDIR/xcpengine-1.2.4.sif -d /data/fc-aroma_gh.dsn -c /data/cohort_func_${subject}_${sesname}.csv -o /data/bids/derivatives/xcp/${sesname}/xcp_minimal_aroma -r /data/bids -i /tmpdir chmod 2777 -R ${projDir}/bids/derivatives/xcp* mv ${projDir}/bids/derivatives/xcp/${sesname}/xcp_minimal_aroma/${subject}/*quality.csv ${projDir}/bids/derivatives/xcp/${sesname}/xcp_minimal_aroma/${subject}/${subject}_${sesname}_quality_aroma.csv @@ -329,28 +326,24 @@ else #rm -rf ${CACHESING}/* #rm -rf ${TMPSING}/* - - #generate xcpEngine cohorts for a new subject - ${scripts}/func_task_cohort_maker.sh ${subject} ${sesname} yes nback $based ${project} beta - - - #xcpEngine 36p -# $scripts/project_doc.sh ${project} ${subject} ${sesname} "xcpengine" "no" - cp ${scripts}/xcpEngineDesigns/task.dsn ${projDir}/ - cp -R ${scripts}/task.feat ${projDir}/bids/derivatives/ - num=$(echo "$subject" | cut -d- -f2) - #sed -i 's+old-text+new-text+g' input.txt - cd ${projDir}/bids/derivatives/task.feat/ - sed -i 's+/software/fsl-5.0.10-x86_64+/opt/fsl-5.0.10+g' design.fsf - sed -i 's+/shared/mrfil-data/pcamach2/SAY/bids/derivatives/fmriprep/sub-SAY244/ses-A/func/nback1onset.txt+/data/bids/derivatives/task.feat/custom_timing_files/ev1.txt+g' design.fsf - sed -i 
's+/shared/mrfil-data/pcamach2/SAY/bids/derivatives/fmriprep/sub-SAY244/ses-A/func/nback2onset.txt+/data/bids/derivatives/task.feat/custom_timing_files/ev2.txt+g' design.fsf - sed -i 's+/shared/mrfil-data/pcamach2/SAY+/data+g' design.fsf - sed -i "s+SAY244+${num}+g" design.fsf - cd ${projDir} - mkdir ${projDir}/bids/derivatives/xcp/${sesname}/xcp_36p_nback - SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity run --cleanenv -B ${projDir}:/data,$TMPSING:/tmpdir $IMAGEDIR/xcpengine-1.2.3.sif -d /data/task.dsn -c /data/cohort_func_task-nback_${subject}_${sesname}.csv -o /data/bids/derivatives/xcp/${sesname}/xcp_36p_nback -r /data/bids -i /tmpdir - chmod 2777 -R ${projDir}/bids/derivatives/xcp* + #generate xcpEngine cohorts for a new subject + ${scripts}/func_task_cohort_maker.sh ${subject} ${sesname} yes nback $based ${project} beta + #xcpEngine 36p + cp ${scripts}/xcpEngineDesigns/task.dsn ${projDir}/ + cp -R ${scripts}/task.feat ${projDir}/bids/derivatives/ + num=$(echo "$subject" | cut -d- -f2) + #sed -i 's+old-text+new-text+g' input.txt + cd ${projDir}/bids/derivatives/task.feat/ + sed -i 's+/software/fsl-5.0.10-x86_64+/opt/fsl-5.0.10+g' design.fsf + sed -i 's+/shared/mrfil-data/pcamach2/SAY/bids/derivatives/fmriprep/sub-SAY244/ses-A/func/nback1onset.txt+/data/bids/derivatives/task.feat/custom_timing_files/ev1.txt+g' design.fsf + sed -i 's+/shared/mrfil-data/pcamach2/SAY/bids/derivatives/fmriprep/sub-SAY244/ses-A/func/nback2onset.txt+/data/bids/derivatives/task.feat/custom_timing_files/ev2.txt+g' design.fsf + sed -i 's+/shared/mrfil-data/pcamach2/SAY+/data+g' design.fsf + sed -i "s+SAY244+${num}+g" design.fsf + cd ${projDir} + mkdir ${projDir}/bids/derivatives/xcp/${sesname}/xcp_36p_nback + SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity run --cleanenv -B ${projDir}:/data,$TMPSING:/tmpdir $IMAGEDIR/xcpengine-1.2.4.sif -d /data/task.dsn -c /data/cohort_func_task-nback_${subject}_${sesname}.csv -o 
/data/bids/derivatives/xcp/${sesname}/xcp_36p_nback -r /data/bids -i /tmpdir + chmod 2777 -R ${projDir}/bids/derivatives/xcp* if [ -d "${projDir}/bids/${subject}/${sesname}/dwi" ]; then @@ -369,49 +362,52 @@ else mkdir ${stmpdir} chmod 777 -R ${stmpdir} chmod 777 -R ${scachedir} + SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity exec --bind ${projDir}:/data,${scripts}:/scripts ${IMAGEDIR}/ubuntu-jqjo.sif /scripts/project_doc.sh ${project} ${subject} ${sesname} "qsiprep" "no" "0.15.1" NOW=$(date +"%m-%d-%Y-%T") echo "QSIprep started $NOW" >> ${scripts}/fulltimer.txt - SINGULARITY_CACHEDIR=${scachedir} SINGULARITY_TMPDIR=${stmpdir} singularity run --cleanenv --bind ${IMAGEDIR}:/imgdir,${stmpdir}:/paulscratch,${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.2.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --output-resolution 2.5 -w /paulscratch participant --participant-label ${subject} + + SINGULARITY_CACHEDIR=${scachedir} SINGULARITY_TMPDIR=${stmpdir} singularity run --cleanenv --bind ${IMAGEDIR}:/imgdir,${IMAGEDIR}/license.txt:/opt/freesurfer/license.txt,${stmpdir}:/paulscratch,${projDir}:/data ${IMAGEDIR}/qsiprep-v0.15.1.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --freesurfer-input /data/bids/derivatives/fmriprep/freesurfer --output-resolution 2.5 -w /paulscratch participant --participant-label ${subject} chmod 777 -R ${projDir}/bids/derivatives/qsiprep ${scripts}/pdf_printer.sh ${project} ${subject} ${sesname} QSIprep ${based} NOW=$(date +"%m-%d-%Y-%T") echo "QSIprep finished $NOW" >> ${scripts}/fulltimer.txt + + SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity exec --bind ${projDir}:/data,${scripts}:/scripts ${IMAGEDIR}/ubuntu-jqjo.sif /scripts/project_doc.sh ${project} ${subject} ${sesname} "qsirecon" "no" "0.15.1" NOW=$(date +"%m-%d-%Y-%T") echo "QSIprep CSD Recon started $NOW" >> ${scripts}/fulltimer.txt - SINGULARITY_CACHEDIR=${scachedir} 
SINGULARITY_TMPDIR=${stmpdir} singularity run --cleanenv --bind ${IMAGEDIR}:/imgdir,${stmpdir}:/paulscratch,${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.2.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec mrtrix_multishell_msmt --output-resolution 2.5 -w /paulscratch participant --participant-label ${subject} + + SINGULARITY_CACHEDIR=${scachedir} SINGULARITY_TMPDIR=${stmpdir} singularity run --cleanenv --bind ${IMAGEDIR}:/imgdir,${IMAGEDIR}/license.txt:/opt/freesurfer/license.txt,${stmpdir}:/paulscratch,${projDir}:/data ${IMAGEDIR}/qsiprep-v0.15.1.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec mrtrix_multishell_msmt_ACT-hsvs --freesurfer-input /data/bids/derivatives/fmriprep/freesurfer --output-resolution 2.5 -w /paulscratch participant --participant-label ${subject} + NOW=$(date +"%m-%d-%Y-%T") echo "QSIprep CSD Recon finished $NOW" >> ${scripts}/fulltimer.txt chmod 777 -R ${projDir}/bids/derivatives/qsirecon SINGULARITY_CACHEDIR=${scachedir} SINGULARITY_TMPDIR=${stmpdir} singularity run --cleanenv --bind ${scripts}/matlab:/work,${scripts}/2019_03_03_BCT:/bctoolbox,${projDir}/bids/derivatives/qsirecon:/data ${IMAGEDIR}/matlab-R2019a.sif /work/qsinbs.sh "$subject" "$sesname" - ${scripts}/pdf_printer.sh ${projID} ${subject} ${sesname} QSIprepRecon ${based} mv ${projDir}/bids/derivatives/qsirecon/${subject}* ${projDir}/bids/derivatives/qsicsd - - NOW=$(date +"%m-%d-%Y-%T") - echo "QSIprep GQI Recon started $NOW" >> ${scripts}/fulltimer.txt - - SINGULARITY_CACHEDIR=${scachedir} SINGULARITY_TMPDIR=${stmpdir} singularity run --cleanenv --bind ${IMAGEDIR}:/imgdir,${stmpdir}:/paulscratch,${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.3.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec dsi_studio_gqi --output-resolution 2.5 -w 
/paulscratch participant --participant-label ${subject} - NOW=$(date +"%m-%d-%Y-%T") - echo "QSIprep GQI Recon finished $NOW" >> ${scripts}/fulltimer.txt - chmod 777 -R ${projDir}/bids/derivatives/qsirecon + + NOW=$(date +"%m-%d-%Y-%T") + echo "QSIprep GQI Recon started $NOW" >> ${scripts}/fulltimer.txt + SINGULARITY_CACHEDIR=${scachedir} SINGULARITY_TMPDIR=${stmpdir} singularity run --cleanenv --bind ${IMAGEDIR}:/imgdir,${IMAGEDIR}/license.txt:/opt/freesurfer/license.txt,${stmpdir}:/paulscratch,${projDir}:/data ${IMAGEDIR}/qsiprep-v0.15.1.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec dsi_studio_gqi --freesurfer-input /data/bids/derivatives/fmriprep/freesurfer --output-resolution 2.5 -w /paulscratch participant --participant-label ${subject} + NOW=$(date +"%m-%d-%Y-%T") + echo "QSIprep GQI Recon finished $NOW" >> ${scripts}/fulltimer.txt + chmod 777 -R ${projDir}/bids/derivatives/qsirecon - ${scripts}/pdf_printer.sh ${projID} ${subject} ${sesname} QSIprepRecon ${based} + ${scripts}/pdf_printer.sh ${projID} ${subject} ${sesname} QSIprepRecon ${based} mv ${projDir}/bids/derivatives/qsirecon/${subject}* ${projDir}/bids/derivatives/qsigqi - NOW=$(date +"%m-%d-%Y-%T") - echo "QSIprep NODDI AMICO Recon started $NOW" >> ${scripts}/fulltimer.txt - SINGULARITY_CACHEDIR=${scachedir} SINGULARITY_TMPDIR=${stmpdir} singularity run --cleanenv --bind ${IMAGEDIR}:/imgdir,${stmpdir}:/paulscratch,${projDir}:/data ${IMAGEDIR}/qsiprep-v0.14.3.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec amico_noddi --output-resolution 2.5 -w /paulscratch participant --participant-label ${subject} - NOW=$(date +"%m-%d-%Y-%T") - echo "QSIprep NODDI Recon finished $NOW" >> ${scripts}/fulltimer.txt - chmod 777 -R ${projDir}/bids/derivatives/qsirecon + NOW=$(date +"%m-%d-%Y-%T") + echo "QSIprep NODDI AMICO Recon started $NOW" >> 
${scripts}/fulltimer.txt + SINGULARITY_CACHEDIR=${scachedir} SINGULARITY_TMPDIR=${stmpdir} singularity run --cleanenv --bind ${IMAGEDIR}:/imgdir,${IMAGEDIR}/license.txt:/opt/freesurfer/license.txt,${stmpdir}:/paulscratch,${projDir}:/data ${IMAGEDIR}/qsiprep-v0.15.1.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec amico_noddi --freesurfer-input /data/bids/derivatives/fmriprep/freesurfer --output-resolution 2.5 -w /paulscratch participant --participant-label ${subject} + NOW=$(date +"%m-%d-%Y-%T") + echo "QSIprep NODDI Recon finished $NOW" >> ${scripts}/fulltimer.txt + chmod 777 -R ${projDir}/bids/derivatives/qsirecon - - ${scripts}/pdf_printer.sh ${projID} ${subject} ${sesname} QSIprepRecon ${based} + ${scripts}/pdf_printer.sh ${projID} ${subject} ${sesname} QSIprepRecon ${based} mv ${projDir}/bids/derivatives/qsirecon/${subject}* ${projDir}/bids/derivatives/qsiamiconoddi mkdir $projDir/bids/derivatives/fsl @@ -420,18 +416,14 @@ else SINGULARITY_CACHEDIR=$CACHESING SINGULARITY_TMPDIR=$TMPSING singularity run --cleanenv --bind ${projDir}/bids/derivatives/fsl/${subject}/${sesname}:/fslin --bind ${projDir}/bids/derivatives/qsiprep/${subject}/${sesname}/dwi:/datain $IMAGEDIR/fsl_601.sif dtifit -k /datain/${subject}_${sesname}_run-1_space-T1w_desc-preproc_dwi.nii.gz -o /fslin/sub-SAY244_ses-A_run-1_space-T1w_desc-DTIFIT -m /datain/${subject}_${sesname}_run-1_space-T1w_desc-brain_mask.nii.gz -r /datain/${subject}_${sesname}_run-1_space-T1w_desc-preproc_dwi.bvec -b /datain/${subject}_${sesname}_run-1_space-T1w_desc-preproc_dwi.bval --kurt --save_tensor NOW=$(date 
${IMAGEDIR}/qsiprep-v0.15.1.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec reorient_fslstd --output-resolution 2.5 -w /paulscratch participant --participant-label ${subject} - NOW=$(date +"%m-%d-%Y-%T") - echo "QSIprep reorient_fslstd Recon finished $NOW" >> ${scripts}/fulltimer.txt - echo "See docs for details on running SCFSL DTI probabilistic tractography (CUDA 10.2 GPU required)" - + echo "QSIprep reorient_fslstd Recon started $NOW" >> ${scripts}/fulltimer.txt + SINGULARITY_CACHEDIR=${scachedir} SINGULARITY_TMPDIR=${stmpdir} singularity run --cleanenv --bind ${IMAGEDIR}:/imgdir,${IMAGEDIR}/license.txt:/opt/freesurfer/license.txt,${stmpdir}:/paulscratch,${projDir}:/data ${IMAGEDIR}/qsiprep-v0.15.1.sif --fs-license-file /imgdir/license.txt /data/bids /data/bids/derivatives --recon_input /data/bids/derivatives/qsiprep --recon_spec reorient_fslstd --freesurfer-input /data/bids/derivatives/fmriprep/freesurfer --output-resolution 2.5 -w /paulscratch participant --participant-label ${subject} + NOW=$(date +"%m-%d-%Y-%T") + echo "QSIprep reorient_fslstd Recon finished $NOW" >> ${scripts}/fulltimer.txt + echo "See docs for details on running SCFSL DTI probabilistic tractography (CUDA 10.2 GPU required)" fi fi ${scripts}/pipeline_collate_ext.sh -p ${project} -z ${subject} -s ${sesname} -b ${based} -t beta -e ${address} fi -cd ${projDir} -NOW=$(date +"%m-%d-%Y") -