diff --git a/circle.yml b/circle.yml
index 28068db..c25993c 100644
--- a/circle.yml
+++ b/circle.yml
@@ -29,6 +29,8 @@ test:
     - docker run -ti --rm --read-only -v /tmp:/tmp -v /var/tmp:/var/tmp -v ${HOME}/data/ds114_test1:/bids_dataset bids/${CIRCLE_PROJECT_REPONAME,,} --version
     - docker run -ti --rm --read-only -v /tmp:/tmp -v /var/tmp:/var/tmp -v ${HOME}/data/ds114_test1:/bids_dataset -v ${HOME}/outputs1:/outputs bids/${CIRCLE_PROJECT_REPONAME,,} /bids_dataset /outputs participant --participant_label 01 --license_key="~/test.key" --stages autorecon1 && cat ${HOME}/outputs1/sub-01/scripts/recon-all.done :
         timeout: 21600
+    - docker run -ti --rm --read-only -v /tmp:/tmp -v /var/tmp:/var/tmp -v ${HOME}/data/ds114_test2:/bids_dataset -v ${HOME}/outputs2:/outputs bids/${CIRCLE_PROJECT_REPONAME,,} /bids_dataset /outputs participant --participant_label 01 --steps cross-sectional --session_label test --license_key="~/test.key" --stages autorecon1 && cat ${HOME}/outputs2/sub-01_ses-test/scripts/recon-all.done :
+        timeout: 21600
     # group2 tests
     - docker run -ti --rm --read-only -v /tmp:/tmp -v /var/tmp:/var/tmp -v ${HOME}/data/ds114_test1:/bids_dataset -v ${HOME}/data/ds114_test1_freesurfer_precomp_v6.0.0:/outputs bids/${CIRCLE_PROJECT_REPONAME,,} /bids_dataset /outputs group2 --license_key="~/test.key" && mkdir -p ${HOME}/outputs1/ && sudo mv ${HOME}/data/ds114_test1_freesurfer_precomp_v6.0.0/00_group* ${HOME}/outputs1/ && cat ${HOME}/outputs1/00_group2_stats_tables/lh.aparc.thickness.tsv :
         timeout: 21600
diff --git a/run.py b/run.py
index 0b9065f..db32e42 100755
--- a/run.py
+++ b/run.py
@@ -45,12 +45,22 @@ def run(command, env={}, ignore_errors=False):
                     'provided all subjects should be analyzed. Multiple '
                     'participants can be specified with a space separated list.',
                     nargs="+")
+parser.add_argument('--session_label', help='The label of the session that should be analyzed. The label '
+                    'corresponds to ses- from the BIDS spec '
+                    '(so it does not include "ses-"). If this parameter is not '
+                    'provided all sessions should be analyzed. Multiple '
+                    'sessions can be specified with a space separated list.',
+                    nargs="+")
 parser.add_argument('--n_cpus', help='Number of CPUs/cores available to use.',
                     default=1, type=int)
 parser.add_argument('--stages', help='Autorecon stages to run.',
                     choices=["autorecon1", "autorecon2", "autorecon2-cp", "autorecon2-wm", "autorecon-pial", "autorecon3", "autorecon-all", "all"],
                     default=["autorecon-all"],
                     nargs="+")
+parser.add_argument('--steps', help='Longitudinal pipeline steps to run.',
+                    choices=['cross-sectional', 'template', 'longitudinal'],
+                    default=['cross-sectional', 'template', 'longitudinal'],
+                    nargs="+")
 parser.add_argument('--template_name', help='Name for the custom group level template generated for this dataset',
                     default="average")
 parser.add_argument('--license_key', help='FreeSurfer license key - letters and numbers after "*" in the email you received after registration. To register (for free) visit https://surfer.nmr.mgh.harvard.edu/registration.html',
@@ -149,122 +159,131 @@ def run(command, env={}, ignore_errors=False):
         ignore_errors=True)
 
 for subject_label in subjects_to_analyze:
-    session_dirs = glob(os.path.join(args.bids_dir,"sub-%s"%subject_label,"ses-*"))
-    sessions = [os.path.split(dr)[-1].split("-")[-1] for dr in session_dirs]
-
-    timepoints = []
-    if len(sessions) > 0 and longitudinal_study == True:
-        # Running each session separately, prior to doing longitudinal pipeline
-        for session_label in sessions:
-            T1s = glob(os.path.join(args.bids_dir,
-                                    "sub-%s"%subject_label,
-                                    "ses-%s"%session_label,
-                                    "anat",
-                                    "%s_T1w.nii*"%acq_tpl))
-            if T1s:
-                input_args = ""
-                for T1 in T1s:
-                    if (round(max(nibabel.load(T1).header.get_zooms()),1) < 1.0 and args.hires_mode == "auto") or args.hires_mode == "enable":
-                        input_args += " -hires"
-                    input_args += " -i %s"%T1
-
-                T2s = glob(os.path.join(args.bids_dir, "sub-%s"%subject_label,
-                                        "ses-%s"%session_label, "anat",
-                                        "*%s_T2w.nii*"%acq_t2))
-                FLAIRs = glob(os.path.join(args.bids_dir, "sub-%s"%subject_label,
-                                           "ses-%s"%session_label, "anat",
-                                           "*%s_FLAIR.nii*"%acq_t2))
-                if args.refine_pial == "T2":
-                    for T2 in T2s:
-                        if max(nibabel.load(T2).header.get_zooms()) < 1.2:
-                            input_args += " " + " ".join(["-T2 %s"%T2])
-                            input_args += " -T2pial"
-                elif args.refine_pial == "FLAIR":
-                    for FLAIR in FLAIRs:
-                        if max(nibabel.load(FLAIR).header.get_zooms()) < 1.2:
-                            input_args += " " + " ".join(["-FLAIR %s"%FLAIR])
-                            input_args += " -FLAIRpial"
-
-                fsid = "sub-%s_ses-%s"%(subject_label, session_label)
-                timepoints.append(fsid)
-                cmd = "recon-all -subjid %s -sd %s %s -all -parallel -openmp %d"%(fsid,
-                                                                                  output_dir,
-                                                                                  input_args,
-                                                                                  args.n_cpus)
-                resume_cmd = "recon-all -subjid %s -sd %s -all -parallel -openmp %d"%(fsid,
+    if glob(os.path.join(args.bids_dir, "sub-%s"%subject_label, "ses-*")):
+        T1s = glob(os.path.join(args.bids_dir,
+                                "sub-%s"%subject_label,
+                                "ses-*",
+                                "anat",
+                                "%s_T1w.nii*"%acq_tpl))
+        sessions = set([os.path.normpath(t1).split(os.sep)[-3].split("-")[-1] for t1 in T1s])
+        if args.session_label:
+            sessions = sessions.intersection(args.session_label)
+
+        if len(sessions) > 0 and longitudinal_study == True:
+            timepoints = ["sub-%s_ses-%s"%(subject_label, session_label) for session_label in sessions]
+            if ('cross-sectional' in args.steps):
+                # Running each session separately, prior to doing longitudinal pipeline
+                for session_label in sessions:
+                    T1s = glob(os.path.join(args.bids_dir,
+                                            "sub-%s"%subject_label,
+                                            "ses-%s"%session_label,
+                                            "anat",
+                                            "%s_T1w.nii*"%acq_tpl))
+                    input_args = ""
+                    for T1 in T1s:
+                        if (round(max(nibabel.load(T1).header.get_zooms()),1) < 1.0 and args.hires_mode == "auto") or args.hires_mode == "enable":
+                            input_args += " -hires"
+                        input_args += " -i %s"%T1
+
+                    T2s = glob(os.path.join(args.bids_dir, "sub-%s"%subject_label,
+                                            "ses-%s"%session_label, "anat",
+                                            "*%s_T2w.nii*"%acq_t2))
+                    FLAIRs = glob(os.path.join(args.bids_dir, "sub-%s"%subject_label,
+                                               "ses-%s"%session_label, "anat",
+                                               "*%s_FLAIR.nii*"%acq_t2))
+                    if args.refine_pial == "T2":
+                        for T2 in T2s:
+                            if max(nibabel.load(T2).header.get_zooms()) < 1.2:
+                                input_args += " " + " ".join(["-T2 %s"%T2])
+                                input_args += " -T2pial"
+                    elif args.refine_pial == "FLAIR":
+                        for FLAIR in FLAIRs:
+                            if max(nibabel.load(FLAIR).header.get_zooms()) < 1.2:
+                                input_args += " " + " ".join(["-FLAIR %s"%FLAIR])
+                                input_args += " -FLAIRpial"
+
+                    fsid = "sub-%s_ses-%s"%(subject_label, session_label)
+                    stages = " ".join(["-" + stage for stage in args.stages])
+                    cmd = "recon-all -subjid %s -sd %s %s %s -parallel -openmp %d"%(fsid,
                                                                                     output_dir,
+                                                                                    input_args,
+                                                                                    stages,
                                                                                     args.n_cpus)
+                    resume_cmd = "recon-all -subjid %s -sd %s %s -parallel -openmp %d"%(fsid,
+                                                                                        output_dir,
+                                                                                        stages,
+                                                                                        args.n_cpus)
+
+                    if os.path.isfile(os.path.join(output_dir, fsid, "scripts/IsRunning.lh+rh")):
+                        rmtree(os.path.join(output_dir, fsid))
+                        print("DELETING OUTPUT SUBJECT DIR AND RE-RUNNING COMMAND:")
+                        print(cmd)
+                        run(cmd)
+                    elif os.path.isfile(os.path.join(output_dir, fsid, "mri/aseg.mgz")):
+                        print("SUBJECT ALREADY SEGMENTED, SKIPPING")
+                    elif os.path.exists(os.path.join(output_dir, fsid)):
+                        print("SUBJECT DIR ALREADY EXISTS (without IsRunning.lh+rh), RUNNING COMMAND:")
+                        print(resume_cmd)
+                        run(resume_cmd)
+                    else:
+                        print(cmd)
+                        run(cmd)
+
+            if ('template' in args.steps):
+                # creating a subject specific template
+                input_args = " ".join(["-tp %s"%tp for tp in timepoints])
+                fsid = "sub-%s"%subject_label
+                stages = " ".join(["-" + stage for stage in args.stages])
+                cmd = "recon-all -base %s -sd %s %s %s -parallel -openmp %d"%(fsid,
+                                                                              output_dir,
+                                                                              input_args,
+                                                                              stages,
+                                                                              args.n_cpus)
                 if os.path.isfile(os.path.join(output_dir, fsid,"scripts/IsRunning.lh+rh")):
                     rmtree(os.path.join(output_dir, fsid))
                     print("DELETING OUTPUT SUBJECT DIR AND RE-RUNNING COMMAND:")
                     print(cmd)
                     run(cmd)
+                elif os.path.isfile(os.path.join(output_dir, fsid, "mri/aseg.mgz")):
+                    print("TEMPLATE ALREADY CREATED, SKIPPING")
                 elif os.path.exists(os.path.join(output_dir, fsid)):
                     print("SUBJECT DIR ALREADY EXISTS (without IsRunning.lh+rh), RUNNING COMMAND:")
-                    print(resume_cmd)
-                    run(resume_cmd)
+                    print(cmd)
+                    run(cmd)
                 else:
                     print(cmd)
                     run(cmd)
-            else:
-                print("No T1w nii files found for subject %s, session %s. Skipping session." % (subject_label,
-                                                                                                session_label))
-
-        if timepoints:
-            # creating a subject specific template
-            input_args = " ".join(["-tp %s"%tp for tp in timepoints])
-            fsid = "sub-%s"%subject_label
-            stages = " ".join(["-" + stage for stage in args.stages])
-            cmd = "recon-all -base %s -sd %s %s %s -parallel -openmp %d"%(fsid,
-                                                                          output_dir,
-                                                                          input_args,
-                                                                          stages,
-                                                                          args.n_cpus)
-            resume_cmd = "recon-all -base %s -sd %s %s -parallel -openmp %d"%(fsid,
-                                                                              output_dir,
-                                                                              stages,
-                                                                              args.n_cpus)
-
-            if os.path.isfile(os.path.join(output_dir, fsid,"scripts/IsRunning.lh+rh")):
-                rmtree(os.path.join(output_dir, fsid))
-                print("DELETING OUTPUT SUBJECT DIR AND RE-RUNNING COMMAND:")
-                print(cmd)
-                run(cmd)
-            elif os.path.exists(os.path.join(output_dir, fsid)):
-                print("SUBJECT DIR ALREADY EXISTS (without IsRunning.lh+rh), RUNNING COMMAND:")
-                print(resume_cmd)
-                run(resume_cmd)
-            else:
-                print(cmd)
-                run(cmd)
-
-            for tp in timepoints:
-                # longitudinally process all timepoints
-                fsid = "sub-%s"%subject_label
-                stages = " ".join(["-" + stage for stage in args.stages])
-                cmd = "recon-all -long %s %s -sd %s %s -parallel -openmp %d"%(tp,
-                                                                              fsid,
-                                                                              output_dir,
-                                                                              stages,
-                                                                              args.n_cpus)
-
-                if os.path.isfile(os.path.join(output_dir, tp + ".long." + fsid,"scripts/IsRunning.lh+rh")):
-                    rmtree(os.path.join(output_dir, tp + ".long." + fsid))
-                    print("DELETING OUTPUT SUBJECT DIR AND RE-RUNNING COMMAND:")
-                    print(cmd)
-                    run(cmd)
-        else:
-            print("No T1w nii files found for subject %s. Skipping subject." % subject_label)
+
+            if ('longitudinal' in args.steps):
+                for tp in timepoints:
+                    # longitudinally process all timepoints
+                    fsid = "sub-%s"%subject_label
+                    stages = " ".join(["-" + stage for stage in args.stages])
+                    cmd = "recon-all -long %s %s -sd %s %s -parallel -openmp %d"%(tp,
+                                                                                  fsid,
+                                                                                  output_dir,
+                                                                                  stages,
+                                                                                  args.n_cpus)
-    elif len(sessions) > 0 and longitudinal_study == False:
-        # grab all T1s/T2s from multiple sessions and combine
-        T1s = glob(os.path.join(args.bids_dir,
-                                "sub-%s"%subject_label,
-                                "ses-*",
-                                "anat",
-                                "%s_T1w.nii*"%acq_tpl))
-        if T1s:
+                    if os.path.isfile(os.path.join(output_dir, tp + ".long." + fsid,"scripts/IsRunning.lh+rh")):
+                        rmtree(os.path.join(output_dir, tp + ".long." + fsid))
+                        print("DELETING OUTPUT SUBJECT DIR AND RE-RUNNING COMMAND:")
+                        print(cmd)
+                        run(cmd)
+                    elif os.path.isfile(os.path.join(output_dir, tp + ".long." + fsid, "mri/aseg.mgz")):
+                        print("SUBJECT ALREADY SEGMENTED, SKIPPING")
+                    else:
+                        print(cmd)
+                        run(cmd)
+
+        elif len(sessions) > 0 and longitudinal_study == False:
+            # grab all T1s/T2s from multiple sessions and combine
+            T1s = glob(os.path.join(args.bids_dir,
+                                    "sub-%s"%subject_label,
+                                    "ses-*",
+                                    "anat",
+                                    "%s_T1w.nii*"%acq_tpl))
             input_args = ""
             for T1 in T1s:
                 if (round(max(nibabel.load(T1).header.get_zooms()),1) < 1.0 and args.hires_mode == "auto") or args.hires_mode == "enable":
@@ -317,7 +336,7 @@ def run(command, env={}, ignore_errors=False):
             print(cmd)
             run(cmd)
         else:
-            print("No T1w nii files found for subject %s. Skipping subject." % subject_label)
+            print("SKIPPING SUBJECT %s (no valid session)." % subject_label)
 
     else:
         # grab all T1s/T2s from single session (no ses-* directories)
@@ -325,54 +344,53 @@ def run(command, env={}, ignore_errors=False):
                                 "sub-%s"%subject_label,
                                 "anat",
                                 "%s_T1w.nii*"%acq_tpl))
-        if T1s:
-            input_args = ""
-            for T1 in T1s:
-                if (round(max(nibabel.load(T1).header.get_zooms()),1) < 1.0 and args.hires_mode == "auto") or args.hires_mode == "enable":
-                    input_args += " -hires"
-                input_args += " -i %s"%T1
-            T2s = glob(os.path.join(args.bids_dir, "sub-%s"%subject_label, "anat",
-                                    "*%s_T2w.nii*"%acq_t2))
-            FLAIRs = glob(os.path.join(args.bids_dir, "sub-%s"%subject_label, "anat",
-                                       "*%s_FLAIR.nii*"%acq_t2))
-            if args.refine_pial == "T2":
-                for T2 in T2s:
-                    if max(nibabel.load(T2).header.get_zooms()) < 1.2:
-                        input_args += " " + " ".join(["-T2 %s"%T2])
-                        input_args += " -T2pial"
-            elif args.refine_pial == "FLAIR":
-                for FLAIR in FLAIRs:
-                    if max(nibabel.load(FLAIR).header.get_zooms()) < 1.2:
-                        input_args += " " + " ".join(["-FLAIR %s"%FLAIR])
-                        input_args += " -FLAIRpial"
-
-            fsid = "sub-%s"%subject_label
-            stages = " ".join(["-" + stage for stage in args.stages])
-            cmd = "recon-all -subjid %s -sd %s %s %s -parallel -openmp %d"%(fsid,
+        if not T1s:
+            print("No T1w nii files found for subject %s. Skipping subject." % subject_label)
+            continue
+        input_args = ""
+        for T1 in T1s:
+            if (round(max(nibabel.load(T1).header.get_zooms()),1) < 1.0 and args.hires_mode == "auto") or args.hires_mode == "enable":
+                input_args += " -hires"
+            input_args += " -i %s"%T1
+        T2s = glob(os.path.join(args.bids_dir, "sub-%s"%subject_label, "anat",
+                                "*%s_T2w.nii*"%acq_t2))
+        FLAIRs = glob(os.path.join(args.bids_dir, "sub-%s"%subject_label, "anat",
+                                   "*%s_FLAIR.nii*"%acq_t2))
+        if args.refine_pial == "T2":
+            for T2 in T2s:
+                if max(nibabel.load(T2).header.get_zooms()) < 1.2:
+                    input_args += " " + " ".join(["-T2 %s"%T2])
+                    input_args += " -T2pial"
+        elif args.refine_pial == "FLAIR":
+            for FLAIR in FLAIRs:
+                if max(nibabel.load(FLAIR).header.get_zooms()) < 1.2:
+                    input_args += " " + " ".join(["-FLAIR %s"%FLAIR])
+                    input_args += " -FLAIRpial"
+
+        fsid = "sub-%s"%subject_label
+        stages = " ".join(["-" + stage for stage in args.stages])
+        cmd = "recon-all -subjid %s -sd %s %s %s -parallel -openmp %d"%(fsid,
+                                                                        output_dir,
+                                                                        input_args,
+                                                                        stages,
+                                                                        args.n_cpus)
+        resume_cmd = "recon-all -subjid %s -sd %s %s -parallel -openmp %d"%(fsid,
                                                                             output_dir,
-                                                                            input_args,
                                                                             stages,
                                                                             args.n_cpus)
-            resume_cmd = "recon-all -subjid %s -sd %s %s -parallel -openmp %d"%(fsid,
-                                                                                output_dir,
-                                                                                stages,
-                                                                                args.n_cpus)
-            if os.path.isfile(os.path.join(output_dir, fsid,"scripts/IsRunning.lh+rh")):
-                rmtree(os.path.join(output_dir, fsid))
-                print("DELETING OUTPUT SUBJECT DIR AND RE-RUNNING COMMAND:")
-                print(cmd)
-                run(cmd)
-            elif os.path.exists(os.path.join(output_dir, fsid)):
-                print("SUBJECT DIR ALREADY EXISTS (without IsRunning.lh+rh), RUNNING COMMAND:")
-                print(resume_cmd)
-                run(resume_cmd)
-            else:
-                print(cmd)
-                run(cmd)
+        if os.path.isfile(os.path.join(output_dir, fsid,"scripts/IsRunning.lh+rh")):
+            rmtree(os.path.join(output_dir, fsid))
+            print("DELETING OUTPUT SUBJECT DIR AND RE-RUNNING COMMAND:")
+            print(cmd)
+            run(cmd)
+        elif os.path.exists(os.path.join(output_dir, fsid)):
+            print("SUBJECT DIR ALREADY EXISTS (without IsRunning.lh+rh), RUNNING COMMAND:")
+            print(resume_cmd)
+            run(resume_cmd)
         else:
-            print("No T1w nii files found for subject %s. Skipping subject." % subject_label)
-
+            print(cmd)
+            run(cmd)
 elif args.analysis_level == "group1":
     # running group level
     if len(subjects_to_analyze) > 1:
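
Usage note (not part of the patch): the command below is an illustrative sketch of how the new --steps and --session_label flags combine at the participant level, mirroring the CircleCI test added above. The image name bids/freesurfer, the host paths, and the license file are placeholders, not values taken from this change.

    # Illustrative only: image name, host paths, and license file are placeholders.
    docker run -ti --rm --read-only \
        -v /tmp:/tmp -v /var/tmp:/var/tmp \
        -v /path/to/bids_dataset:/bids_dataset \
        -v /path/to/outputs:/outputs \
        bids/freesurfer \
        /bids_dataset /outputs participant \
        --participant_label 01 \
        --session_label test \
        --steps cross-sectional \
        --stages autorecon1 \
        --license_key="~/test.key"
    # With --steps cross-sectional, per-session results land in /outputs/sub-01_ses-test/;
    # the CI test above checks /outputs/sub-01_ses-test/scripts/recon-all.done.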