diff --git a/Dockerfile b/Dockerfile
index 89791f1..7e53be9 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,5 @@
 # Use Ubuntu 14.04 LTS
-FROM ubuntu:trusty-20170119
+FROM ubuntu:14.04
 
 ## Install the validator
 RUN apt-get update && \
@@ -8,7 +8,7 @@ RUN apt-get update && \
     apt-get remove -y curl && \
     apt-get install -y nodejs
 
-RUN npm install -g bids-validator@0.19.2
+RUN npm install -g bids-validator@0.25.07
 
 # Download FreeSurfer
 RUN apt-get -y update \
@@ -27,7 +27,9 @@ RUN apt-get -y update \
            --exclude='freesurfer/average/mult-comp-cor' \
           --exclude='freesurfer/lib/cuda' \
           --exclude='freesurfer/lib/qt' && \
-    apt-get install -y tcsh bc tar libgomp1 perl-modules curl
+    apt-get remove -y wget && \
+    apt-get install -y tcsh bc tar libgomp1 perl-modules curl && \
+    rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
 
 # Set up the environment
 ENV OS Linux
@@ -53,9 +55,9 @@ RUN apt-get update && \
     curl -sSL http://neuro.debian.net/lists/trusty.us-ca.full >> /etc/apt/sources.list.d/neurodebian.sources.list && \
     apt-key adv --recv-keys --keyserver hkp://pgp.mit.edu:80 0xA5D32F012649A5A9 && \
     apt-get update && \
-    apt-get install -y fsl-core=5.0.9-4~nd14.04+1
+    apt-get install -y fsl-core=5.0.9-4~nd14.04+1 && \
+    rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
 
-# Configure environment
 ENV FSLDIR=/usr/share/fsl/5.0
 ENV FSL_DIR="${FSLDIR}"
 ENV FSLOUTPUTTYPE=NIFTI_GZ
@@ -68,20 +70,24 @@ ENV FSLWISH=/usr/bin/wish
 ENV FSLOUTPUTTYPE=NIFTI_GZ
 
 RUN echo "cHJpbnRmICJrcnp5c3p0b2YuZ29yZ29sZXdza2lAZ21haWwuY29tXG41MTcyXG4gKkN2dW12RVYzelRmZ1xuRlM1Si8yYzFhZ2c0RVxuIiA+IC9vcHQvZnJlZXN1cmZlci9saWNlbnNlLnR4dAo=" | base64 -d | sh
 
-# Install Connectome Workbench
-RUN apt-get update && apt-get -y install connectome-workbench=1.2.3-1~nd14.04+1
-
-ENV CARET7DIR=/usr/bin
+# Get connectome workbench
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends curl && \
+    curl -sSL http://neuro.debian.net/lists/trusty.us-ca.full >> /etc/apt/sources.list.d/neurodebian.sources.list && \
+    apt-key adv --recv-keys --keyserver hkp://pgp.mit.edu:80 0xA5D32F012649A5A9 && \
+    apt-get update && \
+    apt-get -y install connectome-workbench=1.2.3-1~nd14.04+1
 
 # Install HCP Pipelines
+WORKDIR /
 RUN apt-get -y update \
     && apt-get install -y --no-install-recommends python-numpy && \
-    wget https://github.com/Washington-University/Pipelines/archive/v3.17.0.tar.gz -O pipelines.tar.gz && \
+    wget https://github.com/jokedurnez/Pipelines/archive/v0.1.0.tar.gz -O pipelines.tar.gz && \
     cd /opt/ && \
     tar zxvf /pipelines.tar.gz && \
     mv /opt/Pipelines-* /opt/HCP-Pipelines && \
     rm /pipelines.tar.gz && \
-    cd /
+    cd /
 
 ENV HCPPIPEDIR=/opt/HCP-Pipelines
 ENV HCPPIPEDIR_Templates=${HCPPIPEDIR}/global/templates
@@ -97,12 +103,29 @@ ENV HCPPIPEDIR_dMRI=${HCPPIPEDIR}/DiffusionPreprocessing/scripts
 ENV HCPPIPEDIR_dMRITract=${HCPPIPEDIR}/DiffusionTractography/scripts
 ENV HCPPIPEDIR_Global=${HCPPIPEDIR}/global/scripts
 ENV HCPPIPEDIR_tfMRIAnalysis=${HCPPIPEDIR}/TaskfMRIAnalysis/scripts
-ENV MSMBin=${HCPPIPEDIR}/MSMBinaries
+ENV MSMCONFIGDIR=${HCPPIPEDIR}/MSMConfig
+ENV CARET7DIR=/usr/bin
+
+RUN apt-get update && apt-get install -y --no-install-recommends python-pip python-six python-nibabel python-setuptools
+ADD requirements.txt requirements.txt
+RUN pip install -r requirements.txt && \
+    rm -rf ~/.cache/pip
 
-RUN apt-get update && apt-get install -y --no-install-recommends python-pip python-six python-nibabel python-setuptools
-RUN pip install pybids==0.0.1
 ENV PYTHONPATH=""
 
+# missing libraries
+RUN echo deb http://security.ubuntu.com/ubuntu precise-security main >> /etc/apt/sources.list && \
+    apt update && \
+    apt install -y libxp6 libxmu6
+
+WORKDIR /opt/freesurfer/bin
+RUN wget https://raw.githubusercontent.com/freesurfer/freesurfer/d26114a201333f812d2cef67a338e2685c004d00/scripts/recon-all.v6.hires && \
+    chmod +x /opt/freesurfer/bin/recon-all.v6.hires
+RUN wget https://raw.githubusercontent.com/freesurfer/freesurfer/dev/scripts/tess1mm && \
+    chmod +x /opt/freesurfer/bin/tess1mm
+RUN wget -qO- https://fsl.fmrib.ox.ac.uk/fsldownloads/patches/eddy-patch-fsl-5.0.9/centos6/eddy_openmp > $FSLDIR/bin/eddy_openmp
+RUN chmod +x $FSLDIR/bin/eddy_openmp
+
 COPY run.py /run.py
 
 RUN chmod +x /run.py
diff --git a/circle.yml b/circle.yml
index 5d4fd3f..b693101 100644
--- a/circle.yml
+++ b/circle.yml
@@ -11,21 +11,20 @@ dependencies:
     - "~/data"
 
   override:
-    - if [[ ! -d ~/data/hcp_example_bids ]]; then wget -c -O ${HOME}/hcp_example_bids.zip "https://files.osf.io/v1/resources/9q7dv/providers/osfstorage/58a1fd039ad5a101f23cd94f" && mkdir -p ${HOME}/data && unzip ${HOME}/hcp_example_bids.zip -d ${HOME}/data; fi
-    - if [[ ! -d ~/data/lifespan_example_bids ]]; then wget -c -O ${HOME}/lifespan_example_bids.zip "https://files.osf.io/v1/resources/9q7dv/providers/osfstorage/58a1fc4eb83f6901f3b47a54" && mkdir -p ${HOME}/data && unzip ${HOME}/lifespan_example_bids.zip -d ${HOME}/data; fi
+    - if [[ ! -d ~/data/hcp_example_bids ]]; then wget -c -O ${HOME}/hcp_example_bids.zip "https://files.osf.io/v1/resources/9q7dv/providers/osfstorage/5a8efffe91b689000c9f5ce5" && mkdir -p ${HOME}/data && unzip ${HOME}/hcp_example_bids.zip -d ${HOME}/data; fi
     - if [[ -e ~/docker/image.tar ]]; then docker load -i ~/docker/image.tar; fi
     - git describe --tags > version
     - docker build -t bids/${CIRCLE_PROJECT_REPONAME,,} . :
-        timeout: 21600
+        timeout: 40000
     - mkdir -p ~/docker; docker save "bids/${CIRCLE_PROJECT_REPONAME,,}" > ~/docker/image.tar
     - mkdir -p ${HOME}/outputs
 
 test:
   override:
     # print version
-    - docker run -ti --rm --read-only -v /tmp:/tmp -v /var/tmp:/var/tmp -v ${HOME}/data/hcp_example_bids:/bids_dataset bids/${CIRCLE_PROJECT_REPONAME,,} --version
+    - docker run -ti --rm --read-only -v /tmp:/tmp -v /var/tmp:/var/tmp -v ${HOME}/data/hcp_example_bids_v3:/bids_dataset bids/${CIRCLE_PROJECT_REPONAME,,} --version
     # participant level tests for single session dataset
-    - docker run -ti --rm --read-only -v /tmp:/tmp -v /var/tmp:/var/tmp -v ${HOME}/data/hcp_example_bids:/bids_dataset -v ${HOME}/outputs1:/outputs bids/${CIRCLE_PROJECT_REPONAME,,} /bids_dataset /outputs participant --participant_label 100307 --stages PreFreeSurfer --license_key="*CxjskRdd7" --n_cpus 2:
+    - docker run -ti --rm --read-only -v /tmp:/tmp -v /var/tmp:/var/tmp -v ${HOME}/data/hcp_example_bids_v3:/bids_dataset -v ${HOME}/outputs1:/outputs bids/${CIRCLE_PROJECT_REPONAME,,} /bids_dataset /outputs participant --participant_label 100307 --stages PreFreeSurfer --n_cpus 2:
         timeout: 21600
 
 deployment:
   hub:
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..94ae07d
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,2 @@
+grabbit==0.1.0
+pybids==0.4.2
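Note: run.py below relies on the pybids 0.4.x API pinned in requirements.txt above — BIDSLayout is imported from the bids.grabbids namespace, and get_fieldmap() is called with return_list=True. A minimal sketch of the calls the script assumes, using a hypothetical dataset path and the subject label from circle.yml:

    from bids.grabbids import BIDSLayout

    # "/bids_dataset" is just the mount point used in circle.yml; any BIDS directory works
    layout = BIDSLayout("/bids_dataset")
    t1ws = [f.filename for f in layout.get(subject="100307", type="T1w",
                                           extensions=["nii.gz", "nii"])]
    # JSON sidecar metadata, e.g. EffectiveEchoSpacing or PhaseEncodingDirection
    metadata = layout.get_metadata(t1ws[0])
    # with return_list=True, every fieldmap intended for this image comes back as its own dict
    fieldmap_set = layout.get_fieldmap(t1ws[0], return_list=True)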
diff --git a/run.py b/run.py
index 9ef8c4a..9964a46 100644
--- a/run.py
+++ b/run.py
@@ -8,6 +8,7 @@ from subprocess import Popen, PIPE
 from shutil import rmtree
 import subprocess
+import copy
 from bids.grabbids import BIDSLayout
 from functools import partial
 from collections import OrderedDict
@@ -63,9 +64,9 @@ def run_pre_freesurfer(**args):
     '--t2samplespacing="{t2samplespacing}" ' + \
     '--unwarpdir="{unwarpdir}" ' + \
     '--gdcoeffs="NONE" ' + \
+    '--extra-eddy-arg="--data_is_shelled" ' + \
     '--avgrdcmethod={avgrdcmethod} ' + \
-    '--topupconfig="{HCPPIPEDIR_Config}/b02b0.cnf" ' + \
-    '--printcom=""'
+    '--topupconfig="{HCPPIPEDIR_Config}/b02b0.cnf" '
     cmd = cmd.format(**args)
 
     run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])})
@@ -77,8 +78,7 @@ def run_freesurfer(**args):
     '--subjectDIR="{subjectDIR}" ' + \
     '--t1="{path}/{subject}/T1w/T1w_acpc_dc_restore.nii.gz" ' + \
     '--t1brain="{path}/{subject}/T1w/T1w_acpc_dc_restore_brain.nii.gz" ' + \
-    '--t2="{path}/{subject}/T1w/T2w_acpc_dc_restore.nii.gz" ' + \
-    '--printcom=""'
+    '--t2="{path}/{subject}/T1w/T2w_acpc_dc_restore.nii.gz" '
     cmd = cmd.format(**args)
 
     if not os.path.exists(os.path.join(args["subjectDIR"], "fsaverage")):
@@ -107,8 +107,7 @@
     '--subcortgraylabels="{HCPPIPEDIR_Config}/FreeSurferSubcorticalLabelTableLut.txt" ' + \
     '--freesurferlabels="{HCPPIPEDIR_Config}/FreeSurferAllLut.txt" ' + \
     '--refmyelinmaps="{HCPPIPEDIR_Templates}/standard_mesh_atlases/Conte69.MyelinMap_BC.164k_fs_LR.dscalar.nii" ' + \
-    '--regname="FS" ' + \
-    '--printcom=""'
+    '--regname="FS" '
     cmd = cmd.format(**args)
 
     run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])})
@@ -132,7 +131,6 @@ def run_generic_fMRI_volume_processsing(**args):
     '--dcmethod={dcmethod} ' + \
     '--gdcoeffs="NONE" ' + \
     '--topupconfig={HCPPIPEDIR_Config}/b02b0.cnf ' + \
-    '--printcom="" ' + \
     '--biascorrection={biascorrection} ' + \
     '--mctype="MCFLIRT"'
     cmd = cmd.format(**args)
@@ -161,14 +159,13 @@
     '--subject="{subject}" ' + \
     '--echospacing="{echospacing}" '+ \
     '--PEdir={PEdir} ' + \
-    '--gdcoeffs="NONE" ' + \
-    '--printcom=""'
+    '--gdcoeffs="NONE" '
     cmd = cmd.format(**args)
     run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])})
 
 __version__ = open('/version').read()
-parser = argparse.ArgumentParser(description='HCP Pipeliens BIDS App (T1w, T2w, fMRI)')
+parser = argparse.ArgumentParser(description='HCP Pipelines BIDS App (T1w, T2w, fMRI, DWI)')
 parser.add_argument('bids_dir', help='The directory with the input dataset '
                     'formatted according to the BIDS standard.')
 parser.add_argument('output_dir', help='The directory where the output files '
@@ -194,10 +191,8 @@
                    default=['PreFreeSurfer', 'FreeSurfer', 'PostFreeSurfer',
                             'fMRIVolume', 'fMRISurface',
                             'DiffusionPreprocessing'])
 
-parser.add_argument('--license_key', help='FreeSurfer license key - letters and numbers after "*" in the email you received after registration. To register (for free) visit https://surfer.nmr.mgh.harvard.edu/registration.html',
-                    required=True)
 parser.add_argument('-v', '--version', action='version',
-                    version='HCP Pielines BIDS App version {}'.format(__version__))
+                    version='HCP Pipelines BIDS App version {}'.format(__version__))
 
 args = parser.parse_args()
@@ -236,7 +231,7 @@
         t2_res = float(min(t2_zooms[:3]))
         t2_template_res = min(available_resolutions, key=lambda x:abs(float(x)-t2_res))
 
-        fieldmap_set = layout.get_fieldmap(t1ws[0])
+        fieldmap_set = layout.get_fieldmap(t1ws[0],return_list=True)
         fmap_args = {"fmapmag": "NONE",
                      "fmapphase": "NONE",
                      "echodiff": "NONE",
@@ -250,6 +245,11 @@
                      "seunwarpdir": "NONE"}
 
         if fieldmap_set:
+            if len(fieldmap_set)>1:
+                fieldmap_trans=dict(zip(fieldmap_set[0],zip(*[d.values() for d in fieldmap_set])))
+            else:
+                fieldmap_trans = {k:[v] for k,v in fieldmap_set[0].iteritems()}
+
             t1_spacing = layout.get_metadata(t1ws[0])["EffectiveEchoSpacing"]
             t2_spacing = layout.get_metadata(t2ws[0])["EffectiveEchoSpacing"]
 
@@ -262,58 +262,67 @@
                          "t2samplespacing": "%.8f"%t2_spacing,
                          "unwarpdir": unwarpdir})
 
-            if fieldmap_set["type"] == "phasediff":
+            if set(fieldmap_trans["type"]) == set(["phasediff"]):
                 merged_file = "%s/tmp/%s/magfile.nii.gz"%(args.output_dir, subject_label)
                 run("mkdir -p %s/tmp/%s/ && fslmerge -t %s %s %s"%(args.output_dir, subject_label, merged_file,
-                    fieldmap_set["magnitude1"],
-                    fieldmap_set["magnitude2"]))
+                    " ".join(fieldmap_trans["magnitude1"]),
+                    " ".join(fieldmap_trans["magnitude2"])))
 
-                phasediff_metadata = layout.get_metadata(fieldmap_set["phasediff"])
+                phasediff_metadata = layout.get_metadata(fieldmap_trans["phasediff"][0])
                 te_diff = phasediff_metadata["EchoTime2"] - phasediff_metadata["EchoTime1"]
                 # HCP expects TE in miliseconds
                 te_diff = te_diff*1000.0
 
                 fmap_args.update({"fmapmag": merged_file,
-                                  "fmapphase": fieldmap_set["phasediff"],
+                                  "fmapphase": fieldmap_trans["phasediff"][0],
                                   "echodiff": "%.6f"%te_diff,
                                   "avgrdcmethod": "SiemensFieldMap"})
-            elif fieldmap_set["type"] == "epi":
+            elif set(fieldmap_trans["type"]) == set(["epi"]):
                 SEPhaseNeg = None
                 SEPhasePos = None
-                for fieldmap in fieldmap_set["epi"]:
+                seunwarpdir = None
+                for fieldmap in fieldmap_trans["epi"]:
                     enc_dir = layout.get_metadata(fieldmap)["PhaseEncodingDirection"]
                     if "-" in enc_dir:
                         SEPhaseNeg = fieldmap
                     else:
                         SEPhasePos = fieldmap
-                seunwarpdir = layout.get_metadata(fieldmap_set["epi"][0])["PhaseEncodingDirection"]
-                seunwarpdir = seunwarpdir.replace("-", "").replace("i","x").replace("j", "y").replace("k", "z")
-
-                #TODO check consistency of echo spacing instead of assuming it's all the same
-                if "EffectiveEchoSpacing" in layout.get_metadata(fieldmap_set["epi"][0]):
-                    echospacing = layout.get_metadata(fieldmap_set["epi"][0])["EffectiveEchoSpacing"]
-                elif "TotalReadoutTime" in layout.get_metadata(fieldmap_set["epi"][0]):
-                    # HCP Pipelines do not allow users to specify total readout time directly
-                    # Hence we need to reverse the calculations to provide echo spacing that would
-                    # result in the right total read out total read out time
-                    # see https://github.com/Washington-University/Pipelines/blob/master/global/scripts/TopupPreprocessingAll.sh#L202
-                    print("BIDS App wrapper: Did not find EffectiveEchoSpacing, calculating it from TotalReadoutTime")
-                    # TotalReadoutTime = EffectiveEchoSpacing * (len(PhaseEncodingDirection) - 1)
-                    total_readout_time = layout.get_metadata(fieldmap_set["epi"][0])["TotalReadoutTime"]
-                    phase_len = nibabel.load(fieldmap_set["epi"][0]).shape[{"x": 0, "y": 1}[seunwarpdir]]
-                    echospacing = total_readout_time / float(phase_len - 1)
-                else:
-                    raise RuntimeError("EffectiveEchoSpacing or TotalReadoutTime not defined for the fieldmap intended for T1w image. Please fix your BIDS dataset.")
+                    unwarpdir = enc_dir.replace('-', '').replace('i','x').replace('j','y').replace('k','z')
+                    if seunwarpdir and not seunwarpdir == unwarpdir:
+                        raise RuntimeError("Inconsistent unwarp directions.")
+                    else:
+                        #raise RuntimeError("EffectiveEchoSpacing or TotalReadoutTime not defined for the fieldmap intended for T1w. Please fix your dataset.")
+                        seunwarpdir = copy.deepcopy(unwarpdir)
+
+                fmap_arguments = {}
+                for fieldmap in fieldmap_trans['epi']:
+                    if "EffectiveEchoSpacing" in layout.get_metadata(fieldmap):
+                        echospacing = layout.get_metadata(fieldmap)["EffectiveEchoSpacing"]
+                    elif "TotalReadoutTime" in layout.get_metadata(fieldmap):
+                        # HCP Pipelines do not allow users to specify total readout time directly
+                        # Hence we need to reverse the calculations to provide echo spacing that would
+                        # result in the right total read out total read out time
+                        # see https://github.com/Washington-University/Pipelines/blob/master/global/scripts/TopupPreprocessingAll.sh#L202
+                        print("BIDS App wrapper: Did not find EffectiveEchoSpacing, calculating it from TotalReadoutTime")
+                        # TotalReadoutTime = EffectiveEchoSpacing * (len(PhaseEncodingDirection) - 1)
+                        total_readout_time = layout.get_metadata(fieldmap)["TotalReadoutTime"]
+                        phase_len = nibabel.load(fieldmap).shape[{"x": 0, "y": 1}[seunwarpdir]]
+                        echospacing = total_readout_time / float(phase_len - 1)
+                    else:
+                        raise RuntimeError("EffectiveEchoSpacing or TotalReadoutTime not defined for the fieldmap intended for T1w image. Please fix your BIDS dataset.")
+                    if 'echospacing' in fmap_arguments.keys() and not fmap_arguments['echospacing']==echospacing:
+                        raise RuntimeError("Inconsistent echospacing.")
 
                 fmap_args.update({"SEPhaseNeg": SEPhaseNeg,
                                   "SEPhasePos": SEPhasePos,
                                   "echospacing": "%.6f"%echospacing,
                                   "seunwarpdir": seunwarpdir,
                                   "avgrdcmethod": "TOPUP"})
-            #TODO add support for GE fieldmaps
+            else:
+                raise RuntimeError("Inconsistent fieldmap types or unknown type.")
 
         struct_stages_dict = OrderedDict([("PreFreeSurfer", partial(run_pre_freesurfer,
                                                                      path=args.output_dir,
@@ -350,16 +359,18 @@
         if not os.path.exists(fmriscout):
             fmriscout = "NONE"
 
-        fieldmap_set = layout.get_fieldmap(fmritcs)
-        if fieldmap_set and fieldmap_set["type"] == "epi":
-            SEPhaseNeg = None
-            SEPhasePos = None
-            for fieldmap in fieldmap_set["epi"]:
-                enc_dir = layout.get_metadata(fieldmap)["PhaseEncodingDirection"]
-                if "-" in enc_dir:
-                    SEPhaseNeg = fieldmap
-                else:
-                    SEPhasePos = fieldmap
+        fieldmap_set = layout.get_fieldmap(fmritcs,return_list=True)
+        if fieldmap_set:
+            fieldmap_trans = dict(zip(fieldmap_set[0],zip(*[d.values() for d in fieldmap_set])))
+            if set(fieldmap_trans['type']) == set(['epi']):
+                SEPhaseNeg = None
+                SEPhasePos = None
+                for fieldmap in fieldmap_trans["epi"]:
+                    enc_dir = layout.get_metadata(fieldmap)["PhaseEncodingDirection"]
+                    if "-" in enc_dir:
+                        SEPhaseNeg = fieldmap
+                    else:
+                        SEPhasePos = fieldmap
         echospacing = layout.get_metadata(fmritcs)["EffectiveEchoSpacing"]
         unwarpdir = layout.get_metadata(fmritcs)["PhaseEncodingDirection"]
         unwarpdir = unwarpdir.replace("i","x").replace("j", "y").replace("k", "z")
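Note: the fieldmap_trans one-liner used in the hunks above transposes the list of dicts returned by get_fieldmap(..., return_list=True) into a dict of lists, assuming every entry carries the same keys. A small illustration with made-up filenames:

    fieldmap_set = [{"type": "epi", "epi": "sub-01_dir-AP_epi.nii.gz"},
                    {"type": "epi", "epi": "sub-01_dir-PA_epi.nii.gz"}]
    # keys come from the first dict; values are zipped positionally across all dicts
    fieldmap_trans = dict(zip(fieldmap_set[0], zip(*[d.values() for d in fieldmap_set])))
    # -> {"type": ("epi", "epi"),
    #     "epi": ("sub-01_dir-AP_epi.nii.gz", "sub-01_dir-PA_epi.nii.gz")}   (key order may vary)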
Not implemented.") + + posdata = "@".join(pos) + negdata = "@".join(neg) + + dif_stages_dict = OrderedDict([("DiffusionPreprocessing", partial(run_diffusion_processsing, + path=args.output_dir, + subject="sub-%s"%subject_label, + posData=posdata, + negData=negdata, + echospacing=echospacing, + n_cpus=args.n_cpus, + PEdir=PEdir)) + ]) + + for stage, stage_func in dif_stages_dict.iteritems(): + if stage in args.stages: + stage_func()