From 30bee04f5581d0b67234cd00af85d6cad4bcb915 Mon Sep 17 00:00:00 2001 From: LuciMoore Date: Tue, 18 Jun 2024 11:27:19 -0700 Subject: [PATCH 01/24] first stab at removing chirality correction from src/postbibsnet.py --- src/postbibsnet.py | 559 ++++++++++----------------------------------- src/prebibsnet.py | 5 +- 2 files changed, 129 insertions(+), 435 deletions(-) diff --git a/src/postbibsnet.py b/src/postbibsnet.py index 8fa786b..2934d8f 100755 --- a/src/postbibsnet.py +++ b/src/postbibsnet.py @@ -22,11 +22,11 @@ ) SCRIPT_DIR = os.path.dirname(os.path.dirname(__file__)) -LR_REGISTR_PATH = os.path.join(SCRIPT_DIR, "bin", "LR_mask_registration.sh") -# Chirality-checking constants -CHIRALITY_CONST = dict(UNKNOWN=0, LEFT=1, RIGHT=2, BILATERAL=3) -LEFT = "Left-" -RIGHT = "Right-" +# LR_REGISTR_PATH = os.path.join(SCRIPT_DIR, "bin", "LR_mask_registration.sh") +# # Chirality-checking constants +# CHIRALITY_CONST = dict(UNKNOWN=0, LEFT=1, RIGHT=2, BILATERAL=3) +# LEFT = "Left-" +# RIGHT = "Right-" def run_postBIBSnet(j_args): @@ -37,69 +37,105 @@ def run_postBIBSnet(j_args): sub_ses = get_subj_ID_and_session(j_args) list_files(j_args["common"]["work_dir"]) - # Template selection values - age_months = j_args["ID"]["age_months"] - LOGGER.verbose("Age of participant: {} months".format(age_months)) + # # Template selection values + # age_months = j_args["ID"]["age_months"] + # LOGGER.verbose("Age of participant: {} months".format(age_months)) - # Get template closest to age - tmpl_age = get_template_age_closest_to( - age_months, os.path.join(SCRIPT_DIR, "data", "chirality_masks") - ) - LOGGER.verbose("Closest template-age is {} months".format(tmpl_age)) + # # Get template closest to age + # tmpl_age = get_template_age_closest_to( + # age_months, os.path.join(SCRIPT_DIR, "data", "chirality_masks") + # ) + # LOGGER.verbose("Closest template-age is {} months".format(tmpl_age)) # For left/right registration, use T1 for T1-only and T2 for T2-only, but # for T1-and-T2 combined use T2 for <22 months otherwise T1 (img quality) - if j_args["ID"]["has_T1w"] and j_args["ID"]["has_T2w"]: - t1or2 = 2 if int(age_months) < 22 else 1 # NOTE 22 cutoff might change - elif j_args["ID"]["has_T1w"]: - t1or2 = 1 - else: # if j_args["ID"]["has_T2w"]: - t1or2 = 2 - - LOGGER.info("Generating crude L/R mask for first iteration of chirality correction") - # Generate crude chirality correction mask file first - crude_left_right_mask_nifti_fpath = create_crude_LR_mask( - sub_ses, j_args - ) - - LOGGER.info("Generating L/R mask from registration using templates for second iteration of chirality correction") - # Run left/right registration script and chirality correction - left_right_mask_nifti_fpath = run_left_right_registration( - sub_ses, tmpl_age, t1or2, j_args - ) - LOGGER.info("Left/right image registration completed") + # if j_args["ID"]["has_T1w"] and j_args["ID"]["has_T2w"]: + # t1or2 = 2 if int(age_months) < 22 else 1 # NOTE 22 cutoff might change + # elif j_args["ID"]["has_T1w"]: + # t1or2 = 1 + # else: # if j_args["ID"]["has_T2w"]: + # t1or2 = 2 + + # LOGGER.info("Generating crude L/R mask for first iteration of chirality correction") + # # Generate crude chirality correction mask file first + # crude_left_right_mask_nifti_fpath = create_crude_LR_mask( + # sub_ses, j_args + # ) + + # LOGGER.info("Generating L/R mask from registration using templates for second iteration of chirality correction") + # # Run left/right registration script and chirality correction + # left_right_mask_nifti_fpath = 
run_left_right_registration( + # sub_ses, tmpl_age, t1or2, j_args + # ) + # LOGGER.info("Left/right image registration completed") # Dilate the L/R mask and feed the dilated mask into chirality correction - LOGGER.info("Now dilating left/right mask") - dilated_LRmask_fpath = dilate_LR_mask( - os.path.join(j_args["optional_out_dirs"]["postbibsnet"], *sub_ses), - left_right_mask_nifti_fpath - ) - LOGGER.info("Finished dilating left/right segmentation mask") + # LOGGER.info("Now dilating left/right mask") + # dilated_LRmask_fpath = dilate_LR_mask( + # os.path.join(j_args["optional_out_dirs"]["postbibsnet"], *sub_ses), + # left_right_mask_nifti_fpath + # ) + # LOGGER.info("Finished dilating left/right segmentation mask") - LOGGER.info("Running chirality correction") - nifti_file_paths, chiral_out_dir, xfm_ref_img_dict = run_correct_chirality(crude_left_right_mask_nifti_fpath, dilated_LRmask_fpath, j_args) + # LOGGER.info("Running chirality correction") + # nifti_file_paths, chiral_out_dir, xfm_ref_img_dict = run_correct_chirality(crude_left_right_mask_nifti_fpath, dilated_LRmask_fpath, j_args) LOGGER.info("Reverting corrected segmentation to native space") + out_BIBSnet_seg = os.path.join(j_args["optional_out_dirs"]["bibsnet"], *sub_ses, "output", "*.nii.gz") + for t in only_Ts_needed_for_bibsnet_model(j_args["ID"]): - nii_outfpath = reverse_regn_revert_to_native( - nifti_file_paths, chiral_out_dir, xfm_ref_img_dict[t], t, j_args - ) - - LOGGER.info("The BIBSnet segmentation has had its chirality checked and " - "registered if needed. Now making aseg-derived mask.") - - # TODO Skip mask creation if outputs already exist and not j_args[common][overwrite] - aseg_mask = make_asegderived_mask(j_args, chiral_out_dir, t, nii_outfpath) # NOTE Mask must be in native T1 space too - LOGGER.info(f"A mask of the BIBSnet T{t} segmentation has been produced") - - # Make nibabies input dirs + # Get preBIBSNet working directories in order to reference average image files + preBIBSnet_paths = {"parent": os.path.join( + j_args["optional_out_dirs"]["prebibsnet"], *sub_ses + )} + preBIBSnet_paths["averaged"] = os.path.join( + preBIBSnet_paths["parent"], "averaged") + preBIBSnet_paths["avg"] = dict() + + # Luci: generate derivatives folders to output final files to bibsnet_derivs_dir = os.path.join(j_args["optional_out_dirs"]["derivatives"], "bibsnet") derivs_dir = os.path.join(bibsnet_derivs_dir, *sub_ses, "anat") os.makedirs(derivs_dir, exist_ok=True) - copy_to_derivatives_dir(nii_outfpath, derivs_dir, sub_ses, t, "aseg_dseg") - copy_to_derivatives_dir(aseg_mask, derivs_dir, sub_ses, t, "brain_mask") + + LOGGER.info("Now registering BIBSnet segmentation to native space to generate derivatives.") + + # Luci note: take inverse of .mat file from prebibsnet + seg2native = os.path.join(j_args["optional_out_dirs"]["postbibsnet"], f"seg_reg_to_T{t}w_native.mat") + preBIBSnet_mat_glob = os.path.join(j_args["optional_out_dirs"]["postbibsnet"], *sub_ses, + f"preBIBSnet_*crop_T{t}w_to_BIBS_template.mat") + + preBIBSnet_mat = glob(preBIBSnet_mat_glob).pop() + run_FSL_sh_script(j_args, "convert_xfm", "-omat", + seg2native, "-inverse", preBIBSnet_mat) + + # Luci note: apply inverse mat to aseg from bibsnet stage and write out to derivatives folder + # Luci - this block of code could certainly be simplified + #for t in (1, 2): + preBIBSnet_paths["avg"][f"T{t}w_input"] = list() + for eachfile in glob(os.path.join(j_args["common"]["bids_dir"], + *sub_ses, "anat", + f"*T{t}w*.nii.gz")): + 
preBIBSnet_paths["avg"][f"T{t}w_input"].append(eachfile) + avg_img_name = "{}_000{}{}".format("_".join(sub_ses), t-1, ".nii.gz") + preBIBSnet_paths["avg"][f"T{t}w_avg"] = os.path.join( + preBIBSnet_paths["averaged"], avg_img_name + ) + + # Define path to aseg derivative output and revert to native space + aseg=os.path.join(derivs_dir, ("{}_space-T{}w_desc-{}.nii.gz".format("_".join(sub_ses), t, "aseg_dseg"))) + run_FSL_sh_script(j_args, "flirt", "-applyxfm", + "-ref", preBIBSnet_paths["avg"][f"T{t}w_avg"], "-in", out_BIBSnet_seg, + "-init", seg2native, "-o", aseg, + "-interp", "nearestneighbour") + + LOGGER.info("Now generating segmentation-derived masks.") + mask=os.path.join(derivs_dir, ("{}_space-T{}w_desc-{}.nii.gz".format("_".join(sub_ses), t, "aseg_dseg"))) + make_asegderived_mask(j_args, aseg, t, mask) + + LOGGER.info(f"A mask of the BIBSnet T{t} segmentation has been produced") + + # Generate sidecar jsons for derivatives input_path = os.path.join(j_args["common"]["bids_dir"], *sub_ses, "anat", f"*T{t}w.nii.gz") @@ -125,179 +161,10 @@ def run_postBIBSnet(j_args): # Write j_args out to logs LOGGER.debug(j_args) -def run_correct_chirality(crude_l_r_mask_nifti_fpath, l_r_mask_nifti_fpath, j_args): - """ - :param crude_l_r_mask_nifti_fpath: String, valid path to existing crude left/right - output mask file - :param l_r_mask_nifti_fpath: String, valid path to existing left/right - registration output mask file - :param j_args: Dictionary containing all args - :return nii_fpaths: Dictionary output of correct_chirality - :return chiral_out_dir: String file path to output directory - :return chiral_ref_img_fpaths_dict: Dictionary containing T1w and T2w file paths - """ - sub_ses = get_subj_ID_and_session(j_args) - - # Define paths to dirs/files used in chirality correction script - chiral_out_dir = os.path.join(j_args["optional_out_dirs"]["postbibsnet"], - *sub_ses, "chirality_correction") # subj_ID, session, - os.makedirs(chiral_out_dir, exist_ok=True) - segment_lookup_table_path = os.path.join(SCRIPT_DIR, "data", "look_up_tables", - "FreeSurferColorLUT.txt") - - # Get BIBSnet output file, and if there are multiple, then raise an error - out_BIBSnet_seg = os.path.join(j_args["optional_out_dirs"]["bibsnet"], - *sub_ses, "output", "*.nii.gz") - seg_BIBSnet_outfiles = glob(out_BIBSnet_seg) - if len(seg_BIBSnet_outfiles) != 1: - LOGGER.error(f"There must be exactly one BIBSnet segmentation file: " - "{}\nResume at postBIBSnet stage once this is fixed." 
- .format(out_BIBSnet_seg)) - sys.exit() - - # Select an arbitrary T1w image path to use to get T1w space - # (unless in T2w-only mode, in which case use an arbitrary T2w image) - chiral_ref_img_fpaths_dict = {} - for t in only_Ts_needed_for_bibsnet_model(j_args["ID"]): - chiral_ref_img_fpaths = glob(os.path.join( - j_args["common"]["bids_dir"], *sub_ses, "anat", f"*_T{t}w.nii.gz" - )) - chiral_ref_img_fpaths.sort() - chiral_ref_img_fpaths_dict[t] = chiral_ref_img_fpaths[0] - - # Run chirality correction first using the crude LR mask applied to the segmentation output from nnUNet in the BIBSNet stage - msg = "{} running chirality correction on " + seg_BIBSnet_outfiles[0] - LOGGER.info(msg.format("Now")) - nii_fpaths = correct_chirality( - seg_BIBSnet_outfiles[0], segment_lookup_table_path, - crude_l_r_mask_nifti_fpath, chiral_out_dir, 1 - ) - - # Run chirality correction a second time using the refined LR mask generated from registration with template files applied to the segmentation corrected with the crude LR mask - msg = "{} running chirality correction on " + nii_fpaths["crudecorrected"] - LOGGER.info(msg.format("Now")) - nii_fpaths = correct_chirality( - nii_fpaths["crudecorrected"], segment_lookup_table_path, - l_r_mask_nifti_fpath, chiral_out_dir, 2 - ) - - LOGGER.info(msg.format("Finished")) - - return nii_fpaths, chiral_out_dir, chiral_ref_img_fpaths_dict - -def create_crude_LR_mask(sub_ses, j_args): - # Define paths to dirs/files used in chirality correction script - outdir_LR_reg = os.path.join(j_args["optional_out_dirs"]["postbibsnet"], - *sub_ses) - os.makedirs(outdir_LR_reg, exist_ok=True) - - chiral_out_dir = os.path.join(j_args["optional_out_dirs"]["postbibsnet"], - *sub_ses, "chirality_correction") # subj_ID, session, - os.makedirs(chiral_out_dir, exist_ok=True) - - # Get BIBSnet output file, and if there are multiple, then raise an error - out_BIBSnet_seg = os.path.join(j_args["optional_out_dirs"]["bibsnet"], - *sub_ses, "output", "*.nii.gz") - seg_BIBSnet_outfiles = glob(out_BIBSnet_seg) - if len(seg_BIBSnet_outfiles) != 1: - LOGGER.error(f"There must be exactly one BIBSnet segmentation file: " - "{}\nResume at postBIBSnet stage once this is fixed." 
- .format(out_BIBSnet_seg)) - sys.exit() - - crude_left_right_mask_nifti_fpath = os.path.join(outdir_LR_reg, "crude_LRmask.nii.gz") - - img = nib.load(seg_BIBSnet_outfiles[0]) - data = img.get_fdata() - affine = img.affine - - # Determine the midpoint of X-axis and make new image - midpoint_x = data.shape[0] // 2 - modified_data = np.zeros_like(data) - - # Assign value 1 to right-side voxels with values greater than 0 value 2 to left-side voxels with values greater than 0 (note that these actually correspond to left and right brain hemispheres respectively) - modified_data[midpoint_x:, :, :][data[midpoint_x:, :, :] > 0] = 1 - modified_data[:midpoint_x, :, :][data[:midpoint_x, :, :] > 0] = 2 - - #nib.save(img, seg_BIBSnet_outfiles[0]) - save_nifti(modified_data, affine, crude_left_right_mask_nifti_fpath) - - return crude_left_right_mask_nifti_fpath - def save_nifti(data, affine, file_path): img = nib.Nifti1Image(data, affine) nib.save(img, file_path) - -def run_left_right_registration(sub_ses, age_months, t1or2, j_args): - """ - :param sub_ses: List with either only the subject ID str or the session too - :param age_months: String or int, the subject's age [range] in months - :param t1or2: Int, 1 to use T1w image for registration or 2 to use T2w - :param j_args: Dictionary containing all args - :return: String, path to newly created left/right registration output file - """ - # Paths for left & right registration - chiral_in_dir = os.path.join(SCRIPT_DIR, "data", "chirality_masks") - tmpl_head = os.path.join(chiral_in_dir, "{}mo_T{}w_acpc_dc_restore.nii.gz") - tmpl_mask = os.path.join(chiral_in_dir, "{}mo_template_LRmask.nii.gz") - - # Grab the first resized T?w from preBIBSnet to use for L/R registration - last_digit = (t1or2 - 1 if j_args["ID"]["has_T1w"] - and j_args["ID"]["has_T2w"] else 0) - first_subject_head = glob(os.path.join( - j_args["optional_out_dirs"]["bibsnet"], *sub_ses, "input", - "*{}*_000{}.nii.gz".format("_".join(sub_ses), last_digit) - ))[0] - - # Make postBIBSnet output directory for this subject/session - outdir_LR_reg = os.path.join(j_args["optional_out_dirs"]["postbibsnet"], - *sub_ses) - os.makedirs(outdir_LR_reg, exist_ok=True) - - # Left/right registration output file path (this function's return value) - left_right_mask_nifti_fpath = os.path.join(outdir_LR_reg, "LRmask.nii.gz") - - # Run left & right registration - msg = "{} left/right registration on {}" - if (j_args["common"]["overwrite"] or not - os.path.exists(left_right_mask_nifti_fpath)): - try: - # In bin/LR_mask_registration.sh, the last 4 vars in cmd_LR_reg are - # named SubjectHead, TemplateHead, TemplateMask, and OutputMaskFile - cmd_LR_reg = (LR_REGISTR_PATH, first_subject_head, - tmpl_head.format(age_months, t1or2), - tmpl_mask.format(age_months), - left_right_mask_nifti_fpath) - LOGGER.verbose(msg.format("Now running", "\n".join( - (first_subject_head, " ".join(cmd_LR_reg)) - ))) - process = subprocess.Popen(cmd_LR_reg, stdout=subprocess.PIPE, universal_newlines=True) - with process.stdout: - for line in process.stdout: - LOGGER.subprocess(line, extra={'id': 'ANTS'}) - exitcode = process.wait() - if exitcode == 0: - LOGGER.verbose("LR Registration completed") - else: - LOGGER.error(f"LR Registration failed to complete, exitcode {exitcode}") - - - # Tell the user if ANTS crashes due to a memory error - except subprocess.CalledProcessError as e: - if e.returncode == 143: - LOGGER.error(msg.format("ANTS", first_subject_head) - + " failed because it ran without enough memory." 
- " Try running it again, but with more memory.\n") - sys.exit(e) - else: - LOGGER.info(msg.format("Skipping", "{} because output already exists at {}".format( - first_subject_head, left_right_mask_nifti_fpath - ))) - LOGGER.info(msg.format("Finished", first_subject_head)) # TODO Only print this message if not skipped (and do the same for all other stages) - return left_right_mask_nifti_fpath - - def make_asegderived_mask(j_args, aseg_dir, t, nii_outfpath): """ Create mask file(s) derived from aseg file(s) in aseg_dir @@ -338,154 +205,6 @@ def copy_to_derivatives_dir(file_to_copy, derivs_dir, sub_ses, space, new_fname_ "{}_space-T{}w_desc-{}.nii.gz".format("_".join(sub_ses), space, new_fname_pt) ))) - -def correct_chirality(nifti_input_file_path, segment_lookup_table, - nii_fpath_LR_mask, chiral_out_dir, iteration): - """ - Creates an output file with chirality corrections fixed. - :param nifti_input_file_path: String, path to a segmentation file with - possible chirality problems - :param segment_lookup_table: String, path to FreeSurfer-style look-up table - :param nii_fpath_LR_mask: String, path to a mask file that - distinguishes between left and right - :param xfm_ref_img: String, path to (T1w, unless running in T2w-only mode) - image to use as a reference when applying transform - :param j_args: Dictionary containing all args - :param iteration: either 1 or 2 for iteration1 or iteration2 of chirality correction - :return: Dict with paths to native and chirality-corrected images - """ - if iteration==1: - nifti_file_paths = dict() - for which_nii in ("native-T1", "native-T2", "crudecorrected"): - nifti_file_paths[which_nii] = os.path.join(chiral_out_dir, "_".join(( - which_nii, os.path.basename(nifti_input_file_path) - ))) - - free_surfer_label_to_region = get_id_to_region_mapping(segment_lookup_table) - segment_name_to_number = {v: k for k, v in free_surfer_label_to_region.items()} - img = nib.load(nifti_input_file_path) - data = img.get_data() - left_right_img = nib.load(nii_fpath_LR_mask) - left_right_data = left_right_img.get_data() - - new_data = data.copy() - data_shape = img.header.get_data_shape() - left_right_data_shape = left_right_img.header.get_data_shape() - width = data_shape[0] - height = data_shape[1] - depth = data_shape[2] - assert \ - width == left_right_data_shape[0] and height == left_right_data_shape[1] and depth == left_right_data_shape[2] - for i in range(width): - for j in range(height): - for k in range(depth): - voxel = data[i][j][k] - region = free_surfer_label_to_region[voxel] - chirality_voxel = int(left_right_data[i][j][k]) - if not (region.startswith(LEFT) or region.startswith(RIGHT)): - continue - if chirality_voxel == CHIRALITY_CONST["LEFT"] or chirality_voxel == CHIRALITY_CONST["RIGHT"]: - check_and_correct_region( - chirality_voxel == CHIRALITY_CONST["LEFT"], region, segment_name_to_number, new_data, i, j, k) - fixed_img = nib.Nifti1Image(new_data, img.affine, img.header) - nib.save(fixed_img, nifti_file_paths["crudecorrected"]) - - elif iteration==2: - # Drop "crudecorrected_" from nifti_input_file_path to make filenames cleaner - nifti_input_file_path_mod=(os.path.basename(nifti_input_file_path)).split('_', 1)[1] - nifti_file_paths = dict() - for which_nii in ("native-T1", "native-T2", "corrected"): - nifti_file_paths[which_nii] = os.path.join(chiral_out_dir, "_".join(( - which_nii, nifti_input_file_path_mod - ))) - - free_surfer_label_to_region = get_id_to_region_mapping(segment_lookup_table) - segment_name_to_number = {v: k for k, v in 
free_surfer_label_to_region.items()} - img = nib.load(nifti_input_file_path) - data = img.get_data() - left_right_img = nib.load(nii_fpath_LR_mask) - left_right_data = left_right_img.get_data() - - new_data = data.copy() - data_shape = img.header.get_data_shape() - left_right_data_shape = left_right_img.header.get_data_shape() - width = data_shape[0] - height = data_shape[1] - depth = data_shape[2] - assert \ - width == left_right_data_shape[0] and height == left_right_data_shape[1] and depth == left_right_data_shape[2] - for i in range(width): - for j in range(height): - for k in range(depth): - voxel = data[i][j][k] - region = free_surfer_label_to_region[voxel] - chirality_voxel = int(left_right_data[i][j][k]) - if not (region.startswith(LEFT) or region.startswith(RIGHT)): - continue - if chirality_voxel == CHIRALITY_CONST["LEFT"] or chirality_voxel == CHIRALITY_CONST["RIGHT"]: - check_and_correct_region( - chirality_voxel == CHIRALITY_CONST["LEFT"], region, segment_name_to_number, new_data, i, j, k) - fixed_img = nib.Nifti1Image(new_data, img.affine, img.header) - nib.save(fixed_img, nifti_file_paths["corrected"]) - return nifti_file_paths - - -def get_id_to_region_mapping(mapping_file_name, separator=None): - """ - Author: Paul Reiners - Create a map from region ID to region name from a from a FreeSurfer-style - look-up table. This function parses a FreeSurfer-style look-up table. It - then returns a map that maps region IDs to their names. - :param mapping_file_name: String, the name or path to the look-up table - :param separator: String delimiter separating parts of look-up table lines - :return: Dictionary, a map from the ID of a region to its name - """ - with open(mapping_file_name, 'r') as infile: - lines = infile.readlines() - - id_to_region = {} - for line in lines: - line = line.strip() - if line.startswith('#') or line == '': - continue - if separator: - parts = line.split(separator) - else: - parts = line.split() - region_id = int(parts[0]) - region = parts[1] - id_to_region[region_id] = region - return id_to_region - - -def check_and_correct_region(should_be_left, region, segment_name_to_number, - new_data, chirality, floor_ceiling, scanner_bore): - """ - Ensures that a voxel in NIFTI data is in the correct region by flipping - the label if it's mislabeled - :param should_be_left (Boolean): This voxel *should be on the head's LHS - :param region: String naming the anatomical region - :param segment_name_to_number (map): Map from anatomical regions - to identifying numbers - :param new_data (3-d in array): segmentation data passed by reference to - be fixed if necessary - :param chirality: x-coordinate into new_data - :param floor_ceiling: y-coordinate into new_data - :param scanner_bore: z-coordinate into new_data - """ - # expected_prefix, wrong_prefix = (LEFT, RIGHT) if should_be_left else (RIGHT, LEFT) - if should_be_left: - expected_prefix = LEFT - wrong_prefix = RIGHT - else: - expected_prefix = RIGHT - wrong_prefix = LEFT - if region.startswith(wrong_prefix): - flipped_region = expected_prefix + region[len(wrong_prefix):] - flipped_id = segment_name_to_number[flipped_region] - new_data[chirality][floor_ceiling][scanner_bore] = flipped_id - - def dilate_LR_mask(sub_LRmask_dir, anatfile): """ Taken from https://github.com/DCAN-Labs/SynthSeg/blob/master/SynthSeg/dcan/img_processing/chirality_correction/dilate_LRmask.py @@ -616,69 +335,41 @@ def generate_sidecar_json(sub_ses, reference_path, derivs_dir, t, desc): json.dump(sidecar, file, indent = 4) -def 
get_template_age_closest_to(age, templates_dir): - """ - :param age: Int, participant age in months - :param templates_dir: String, valid path to existing directory which - contains template image files - :return: String, the age (or range of ages) in months closest to the - participant's with a template image file in templates_dir - """ - template_ages = list() - template_ranges = dict() - - # Get list of all int ages (in months) that have template files - for tmpl_path in glob(os.path.join(templates_dir, - "*mo_template_LRmask.nii.gz")): - tmpl_age = os.path.basename(tmpl_path).split("mo", 1)[0] - if "-" in tmpl_age: # len(tmpl_age) <3: - for each_age in tmpl_age.split("-"): - template_ages.append(int(each_age)) - template_ranges[template_ages[-1]] = tmpl_age - else: - template_ages.append(int(tmpl_age)) - - # Get template age closest to subject age, then return template age - closest_age = get_age_closest_to(age, template_ages) - return (template_ranges[closest_age] if closest_age - in template_ranges else str(closest_age)) - - -def reverse_regn_revert_to_native(nifti_file_paths, chiral_out_dir, - xfm_ref_img, t, j_args): - """ - :param nifti_file_paths: Dict with valid paths to native and - chirality-corrected images - :param chiral_out_dir: String, valid path to existing directory to save - chirality-corrected images into - :param xfm_ref_img: String, path to (T1w, unless running in T2w-only mode) - image to use as a reference when applying transform - :param t: 1 or 2, whether running on T1 or T2 - :param j_args: Dictionary containing all args - :return: String, valid path to existing image reverted to native - """ - sub_ses = get_subj_ID_and_session(j_args) - - # Undo resizing right here (do inverse transform) using RobustFOV so - # padding isn't necessary; revert aseg to native space - dummy_copy = "_dummy".join(split_2_exts(nifti_file_paths["corrected"])) - shutil.copy2(nifti_file_paths["corrected"], dummy_copy) - - seg2native = os.path.join(chiral_out_dir, f"seg_reg_to_T{t}w_native.mat") - preBIBSnet_mat_glob = os.path.join( - j_args["optional_out_dirs"]["postbibsnet"], *sub_ses, - f"preBIBSnet_*crop_T{t}w_to_BIBS_template.mat" # TODO Name this outside of pre- and postBIBSnet then pass it to both - ) - preBIBSnet_mat = glob(preBIBSnet_mat_glob).pop() - run_FSL_sh_script(j_args, "convert_xfm", "-omat", - seg2native, "-inverse", preBIBSnet_mat) - # TODO Define preBIBSnet_mat path outside of stages because it's used by preBIBSnet and postBIBSnet - - run_FSL_sh_script(j_args, "flirt", "-applyxfm", - "-ref", xfm_ref_img, "-in", dummy_copy, - "-init", seg2native, "-o", nifti_file_paths[f"native-T{t}"], - "-interp", "nearestneighbour") - return nifti_file_paths[f"native-T{t}"] +# def reverse_regn_revert_to_native(nifti_file_paths, chiral_out_dir, +# xfm_ref_img, t, j_args): +# """ +# :param nifti_file_paths: Dict with valid paths to native and +# chirality-corrected images +# :param chiral_out_dir: String, valid path to existing directory to save +# chirality-corrected images into +# :param xfm_ref_img: String, path to (T1w, unless running in T2w-only mode) +# image to use as a reference when applying transform +# :param t: 1 or 2, whether running on T1 or T2 +# :param j_args: Dictionary containing all args +# :return: String, valid path to existing image reverted to native +# """ +# sub_ses = get_subj_ID_and_session(j_args) + +# # Undo resizing right here (do inverse transform) using RobustFOV so +# # padding isn't necessary; revert aseg to native space +# dummy_copy = 
"_dummy".join(split_2_exts(nifti_file_paths["corrected"])) +# shutil.copy2(nifti_file_paths["corrected"], dummy_copy) + +# seg2native = os.path.join(chiral_out_dir, f"seg_reg_to_T{t}w_native.mat") +# preBIBSnet_mat_glob = os.path.join( +# j_args["optional_out_dirs"]["postbibsnet"], *sub_ses, +# f"preBIBSnet_*crop_T{t}w_to_BIBS_template.mat" # TODO Name this outside of pre- and postBIBSnet then pass it to both +# ) +# preBIBSnet_mat = glob(preBIBSnet_mat_glob).pop() +# run_FSL_sh_script(j_args, "convert_xfm", "-omat", +# seg2native, "-inverse", preBIBSnet_mat) +# # TODO Define preBIBSnet_mat path outside of stages because it's used by preBIBSnet and postBIBSnet + +# run_FSL_sh_script(j_args, "flirt", "-applyxfm", +# "-ref", xfm_ref_img, "-in", dummy_copy, +# "-init", seg2native, "-o", nifti_file_paths[f"native-T{t}"], +# "-interp", "nearestneighbour") +# return nifti_file_paths[f"native-T{t}"] def remove_extra_clusters_from_mask(path_to_mask, path_to_aseg = None): diff --git a/src/prebibsnet.py b/src/prebibsnet.py index 65da432..ac4ba3f 100755 --- a/src/prebibsnet.py +++ b/src/prebibsnet.py @@ -146,10 +146,13 @@ def run_preBIBSnet(j_args): # is chosen, so postBIBSnet can use the correct/chosen .mat file concat_mat = transformed_images[f"T{t}w_crop2BIBS_mat"] LOGGER.debug(f"concat_mat: {concat_mat}") - out_mat_fpath = os.path.join( # TODO Pass this in (or out) from the beginning so we don't have to build the path twice (once here and once in postBIBSnet) + + # TODO Pass this in (or out) from the beginning so we don't have to build the path twice (once here and once in postBIBSnet) + out_mat_fpath = os.path.join( j_args["optional_out_dirs"]["postbibsnet"], *sub_ses, "preBIBSnet_" + os.path.basename(concat_mat) ) + list_files(j_args["optional_out_dirs"]["postbibsnet"]) list_files(j_args["common"]["work_dir"]) LOGGER.debug(f"out_mat_fath: {out_mat_fpath}") From 65793550fe9845ad26f6bf27f56bb41d75cb48a2 Mon Sep 17 00:00:00 2001 From: LuciMoore Date: Fri, 28 Jun 2024 17:52:47 -0700 Subject: [PATCH 02/24] cleaned up postbibsnet a bit more --- src/postbibsnet.py | 59 ++++------------------------------------------ 1 file changed, 4 insertions(+), 55 deletions(-) diff --git a/src/postbibsnet.py b/src/postbibsnet.py index 2934d8f..c47d0f8 100755 --- a/src/postbibsnet.py +++ b/src/postbibsnet.py @@ -15,19 +15,11 @@ from src.utilities import ( list_files, get_subj_ID_and_session, - get_age_closest_to, only_Ts_needed_for_bibsnet_model, - run_FSL_sh_script, - split_2_exts + run_FSL_sh_script ) SCRIPT_DIR = os.path.dirname(os.path.dirname(__file__)) -# LR_REGISTR_PATH = os.path.join(SCRIPT_DIR, "bin", "LR_mask_registration.sh") -# # Chirality-checking constants -# CHIRALITY_CONST = dict(UNKNOWN=0, LEFT=1, RIGHT=2, BILATERAL=3) -# LEFT = "Left-" -# RIGHT = "Right-" - def run_postBIBSnet(j_args): """ @@ -37,49 +29,6 @@ def run_postBIBSnet(j_args): sub_ses = get_subj_ID_and_session(j_args) list_files(j_args["common"]["work_dir"]) - # # Template selection values - # age_months = j_args["ID"]["age_months"] - # LOGGER.verbose("Age of participant: {} months".format(age_months)) - - # # Get template closest to age - # tmpl_age = get_template_age_closest_to( - # age_months, os.path.join(SCRIPT_DIR, "data", "chirality_masks") - # ) - # LOGGER.verbose("Closest template-age is {} months".format(tmpl_age)) - - # For left/right registration, use T1 for T1-only and T2 for T2-only, but - # for T1-and-T2 combined use T2 for <22 months otherwise T1 (img quality) - # if j_args["ID"]["has_T1w"] and j_args["ID"]["has_T2w"]: - # 
t1or2 = 2 if int(age_months) < 22 else 1 # NOTE 22 cutoff might change - # elif j_args["ID"]["has_T1w"]: - # t1or2 = 1 - # else: # if j_args["ID"]["has_T2w"]: - # t1or2 = 2 - - # LOGGER.info("Generating crude L/R mask for first iteration of chirality correction") - # # Generate crude chirality correction mask file first - # crude_left_right_mask_nifti_fpath = create_crude_LR_mask( - # sub_ses, j_args - # ) - - # LOGGER.info("Generating L/R mask from registration using templates for second iteration of chirality correction") - # # Run left/right registration script and chirality correction - # left_right_mask_nifti_fpath = run_left_right_registration( - # sub_ses, tmpl_age, t1or2, j_args - # ) - # LOGGER.info("Left/right image registration completed") - - # Dilate the L/R mask and feed the dilated mask into chirality correction - # LOGGER.info("Now dilating left/right mask") - # dilated_LRmask_fpath = dilate_LR_mask( - # os.path.join(j_args["optional_out_dirs"]["postbibsnet"], *sub_ses), - # left_right_mask_nifti_fpath - # ) - # LOGGER.info("Finished dilating left/right segmentation mask") - - # LOGGER.info("Running chirality correction") - # nifti_file_paths, chiral_out_dir, xfm_ref_img_dict = run_correct_chirality(crude_left_right_mask_nifti_fpath, dilated_LRmask_fpath, j_args) - LOGGER.info("Reverting corrected segmentation to native space") out_BIBSnet_seg = os.path.join(j_args["optional_out_dirs"]["bibsnet"], *sub_ses, "output", "*.nii.gz") @@ -92,7 +41,7 @@ def run_postBIBSnet(j_args): preBIBSnet_paths["parent"], "averaged") preBIBSnet_paths["avg"] = dict() - # Luci: generate derivatives folders to output final files to + # Generate derivatives folders to output final files to bibsnet_derivs_dir = os.path.join(j_args["optional_out_dirs"]["derivatives"], "bibsnet") derivs_dir = os.path.join(bibsnet_derivs_dir, *sub_ses, "anat") @@ -100,7 +49,7 @@ def run_postBIBSnet(j_args): LOGGER.info("Now registering BIBSnet segmentation to native space to generate derivatives.") - # Luci note: take inverse of .mat file from prebibsnet + # Take inverse of .mat file from prebibsnet seg2native = os.path.join(j_args["optional_out_dirs"]["postbibsnet"], f"seg_reg_to_T{t}w_native.mat") preBIBSnet_mat_glob = os.path.join(j_args["optional_out_dirs"]["postbibsnet"], *sub_ses, f"preBIBSnet_*crop_T{t}w_to_BIBS_template.mat") @@ -109,7 +58,7 @@ def run_postBIBSnet(j_args): run_FSL_sh_script(j_args, "convert_xfm", "-omat", seg2native, "-inverse", preBIBSnet_mat) - # Luci note: apply inverse mat to aseg from bibsnet stage and write out to derivatives folder + # Apply inverse mat to aseg from bibsnet stage and write out to derivatives folder # Luci - this block of code could certainly be simplified #for t in (1, 2): preBIBSnet_paths["avg"][f"T{t}w_input"] = list() From 76899cfbbd73e7b0904c3766717a6fd3150add0d Mon Sep 17 00:00:00 2001 From: LuciMoore Date: Mon, 1 Jul 2024 18:15:43 -0500 Subject: [PATCH 03/24] fixed minor error and cleaned up code a bit in postbibsnet - seems to run successfully now --- src/postbibsnet.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/src/postbibsnet.py b/src/postbibsnet.py index c47d0f8..5d30017 100755 --- a/src/postbibsnet.py +++ b/src/postbibsnet.py @@ -1,14 +1,11 @@ import os import shutil from glob import glob -import sys -import subprocess from nipype.interfaces import fsl import nibabel as nib import numpy as np import json from scipy import ndimage -import csv from src.logger import LOGGER @@ -30,8 +27,8 @@ def run_postBIBSnet(j_args): 
list_files(j_args["common"]["work_dir"]) LOGGER.info("Reverting corrected segmentation to native space") - out_BIBSnet_seg = os.path.join(j_args["optional_out_dirs"]["bibsnet"], *sub_ses, "output", "*.nii.gz") - + out_BIBSnet_seg = os.path.join(j_args["optional_out_dirs"]["bibsnet"], *sub_ses, "output", "{}_optimal_resized.nii.gz".format("_".join(sub_ses))) + for t in only_Ts_needed_for_bibsnet_model(j_args["ID"]): # Get preBIBSNet working directories in order to reference average image files preBIBSnet_paths = {"parent": os.path.join( @@ -59,8 +56,6 @@ def run_postBIBSnet(j_args): seg2native, "-inverse", preBIBSnet_mat) # Apply inverse mat to aseg from bibsnet stage and write out to derivatives folder - # Luci - this block of code could certainly be simplified - #for t in (1, 2): preBIBSnet_paths["avg"][f"T{t}w_input"] = list() for eachfile in glob(os.path.join(j_args["common"]["bids_dir"], *sub_ses, "anat", @@ -108,7 +103,7 @@ def run_postBIBSnet(j_args): return j_args # Write j_args out to logs - LOGGER.debug(j_args) + #LOGGER.debug(j_args) def save_nifti(data, affine, file_path): img = nib.Nifti1Image(data, affine) From 46f40a3722e290c2c1eae2ac8cd2189176047eb8 Mon Sep 17 00:00:00 2001 From: LuciMoore Date: Mon, 1 Jul 2024 19:00:22 -0500 Subject: [PATCH 04/24] updated postbibsnet.py to nest intermediate postbibsnet output files appropriately under sub/ses --- src/postbibsnet.py | 143 +-------------------------------------------- 1 file changed, 3 insertions(+), 140 deletions(-) diff --git a/src/postbibsnet.py b/src/postbibsnet.py index 5d30017..e90f2de 100755 --- a/src/postbibsnet.py +++ b/src/postbibsnet.py @@ -39,15 +39,14 @@ def run_postBIBSnet(j_args): preBIBSnet_paths["avg"] = dict() # Generate derivatives folders to output final files to - bibsnet_derivs_dir = os.path.join(j_args["optional_out_dirs"]["derivatives"], - "bibsnet") + bibsnet_derivs_dir = os.path.join(j_args["optional_out_dirs"]["derivatives"], "bibsnet") derivs_dir = os.path.join(bibsnet_derivs_dir, *sub_ses, "anat") os.makedirs(derivs_dir, exist_ok=True) LOGGER.info("Now registering BIBSnet segmentation to native space to generate derivatives.") - + # Take inverse of .mat file from prebibsnet - seg2native = os.path.join(j_args["optional_out_dirs"]["postbibsnet"], f"seg_reg_to_T{t}w_native.mat") + seg2native = os.path.join(j_args["optional_out_dirs"]["postbibsnet"], *sub_ses, f"seg_reg_to_T{t}w_native.mat") preBIBSnet_mat_glob = os.path.join(j_args["optional_out_dirs"]["postbibsnet"], *sub_ses, f"preBIBSnet_*crop_T{t}w_to_BIBS_template.mat") @@ -149,106 +148,6 @@ def copy_to_derivatives_dir(file_to_copy, derivs_dir, sub_ses, space, new_fname_ "{}_space-T{}w_desc-{}.nii.gz".format("_".join(sub_ses), space, new_fname_pt) ))) -def dilate_LR_mask(sub_LRmask_dir, anatfile): - """ - Taken from https://github.com/DCAN-Labs/SynthSeg/blob/master/SynthSeg/dcan/img_processing/chirality_correction/dilate_LRmask.py - :param sub_LRmask_dir: String, path to real directory to make subdirectory - in; the subdirectory will contain mask files - :param anatfile: String, valid path to existing anatomical image file - """ - # Make subdirectory to save masks in & generic mask file name format-string - parent_dir = os.path.join(sub_LRmask_dir, "lrmask_dil_wd") - os.makedirs(parent_dir, exist_ok=True) - mask = os.path.join(parent_dir, "{}mask{}.nii.gz") - - # Make left, right, and middle masks using FSL - maths = fsl.ImageMaths(in_file=anatfile, op_string='-thr 1 -uthr 1', - out_file=mask.format("L", "")) - maths.run() - maths = 
fsl.ImageMaths(in_file=anatfile, op_string='-thr 2 -uthr 2', - out_file=mask.format("R", "")) - maths.run() - maths.run() - maths = fsl.ImageMaths(in_file=anatfile, op_string='-thr 3 -uthr 3', - out_file=mask.format("M", "")) - maths.run() - - # dilate, fill, and erode each mask in order to get rid of holes - # (also binarize L and M images in order to perform binary operations) - maths = fsl.ImageMaths(in_file=mask.format("L", ""), - op_string='-dilM -dilM -dilM -fillh -ero', - out_file=mask.format("L", "_holes_filled")) - maths.run() - maths = fsl.ImageMaths(in_file=mask.format("R", ""), - op_string='-bin -dilM -dilM -dilM -fillh -ero', - out_file=mask.format("R", "_holes_filled")) - maths.run() - maths = fsl.ImageMaths(in_file=mask.format("M", ""), - op_string='-bin -dilM -dilM -dilM -fillh -ero', - out_file=mask.format("M", "_holes_filled")) - maths.run() - - # Reassign values of 2 and 3 to R and M masks (L mask already a value of 1) - label_anat_masks = {"L": mask.format("L", "_holes_filled"), - "R": mask.format("R", "_holes_filled_label2"), - "M": mask.format("M", "_holes_filled_label3")} - maths = fsl.ImageMaths(in_file=mask.format("R", "_holes_filled"), - op_string='-mul 2', out_file=label_anat_masks["R"]) - maths.run() - - maths = fsl.ImageMaths(in_file=mask.format("M", "_holes_filled"), - op_string='-mul 3', out_file=label_anat_masks["M"]) - maths.run() - - # recombine new L, R, and M mask files and then dilate the result - masks_LR = {"dilated": mask.format("dilated_LR", ""), - "recombined": mask.format("recombined_", "_LR")} - maths = fsl.ImageMaths(in_file=label_anat_masks["L"], - op_string='-add {}'.format(label_anat_masks["R"]), - out_file=masks_LR["recombined"]) - maths.run() - maths = fsl.ImageMaths(in_file=label_anat_masks["M"], - op_string="-add {}".format(masks_LR["recombined"]), - out_file=masks_LR["dilated"]) - maths.run() - - # Fix incorrect values resulting from recombining dilated components - orig_LRmask_img = nib.load(os.path.join(sub_LRmask_dir, "LRmask.nii.gz")) - orig_LRmask_data = orig_LRmask_img.get_fdata() - - fill_LRmask_img = nib.load(masks_LR["dilated"]) - fill_LRmask_data = fill_LRmask_img.get_fdata() - - # Flatten numpy arrays - orig_LRmask_data_2D = orig_LRmask_data.reshape((182, 39676), order='C') - orig_LRmask_data_1D = orig_LRmask_data_2D.reshape(7221032, order='C') - - fill_LRmask_data_2D = fill_LRmask_data.reshape((182, 39676), order='C') - fill_LRmask_data_1D = fill_LRmask_data_2D.reshape(7221032, order='C') - - # grab index values of voxels with a value greater than 2.0 in filled L/R mask - voxel_check = np.where(fill_LRmask_data_1D > 2.0) - - # Replace possible overlapping label values with corresponding label values from initial mask - for i in voxel_check[:]: - fill_LRmask_data_1D[i] = orig_LRmask_data_1D[i] - - # reshape numpy array - fill_LRmask_data_2D = fill_LRmask_data_1D.reshape((182, 39676), order='C') - fill_LRmask_data_3D = fill_LRmask_data_2D.reshape((182, 218, 182), order='C') - - # save new numpy array as image - empty_header = nib.Nifti1Header() - out_img = nib.Nifti1Image(fill_LRmask_data_3D, orig_LRmask_img.affine, empty_header) - out_fpath = mask.format("LR", "_dil") # os.path.join(sub_LRmask_dir, 'LRmask_dil.nii.gz') - nib.save(out_img, out_fpath) - - #remove working directory with intermediate outputs - #shutil.rmtree('lrmask_dil_wd') - - return out_fpath - - def generate_sidecar_json(sub_ses, reference_path, derivs_dir, t, desc): """ :param sub_ses: List with either only the subject ID str or the session too @@ -279,42 
+178,6 @@ def generate_sidecar_json(sub_ses, reference_path, derivs_dir, t, desc):
         json.dump(sidecar, file, indent = 4)
 
 
-# def reverse_regn_revert_to_native(nifti_file_paths, chiral_out_dir,
-#                        xfm_ref_img, t, j_args):
-#     """
-#     :param nifti_file_paths: Dict with valid paths to native and
-#                        chirality-corrected images
-#     :param chiral_out_dir: String, valid path to existing directory to save
-#                      chirality-corrected images into
-#     :param xfm_ref_img: String, path to (T1w, unless running in T2w-only mode)
-#                    image to use as a reference when applying transform
-#     :param t: 1 or 2, whether running on T1 or T2
-#     :param j_args: Dictionary containing all args
-#     :return: String, valid path to existing image reverted to native
-#     """
-#     sub_ses = get_subj_ID_and_session(j_args)
-
-#     # Undo resizing right here (do inverse transform) using RobustFOV so
-#     # padding isn't necessary; revert aseg to native space
-#     dummy_copy = "_dummy".join(split_2_exts(nifti_file_paths["corrected"]))
-#     shutil.copy2(nifti_file_paths["corrected"], dummy_copy)
-
-#     seg2native = os.path.join(chiral_out_dir, f"seg_reg_to_T{t}w_native.mat")
-#     preBIBSnet_mat_glob = os.path.join(
-#         j_args["optional_out_dirs"]["postbibsnet"], *sub_ses,
-#         f"preBIBSnet_*crop_T{t}w_to_BIBS_template.mat"  # TODO Name this outside of pre- and postBIBSnet then pass it to both
-#     )
-#     preBIBSnet_mat = glob(preBIBSnet_mat_glob).pop()
-#     run_FSL_sh_script(j_args, "convert_xfm", "-omat",
-#                       seg2native, "-inverse", preBIBSnet_mat)
-#     # TODO Define preBIBSnet_mat path outside of stages because it's used by preBIBSnet and postBIBSnet
-
-#     run_FSL_sh_script(j_args, "flirt", "-applyxfm",
-#                       "-ref", xfm_ref_img, "-in", dummy_copy,
-#                       "-init", seg2native, "-o", nifti_file_paths[f"native-T{t}"],
-#                       "-interp", "nearestneighbour")
-#     return nifti_file_paths[f"native-T{t}"]
-
 def remove_extra_clusters_from_mask(path_to_mask, path_to_aseg = None):
 
     '''Function that removes smaller/unconnected clusters from brain mask

From 1e89743d3ced021016fb43ee3391c8cc3814e209 Mon Sep 17 00:00:00 2001
From: LuciMoore
Date: Tue, 2 Jul 2024 15:03:35 -0500
Subject: [PATCH 05/24] updated Dockerfile to use zip file on s3 that includes
 new model 540 that prevents chirality errors for T1+T2 processing and also
 removed data/chirality_masks folder

---
 Dockerfile | 17 +++++++++--------
 1 file changed, 9 insertions(+), 8 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index fa2b789..8e23fe6 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,7 +1,7 @@
 FROM nvcr.io/nvidia/pytorch:21.11-py3
 
 # Manually update the BIBSnet version when building
-ENV BIBSNET_VERSION="3.2.0"
+ENV BIBSNET_VERSION="3.3.0"
 
 # Prepare environment
 RUN apt-get update && \
@@ -49,7 +49,7 @@ RUN apt-get update -qq \
   && rm -rf /var/lib/apt/lists/* \
   && echo "Downloading FSL ..."
\ && mkdir -p /opt/fsl-6.0.5.1 \ - && curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" \ + && curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0-rmCC.tar.gz" \ | tar -xzpf - fsl-6.0.5.1-centos7_64.tar.gz -O | tar -xzpC /opt/fsl-6.0.5.1 --no-same-owner --strip-components 1 ENV FSLDIR="/opt/fsl-6.0.5.1" \ @@ -89,16 +89,17 @@ ENV nnUNet_preprocessed="/opt/nnUNet/nnUNet_raw_data_base/nnUNet_preprocessed" \ RESULTS_FOLDER="/opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models" RUN mkdir -p /opt/nnUNet/nnUNet_raw_data_base/ /opt/nnUNet/nnUNet_raw_data_base/nnUNet_preprocessed /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet /home/bibsnet/data -#COPY trained_models/Task512_BCP_ABCD_Neonates_SynthSegDownsample.zip /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet -RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - Task552_uniform_distribution_synthseg.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 && \ - curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - Task514_BCP_ABCD_Neonates_SynthSeg_T1Only.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 &&\ - curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - Task515_BCP_ABCD_Neonates_SynthSeg_T2Only.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 && \ - curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - Task526_BIBSNet_Production_Model.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 +RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0-rmCC.tar.gz" | tar -xzpf - Task540_BIBSNet_Production_Model.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 + +# RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - Task552_uniform_distribution_synthseg.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 && \ +# curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - Task514_BCP_ABCD_Neonates_SynthSeg_T1Only.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 &&\ +# curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - Task515_BCP_ABCD_Neonates_SynthSeg_T2Only.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 && \ +# curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - Task526_BIBSNet_Production_Model.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 COPY run.py /home/bibsnet/run.py COPY src /home/bibsnet/src COPY bin /home/bibsnet/bin -RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - data.tar.gz -O | tar -xzpC /home/bibsnet/data --no-same-owner --strip-components 1 +RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0-rmCC.tar.gz" | tar -xzpf - data.tar.gz -O | tar -xzpC /home/bibsnet/data --no-same-owner --strip-components 1 COPY requirements.txt 
/home/bibsnet/requirements.txt From 0af73a54f1d352de2890e71d4050f43fdab6c03d Mon Sep 17 00:00:00 2001 From: LuciMoore Date: Tue, 2 Jul 2024 17:25:57 -0500 Subject: [PATCH 06/24] updated Dockerfile to use different tar.gz file on s3 because the one I made keeps failing when it tries to find fsl tar.gz --- Dockerfile | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/Dockerfile b/Dockerfile index 8e23fe6..4a2aa23 100755 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,7 @@ FROM nvcr.io/nvidia/pytorch:21.11-py3 # Manually update the BIBSnet version when building -ENV BIBSNET_VERSION="3.3.0" +ENV BIBSNET_VERSION="3.3.1" # Prepare environment RUN apt-get update && \ @@ -49,7 +49,7 @@ RUN apt-get update -qq \ && rm -rf /var/lib/apt/lists/* \ && echo "Downloading FSL ..." \ && mkdir -p /opt/fsl-6.0.5.1 \ - && curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0-rmCC.tar.gz" \ + && curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" \ | tar -xzpf - fsl-6.0.5.1-centos7_64.tar.gz -O | tar -xzpC /opt/fsl-6.0.5.1 --no-same-owner --strip-components 1 ENV FSLDIR="/opt/fsl-6.0.5.1" \ @@ -89,17 +89,16 @@ ENV nnUNet_preprocessed="/opt/nnUNet/nnUNet_raw_data_base/nnUNet_preprocessed" \ RESULTS_FOLDER="/opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models" RUN mkdir -p /opt/nnUNet/nnUNet_raw_data_base/ /opt/nnUNet/nnUNet_raw_data_base/nnUNet_preprocessed /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet /home/bibsnet/data -RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0-rmCC.tar.gz" | tar -xzpf - Task540_BIBSNet_Production_Model.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 - -# RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - Task552_uniform_distribution_synthseg.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 && \ -# curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - Task514_BCP_ABCD_Neonates_SynthSeg_T1Only.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 &&\ -# curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - Task515_BCP_ABCD_Neonates_SynthSeg_T2Only.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 && \ -# curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - Task526_BIBSNet_Production_Model.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 +#COPY trained_models/Task512_BCP_ABCD_Neonates_SynthSegDownsample.zip /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet +RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.1.tar.gz" | tar -xzpf - Task526_previous_BIBSNet_Production_Model.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 && \ + curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.1.tar.gz" | tar -xzpf - Task514_BCP_ABCD_Neonates_SynthSeg_T1Only.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 &&\ + curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.1.tar.gz" | tar -xzpf - Task515_BCP_ABCD_Neonates_SynthSeg_T2Only.tar.gz -O | tar -xzpC 
/opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 && \ + curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.1.tar.gz" | tar -xzpf - Task540_BIBSNet_Production_Model.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 COPY run.py /home/bibsnet/run.py COPY src /home/bibsnet/src COPY bin /home/bibsnet/bin -RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0-rmCC.tar.gz" | tar -xzpf - data.tar.gz -O | tar -xzpC /home/bibsnet/data --no-same-owner --strip-components 1 +RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.1.tar.gz" | tar -xzpf - data.tar.gz -O | tar -xzpC /home/bibsnet/data --no-same-owner --strip-components 1 COPY requirements.txt /home/bibsnet/requirements.txt @@ -110,4 +109,4 @@ RUN cp /home/bibsnet/run.py /home/bibsnet/bibsnet RUN cd /home/bibsnet/ && pip install -r requirements.txt RUN cd /home/bibsnet/ && chmod 555 -R run.py bin src bibsnet -ENTRYPOINT ["bibsnet"] +ENTRYPOINT ["bibsnet"] \ No newline at end of file From 12d4f1fd987dcc871ba5e0624f49b7f51ff4d1af Mon Sep 17 00:00:00 2001 From: LuciMoore Date: Tue, 2 Jul 2024 18:56:35 -0500 Subject: [PATCH 07/24] polished code and added more logging --- src/postbibsnet.py | 51 ++++++++++++++++------------------------------ 1 file changed, 18 insertions(+), 33 deletions(-) diff --git a/src/postbibsnet.py b/src/postbibsnet.py index e90f2de..a759c9a 100755 --- a/src/postbibsnet.py +++ b/src/postbibsnet.py @@ -23,28 +23,19 @@ def run_postBIBSnet(j_args): :param j_args: Dictionary containing all args :return: j_args, unchanged """ + # Define variables and paths sub_ses = get_subj_ID_and_session(j_args) list_files(j_args["common"]["work_dir"]) - - LOGGER.info("Reverting corrected segmentation to native space") out_BIBSnet_seg = os.path.join(j_args["optional_out_dirs"]["bibsnet"], *sub_ses, "output", "{}_optimal_resized.nii.gz".format("_".join(sub_ses))) - for t in only_Ts_needed_for_bibsnet_model(j_args["ID"]): - # Get preBIBSNet working directories in order to reference average image files - preBIBSnet_paths = {"parent": os.path.join( - j_args["optional_out_dirs"]["prebibsnet"], *sub_ses - )} - preBIBSnet_paths["averaged"] = os.path.join( - preBIBSnet_paths["parent"], "averaged") - preBIBSnet_paths["avg"] = dict() - - # Generate derivatives folders to output final files to - bibsnet_derivs_dir = os.path.join(j_args["optional_out_dirs"]["derivatives"], "bibsnet") - derivs_dir = os.path.join(bibsnet_derivs_dir, *sub_ses, "anat") - os.makedirs(derivs_dir, exist_ok=True) - - LOGGER.info("Now registering BIBSnet segmentation to native space to generate derivatives.") + # Generate derivatives folders to output final files to + LOGGER.info("Generating output derivatives folders") + bibsnet_derivs_dir = os.path.join(j_args["optional_out_dirs"]["derivatives"], "bibsnet") + derivs_dir = os.path.join(bibsnet_derivs_dir, *sub_ses, "anat") + os.makedirs(derivs_dir, exist_ok=True) + LOGGER.info("Now registering BIBSnet segmentation to native space to generate derivatives.") + for t in only_Ts_needed_for_bibsnet_model(j_args["ID"]): # Take inverse of .mat file from prebibsnet seg2native = os.path.join(j_args["optional_out_dirs"]["postbibsnet"], *sub_ses, f"seg_reg_to_T{t}w_native.mat") preBIBSnet_mat_glob = os.path.join(j_args["optional_out_dirs"]["postbibsnet"], *sub_ses, @@ -54,27 +45,21 @@ def run_postBIBSnet(j_args): run_FSL_sh_script(j_args, "convert_xfm", "-omat", seg2native, "-inverse", 
preBIBSnet_mat)
 
-        # Apply inverse mat to aseg from bibsnet stage and write out to derivatives folder
-        preBIBSnet_paths["avg"][f"T{t}w_input"] = list()
-        for eachfile in glob(os.path.join(j_args["common"]["bids_dir"],
-                                        *sub_ses, "anat",
-                                        f"*T{t}w*.nii.gz")):
-            preBIBSnet_paths["avg"][f"T{t}w_input"].append(eachfile)
-        avg_img_name = "{}_000{}{}".format("_".join(sub_ses), t-1, ".nii.gz")
-        preBIBSnet_paths["avg"][f"T{t}w_avg"] = os.path.join(
-            preBIBSnet_paths["averaged"], avg_img_name
-        )
-
-        # Define path to aseg derivative output and revert to native space
+        # Revert segmentation to native space using average anatomical as reference image and write out to derivatives folder
+        av_filename="{}_000{}.nii.gz".format("_".join(sub_ses), t-1)
+        avg_anat = os.path.join(j_args["optional_out_dirs"]["prebibsnet"], *sub_ses, "averaged", av_filename)
         aseg=os.path.join(derivs_dir, ("{}_space-T{}w_desc-{}.nii.gz".format("_".join(sub_ses), t, "aseg_dseg")))
+
         run_FSL_sh_script(j_args, "flirt", "-applyxfm",
-                        "-ref", preBIBSnet_paths["avg"][f"T{t}w_avg"], "-in", out_BIBSnet_seg,
+                        "-ref", avg_anat, "-in", out_BIBSnet_seg,
                         "-init", seg2native, "-o", aseg,
                         "-interp", "nearestneighbour")
+
+        LOGGER.info(f"BIBSnet segmentation has been transformed into native T{t} space")
 
-        LOGGER.info("Now generating segmentation-derived masks.")
-        mask=os.path.join(derivs_dir, ("{}_space-T{}w_desc-{}.nii.gz".format("_".join(sub_ses), t, "aseg_dseg")))
-        make_asegderived_mask(j_args, aseg, t, mask)
+        # Generate brainmask from segmentation and write out to derivatives folder
+        mask_temp=os.path.join(derivs_dir, ("{}_space-T{}w_desc-{}.nii.gz".format("_".join(sub_ses), t, "aseg_dseg")))
+        make_asegderived_mask(j_args, aseg, t, mask_temp)
 
         LOGGER.info(f"A mask of the BIBSnet T{t} segmentation has been produced")

From 183d8bc5036f5a5be82048d5be1d7902b34a68c8 Mon Sep 17 00:00:00 2001
From: LuciMoore
Date: Tue, 2 Jul 2024 19:05:58 -0500
Subject: [PATCH 08/24] delete bin/LR_mask_registration.sh since it's no
 longer needed

---
 bin/LR_mask_registration.sh | 40 -------------------------------------
 1 file changed, 40 deletions(-)
 delete mode 100755 bin/LR_mask_registration.sh

diff --git a/bin/LR_mask_registration.sh b/bin/LR_mask_registration.sh
deleted file mode 100755
index 1be3f9b..0000000
--- a/bin/LR_mask_registration.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-set -e
-
-# This script generates a L/R mask for a subject. It first registers a template head (eg "1mo_T1w_acpc_dc_restore.nii.gz") to the subject's head. Next it applies
-# the resulting transformation matrix ("antsregAffine.txt") using Nearest Neighbor interpolation to register the template LR mask (eg
-# "1mo_template_LRmask.nii.gz") to the subject head.
-
-# Important notes:
-# (1) Templates are age-specific, so make sure to select a template matching the age range of the subject. The 1mo template can be used for neonates.
-# (2) If available, it is recommended that one use T2w subject/template anatomicals for subjects 0-21 months of age and T1w subject/template anatomicals for
-# subjects over 21 months old. The contrast in infant T2w scans makes the pial surface more visible and increases the quality of registration.
-
-SubjectHead=$1;shift
-TemplateHead=$1;shift
-TemplateMask=$1;shift
-OutputMaskFile=$1;shift
-
-#module load ants
-WD=$(dirname $OutputMaskFile)/wd
-if [ ! -d "$WD" ]; then
-    mkdir "$WD"
-fi
-
-# TODO Skip ANTS if its outputs exist?
From 183d8bc5036f5a5be82048d5be1d7902b34a68c8 Mon Sep 17 00:00:00 2001
From: LuciMoore
Date: Tue, 2 Jul 2024 19:05:58 -0500
Subject: [PATCH 08/24] delete bin/LR_mask_registration.sh since it's no longer needed

---
 bin/LR_mask_registration.sh | 40 -------------------------------------
 1 file changed, 40 deletions(-)
 delete mode 100755 bin/LR_mask_registration.sh

diff --git a/bin/LR_mask_registration.sh b/bin/LR_mask_registration.sh
deleted file mode 100755
index 1be3f9b..0000000
--- a/bin/LR_mask_registration.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-set -e
-
-# This script generates a L/R mask for a subject. It first registers a template head (eg "1mo_T1w_acpc_dc_restore.nii.gz") to the subject's head. Next it applies
-# the resulting transformation matrix ("antsregAffine.txt") using Nearest Neighbor interpolation to register the template LR mask (eg
-# "1mo_template_LRmask.nii.gz") to the subject head.
-
-# Important notes:
-# (1) Templates are age-specific, so make sure to select a template matching the age range of the subject. The 1mo template can be used for neonates.
-# (2) If available, it is recommended that one use T2w subject/template anatomicals for subjects 0-21 months of age and T1w subject/template anatomicals for
-# subjects over 21 months old. The contrast in infant T2w scans makes the pial surface more visible and increases the quality of registration.
-
-SubjectHead=$1;shift
-TemplateHead=$1;shift
-TemplateMask=$1;shift
-OutputMaskFile=$1;shift
-
-#module load ants
-WD=$(dirname $OutputMaskFile)/wd
-if [ ! -d "$WD" ]; then
-    mkdir "$WD"
-fi
-
-# TODO Skip ANTS if its outputs exist?
-
-# Register the template head to the subject head
-ANTS 3 -m CC["$SubjectHead","$TemplateHead",1,5] -t SyN[0.25] -r Gauss[3,0] -o "$WD"/antsreg -i 60x50x20 --use-Histogram-Matching --number-of-affine-iterations 10000x10000x10000x10000x10000 --MI-option 32x16000
-
-echo "Finished running ANTS, now running antsApplyTransforms"
-
-# Apply resulting transformation to template L/R mask to generate subject L/R mask
-antsApplyTransforms -d 3 \
-    --output "$OutputMaskFile" \
-    --reference-image "$SubjectHead" \
-    --transform "$WD"/antsregWarp.nii.gz "$WD"/antsregAffine.txt \
-    --input "$TemplateMask" \
-    --interpolation NearestNeighbor
-
-#delete wd
-# rm -r "$WD"

From 46baccd264c7ab2c33b7ab82bec474296df5ff1b Mon Sep 17 00:00:00 2001
From: LuciMoore
Date: Tue, 2 Jul 2024 19:35:02 -0500
Subject: [PATCH 09/24] updated Dockerfile to comment out references to bin directory

---
 Dockerfile | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 4a2aa23..4f001cb 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -97,7 +97,7 @@ RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.1.tar.gz" | tar
 
 COPY run.py /home/bibsnet/run.py
 COPY src /home/bibsnet/src
-COPY bin /home/bibsnet/bin
+# COPY bin /home/bibsnet/bin
 RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.1.tar.gz" | tar -xzpf - data.tar.gz -O | tar -xzpC /home/bibsnet/data --no-same-owner --strip-components 1
 
 COPY requirements.txt /home/bibsnet/requirements.txt
@@ -107,6 +107,7 @@ ENV PATH="${PATH}:/home/bibsnet/"
 RUN cp /home/bibsnet/run.py /home/bibsnet/bibsnet
 RUN cd /home/bibsnet/ && pip install -r requirements.txt
 
-RUN cd /home/bibsnet/ && chmod 555 -R run.py bin src bibsnet
+# RUN cd /home/bibsnet/ && chmod 555 -R run.py bin src bibsnet
+RUN cd /home/bibsnet/ && chmod 555 -R run.py src bibsnet
 
 ENTRYPOINT ["bibsnet"]
\ No newline at end of file

From 2e8755f27a149efabe24a645d0a20210c76ebfca Mon Sep 17 00:00:00 2001
From: LuciMoore
Date: Tue, 2 Jul 2024 20:02:52 -0500
Subject: [PATCH 10/24] reverted to 3.2.0 tar.gz file on s3 for now because there are issues porting in new model

---
 Dockerfile | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 4f001cb..be35dad 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,7 +1,7 @@
 FROM nvcr.io/nvidia/pytorch:21.11-py3
 
 # Manually update the BIBSnet version when building
-ENV BIBSNET_VERSION="3.3.1"
+ENV BIBSNET_VERSION="3.2.0"
 
 # Prepare environment
 RUN apt-get update && \
@@ -49,7 +49,7 @@ RUN apt-get update -qq \
     && rm -rf /var/lib/apt/lists/* \
     && echo "Downloading FSL ..."
\ && mkdir -p /opt/fsl-6.0.5.1 \ - && curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" \ + && curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" \ | tar -xzpf - fsl-6.0.5.1-centos7_64.tar.gz -O | tar -xzpC /opt/fsl-6.0.5.1 --no-same-owner --strip-components 1 ENV FSLDIR="/opt/fsl-6.0.5.1" \ @@ -90,15 +90,15 @@ ENV nnUNet_preprocessed="/opt/nnUNet/nnUNet_raw_data_base/nnUNet_preprocessed" \ RUN mkdir -p /opt/nnUNet/nnUNet_raw_data_base/ /opt/nnUNet/nnUNet_raw_data_base/nnUNet_preprocessed /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet /home/bibsnet/data #COPY trained_models/Task512_BCP_ABCD_Neonates_SynthSegDownsample.zip /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet -RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.1.tar.gz" | tar -xzpf - Task526_previous_BIBSNet_Production_Model.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 && \ - curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.1.tar.gz" | tar -xzpf - Task514_BCP_ABCD_Neonates_SynthSeg_T1Only.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 &&\ - curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.1.tar.gz" | tar -xzpf - Task515_BCP_ABCD_Neonates_SynthSeg_T2Only.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 && \ - curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.1.tar.gz" | tar -xzpf - Task540_BIBSNet_Production_Model.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 +RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - Task526_previous_BIBSNet_Production_Model.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 && \ + curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - Task514_BCP_ABCD_Neonates_SynthSeg_T1Only.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 && \ + curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - Task515_BCP_ABCD_Neonates_SynthSeg_T2Only.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 + # curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.1.tar.gz" | tar -xzpf - Task540_BIBSNet_Production_Model.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 COPY run.py /home/bibsnet/run.py COPY src /home/bibsnet/src # COPY bin /home/bibsnet/bin -RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.1.tar.gz" | tar -xzpf - data.tar.gz -O | tar -xzpC /home/bibsnet/data --no-same-owner --strip-components 1 +RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - data.tar.gz -O | tar -xzpC /home/bibsnet/data --no-same-owner --strip-components 1 COPY requirements.txt /home/bibsnet/requirements.txt From 6d810585f8e5bf3241c58d2498be725b300c37ae Mon Sep 17 00:00:00 2001 From: LuciMoore Date: Wed, 3 Jul 2024 11:34:00 -0700 Subject: [PATCH 11/24] added additional flags to nnUNet_predict that may be needed in order to prevent mirroring when using updated models --- src/bibsnet.py | 5 ++++- 1 file changed, 4 
insertions(+), 1 deletion(-)

diff --git a/src/bibsnet.py b/src/bibsnet.py
index 120ac88..3e13f80 100755
--- a/src/bibsnet.py
+++ b/src/bibsnet.py
@@ -87,7 +87,10 @@ def run_nnUNet_predict(cli_args):
     """
     to_run = [cli_args["nnUNet"], "-i", cli_args["input"],
              "-o", cli_args["output"], "-t",
-              str(cli_args["task"]), "-m", cli_args["model"]]
+              str(cli_args["task"]), "-m", cli_args["model"],
+              "-tr nnUNetTrainerV2_noMirroring",
+              "--disable_tta"
+              ]
     LOGGER.verbose(f"Now running nnUNet with these parameters: {to_run}")
     process = subprocess.Popen(to_run, stdout=subprocess.PIPE, universal_newlines=True)
     with process.stdout:

From e223c4608f847f8c46ebb1e11955db7db1dcf189 Mon Sep 17 00:00:00 2001
From: LuciMoore
Date: Wed, 3 Jul 2024 20:53:14 -0500
Subject: [PATCH 12/24] fixed typo in logging

---
 src/postbibsnet.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/postbibsnet.py b/src/postbibsnet.py
index a759c9a..b63b13d 100755
--- a/src/postbibsnet.py
+++ b/src/postbibsnet.py
@@ -55,7 +55,7 @@ def run_postBIBSnet(j_args):
                           "-init", seg2native, "-o", aseg,
                           "-interp", "nearestneighbour")
 
-        LOGGER.info(f"BIBSNet segmentation has been trasnformed into native T{t} space")
+        LOGGER.info(f"BIBSNet segmentation has been transformed into native T{t} space")
 
         # Generate brainmask from segmentation and write out to derivatives folder
         mask_temp=os.path.join(derivs_dir, ("{}_space-T{}w_desc-{}.nii.gz".format("_".join(sub_ses), t, "aseg_dseg")))

From bd43523d81af4cc233928b34203392498cca34f2 Mon Sep 17 00:00:00 2001
From: LuciMoore
Date: Wed, 3 Jul 2024 20:57:16 -0500
Subject: [PATCH 13/24] finalized flags needed for no mirroring with new nnUNet models

---
 src/bibsnet.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/bibsnet.py b/src/bibsnet.py
index 3e13f80..cbe8cf5 100755
--- a/src/bibsnet.py
+++ b/src/bibsnet.py
@@ -88,9 +88,9 @@ def run_nnUNet_predict(cli_args):
     to_run = [cli_args["nnUNet"], "-i", cli_args["input"],
              "-o", cli_args["output"], "-t",
               str(cli_args["task"]), "-m", cli_args["model"],
-              "-tr nnUNetTrainerV2_noMirroring",
               "--disable_tta"
               ]
+
     LOGGER.verbose(f"Now running nnUNet with these parameters: {to_run}")
     process = subprocess.Popen(to_run, stdout=subprocess.PIPE, universal_newlines=True)
     with process.stdout:
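One general point about the to_run list built above: because subprocess.Popen is invoked without a shell, every command-line token has to be its own list element. A single string such as "-tr nnUNetTrainerV2_noMirroring" reaches the child process as one argv entry, which argparse-style CLIs treat as one unrecognized option rather than a flag plus its value. A short illustration of the difference; the tool name and values below simply mirror the diff and are not a claim about which flags nnUNet requires:

    import subprocess

    # Flag and value as separate tokens -- this is how argv should reach the child.
    args_ok = ["nnUNet_predict", "-i", "in_dir", "-o", "out_dir",
               "-t", "540", "-m", "3d_fullres",
               "-tr", "nnUNetTrainerV2_noMirroring", "--disable_tta"]
    subprocess.run(args_ok, check=True)

    # By contrast, ["...", "-tr nnUNetTrainerV2_noMirroring"] delivers the flag and
    # its value fused into a single argv entry, since no shell splits the string.
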
From 07010d68e0716f1324b8d39bab216f1c03ccc3c8 Mon Sep 17 00:00:00 2001
From: LuciMoore
Date: Wed, 3 Jul 2024 21:14:24 -0500
Subject: [PATCH 14/24] updated Dockerfile with correct version - will need further edits after T1- and T2-only models are updated

---
 Dockerfile | 19 +++++++++----------
 1 file changed, 9 insertions(+), 10 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index be35dad..54674cd 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,7 +1,7 @@
 FROM nvcr.io/nvidia/pytorch:21.11-py3
 
 # Manually update the BIBSnet version when building
-ENV BIBSNET_VERSION="3.2.0"
+ENV BIBSNET_VERSION="3.3.0"
 
 # Prepare environment
 RUN apt-get update && \
@@ -49,7 +49,7 @@ RUN apt-get update -qq \
     && rm -rf /var/lib/apt/lists/* \
    && echo "Downloading FSL ..." \
    && mkdir -p /opt/fsl-6.0.5.1 \
-    && curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" \
+    && curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" \
     | tar -xzpf - fsl-6.0.5.1-centos7_64.tar.gz -O | tar -xzpC /opt/fsl-6.0.5.1 --no-same-owner --strip-components 1
 
 ENV FSLDIR="/opt/fsl-6.0.5.1" \
@@ -90,15 +90,15 @@ ENV nnUNet_preprocessed="/opt/nnUNet/nnUNet_raw_data_base/nnUNet_preprocessed" \
 
 RUN mkdir -p /opt/nnUNet/nnUNet_raw_data_base/ /opt/nnUNet/nnUNet_raw_data_base/nnUNet_preprocessed /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet /home/bibsnet/data
 #COPY trained_models/Task512_BCP_ABCD_Neonates_SynthSegDownsample.zip /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet
-RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - Task526_previous_BIBSNet_Production_Model.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 && \
-    curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - Task514_BCP_ABCD_Neonates_SynthSeg_T1Only.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 && \
-    curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - Task515_BCP_ABCD_Neonates_SynthSeg_T2Only.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1
-    # curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.1.tar.gz" | tar -xzpf - Task540_BIBSNet_Production_Model.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1
+RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" | tar -xzpf - Task526_previous_BIBSNet_Production_Model.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 && \
+    curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" | tar -xzpf - Task514_BCP_ABCD_Neonates_SynthSeg_T1Only.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 &&\
+    curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" | tar -xzpf - Task515_BCP_ABCD_Neonates_SynthSeg_T2Only.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 && \
+    curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" | tar -xzpf - Task540_BIBSNet_Production_Model.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1
 
 COPY run.py /home/bibsnet/run.py
 COPY src /home/bibsnet/src
-# COPY bin /home/bibsnet/bin
+COPY bin /home/bibsnet/bin
-RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.2.0.tar.gz" | tar -xzpf - data.tar.gz -O | tar -xzpC /home/bibsnet/data --no-same-owner --strip-components 1
+RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" | tar -xzpf - data.tar.gz -O | tar -xzpC /home/bibsnet/data --no-same-owner --strip-components 1
 
 COPY requirements.txt /home/bibsnet/requirements.txt
 
@@ -107,7 +107,6 @@ ENV PATH="${PATH}:/home/bibsnet/"
 RUN cp /home/bibsnet/run.py /home/bibsnet/bibsnet
 RUN cd /home/bibsnet/ && pip install -r requirements.txt
 
-# RUN cd /home/bibsnet/ && chmod 555 -R run.py bin src bibsnet
-RUN cd /home/bibsnet/ && chmod 555 -R run.py src bibsnet
+RUN
cd /home/bibsnet/ && chmod 555 -R run.py bin src bibsnet ENTRYPOINT ["bibsnet"] \ No newline at end of file From ef41cc53b9d9de4154f70e23ebfecb880ce650ac Mon Sep 17 00:00:00 2001 From: LuciMoore Date: Fri, 5 Jul 2024 12:07:36 -0500 Subject: [PATCH 15/24] updated Dockerfile w/ correct version, tar.gz file on s3, and reference to bin removed --- Dockerfile | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/Dockerfile b/Dockerfile index 54674cd..73ee42f 100755 --- a/Dockerfile +++ b/Dockerfile @@ -89,15 +89,11 @@ ENV nnUNet_preprocessed="/opt/nnUNet/nnUNet_raw_data_base/nnUNet_preprocessed" \ RESULTS_FOLDER="/opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models" RUN mkdir -p /opt/nnUNet/nnUNet_raw_data_base/ /opt/nnUNet/nnUNet_raw_data_base/nnUNet_preprocessed /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet /home/bibsnet/data -#COPY trained_models/Task512_BCP_ABCD_Neonates_SynthSegDownsample.zip /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet -RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" | tar -xzpf - Task526_previous_BIBSNet_Production_Model.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 && \ - curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" | tar -xzpf - Task514_BCP_ABCD_Neonates_SynthSeg_T1Only.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 &&\ - curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" | tar -xzpf - Task515_BCP_ABCD_Neonates_SynthSeg_T2Only.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 && \ - curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" | tar -xzpf - Task540_BIBSNet_Production_Model.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 + +RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" | tar -xzpf - Task540_BIBSNet_Production_Model.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1 COPY run.py /home/bibsnet/run.py COPY src /home/bibsnet/src -COPY bin /home/bibsnet/bin RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" | tar -xzpf - data.tar.gz -O | tar -xzpC /home/bibsnet/data --no-same-owner --strip-components 1 COPY requirements.txt /home/bibsnet/requirements.txt @@ -107,6 +103,6 @@ ENV PATH="${PATH}:/home/bibsnet/" RUN cp /home/bibsnet/run.py /home/bibsnet/bibsnet RUN cd /home/bibsnet/ && pip install -r requirements.txt -RUN cd /home/bibsnet/ && chmod 555 -R run.py bin src bibsnet +RUN cd /home/bibsnet/ && chmod 555 -R run.py src bibsnet ENTRYPOINT ["bibsnet"] \ No newline at end of file From 3c603b5abee9f392068d16ad46c21675a88fa8f2 Mon Sep 17 00:00:00 2001 From: lundq163 <102316699+lundq163@users.noreply.github.com> Date: Tue, 9 Jul 2024 11:24:38 -0500 Subject: [PATCH 16/24] Update Dockerfile utilizing wget instead of curl for task540 tar --- Dockerfile | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 73ee42f..61f8130 100755 --- a/Dockerfile +++ b/Dockerfile @@ -90,11 +90,17 @@ ENV nnUNet_preprocessed="/opt/nnUNet/nnUNet_raw_data_base/nnUNet_preprocessed" \ RUN mkdir -p /opt/nnUNet/nnUNet_raw_data_base/ /opt/nnUNet/nnUNet_raw_data_base/nnUNet_preprocessed 
/opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet /home/bibsnet/data
 
-RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" | tar -xzpf - Task540_BIBSNet_Production_Model.tar.gz -O | tar -xzpC /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --no-same-owner --strip-components 1
+RUN wget -O bibsnet-v3.3.0.tar.gz "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" && \
+    tar -xzf bibsnet-v3.3.0.tar.gz Task540_BIBSNet_Production_Model.tar.gz && \
+    tar -xzf Task540_BIBSNet_Production_Model.tar.gz -C /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --strip-components 1 && \
+    rm bibsnet-v3.3.0.tar.gz Task540_BIBSNet_Production_Model.tar.gz
 
 COPY run.py /home/bibsnet/run.py
 COPY src /home/bibsnet/src
-RUN curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" | tar -xzpf - data.tar.gz -O | tar -xzpC /home/bibsnet/data --no-same-owner --strip-components 1
+RUN wget -O bibsnet-v3.3.0.tar.gz "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" && \
+    tar -xzf bibsnet-v3.3.0.tar.gz data.tar.gz && \
+    tar -xzf data.tar.gz -C /home/bibsnet/data --strip-components 1 && \
+    rm bibsnet-v3.3.0.tar.gz data.tar.gz
 
 COPY requirements.txt /home/bibsnet/requirements.txt
 
@@ -105,4 +111,4 @@ RUN cp /home/bibsnet/run.py /home/bibsnet/bibsnet
 RUN cd /home/bibsnet/ && pip install -r requirements.txt
 RUN cd /home/bibsnet/ && chmod 555 -R run.py src bibsnet
 
-ENTRYPOINT ["bibsnet"]
\ No newline at end of file
+ENTRYPOINT ["bibsnet"]
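The wget-based recipe above makes the structure of the release bundle explicit: bibsnet-v3.3.0.tar.gz is a tar.gz whose members are themselves tar.gz archives, so each payload is unpacked in two stages. For inspecting the bundle outside Docker, a rough Python equivalent using only the standard library; member and target names mirror the Dockerfile, and --strip-components has no direct tarfile analogue, so it is omitted:

    import tarfile

    # Stage 1: pull the inner archive out of the outer release bundle.
    with tarfile.open("bibsnet-v3.3.0.tar.gz", "r:gz") as outer:
        inner = outer.extractfile("data.tar.gz")
        # Stage 2: unpack the inner tar.gz straight from the in-memory file object.
        with tarfile.open(fileobj=inner, mode="r:gz") as payload:
            payload.extractall("data")  # Dockerfile additionally strips one path level
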
From 7903eac9f67a9ca64fc61cc966db3b9afece089d Mon Sep 17 00:00:00 2001
From: Timothy Hendrickson
Date: Tue, 30 Jul 2024 15:45:21 -0500
Subject: [PATCH 17/24] including make_per_region_volume_from_segmentation function

---
 requirements.txt   |  4 +-
 src/postbibsnet.py | 95 ++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 97 insertions(+), 2 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 9a7553d..4cdd795 100755
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,5 @@
 nibabel==3.2.1
 nipype==1.8.6
 pandas==1.3.5
-scipy==1.10.0
-niworkflows==1.9.0
\ No newline at end of file
+scipy==0.9.3
+niworkflows==1.9.0

diff --git a/src/postbibsnet.py b/src/postbibsnet.py
index b63b13d..116556d 100755
--- a/src/postbibsnet.py
+++ b/src/postbibsnet.py
@@ -6,6 +6,7 @@
 import numpy as np
 import json
 from scipy import ndimage
+import csv
 
 from src.logger import LOGGER
 
@@ -71,6 +72,14 @@ def run_postBIBSnet(j_args):
         generate_sidecar_json(sub_ses, reference_path, derivs_dir, t, "aseg_dseg")
         generate_sidecar_json(sub_ses, reference_path, derivs_dir, t, "brain_mask")
 
+        # make per region volumes from segmentation
+        make_per_region_volume_from_segmentation(path_to_aseg=aseg,
+                                                 derivs_dir=derivs_dir,
+                                                 sub_ses=sub_ses,
+                                                 t=t,
+                                                 desc='aseg_volumes')
+
+
     # Copy dataset_description.json into bibsnet_derivs_dir directory for use in nibabies
     list_files(j_args["common"]["work_dir"])
     new_data_desc_json = os.path.join(bibsnet_derivs_dir, "dataset_description.json")
@@ -162,7 +171,93 @@ def generate_sidecar_json(sub_ses, reference_path, derivs_dir, t, desc):
     with open(file_path, "w+") as file:
         json.dump(sidecar, file, indent = 4)
 
+def make_per_region_volume_from_segmentation(path_to_aseg,derivs_dir,sub_ses,t,desc):
+    """
+    Author: Tim Hendrickson
+
+    Produces volumes (in mm^3) for each segmented structure within the aseg and
+    writes a BIDS derivative compliant TSV file within the derivative folder
+
+    Parameters
+    ----------
+    path_to_aseg : str
+        The file path to the anatomical segmentation (aseg) file.
+    derivs_dir : str
+        The directory where derivative files are stored.
+    sub_ses : str
+        The subject and session identifier, typically in the format 'sub-XX_ses-YY'.
+    t : int
+        The anatomical image type, typically 1 for T1-weighted or 2 for T2-weighted.
+    desc : str
+        A string representing the description of the output.
+
+    Returns
+    -------
+    Does not return a value. Generates TSV file within derivs_dir
+
+    """
+    segmentation_lookup_table = os.path.join(SCRIPT_DIR, "data", "look_up_tables",
+                                             "Freesurfer_LUT_DCAN.txt")
+    free_surfer_label_to_region = get_id_to_region_mapping(segmentation_lookup_table)
+
+    # load aseg into nibabel
+    aseg_img = nib.load(path_to_aseg)
+    aseg_data = aseg_img.get_fdata()
+
+    # get voxel dimensions (mm) and volume of single voxel (mm^3)
+    voxel_dims = aseg_img.header.get_zooms()
+    voxel_volume = np.prod(voxel_dims)
+
+    # get unique labels from aseg
+    unique_labels = np.unique(aseg_data)
+
+    region_volumes={}
+    for label in unique_labels:
+        try:
+            # not all values are labelled within lookup table, particularly 0
+            label_name = free_surfer_label_to_region[label]
+        except KeyError:
+            LOGGER.warning("label {} is not in lookup table".format(label))
+            continue
+        else:
+            voxel_count = np.sum(aseg_data==label)
+            volume = voxel_count * voxel_volume
+            region_volumes[label_name] = volume
+
+    # write out region names and values to TSV file in BIDS derivative format
+    tsvFileName='{deriv_dir}/{ID}_space-T{image_type}w_desc-{desc}.tsv'.format(deriv_dir=derivs_dir,ID=sub_ses,image_type=t,desc=desc)
+    with open(tsvFileName,'w') as tsvfile:
+        tsv_writer = csv.writer(tsvfile,delimiter='\t')
+        tsv_writer.writerow(list(region_volumes.keys())) # write header
+        tsv_writer.writerow(list(region_volumes.values())) # write out values
+
+def get_id_to_region_mapping(mapping_file_name, separator=None):
+    """
+    Author: Paul Reiners
+    Create a map from region ID to region name from a FreeSurfer-style
+    look-up table. This function parses the look-up table and returns a map
+    from region IDs to their names.
+    :param mapping_file_name: String, the name or path to the look-up table
+    :param separator: String delimiter separating parts of look-up table lines
+    :return: Dictionary, a map from the ID of a region to its name
+    """
+    with open(mapping_file_name, 'r') as infile:
+        lines = infile.readlines()
+
+    id_to_region = {}
+    for line in lines:
+        line = line.strip()
+        if line.startswith('#') or line == '':
+            continue
+        if separator:
+            parts = line.split(separator)
+        else:
+            parts = line.split()
+        region_id = int(parts[0])
+        region = parts[1]
+        id_to_region[region_id] = region
+    return id_to_region
 
 def remove_extra_clusters_from_mask(path_to_mask, path_to_aseg = None):
     '''Function that removes smaller/unconnected clusters from brain mask
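A quick arithmetic check on the volume computation patch 17 introduces: each region's volume is simply (voxel count for that label) x (volume of one voxel), and the voxel volume is the product of the header zooms. A self-contained sketch with a synthetic image, using only nibabel and numpy; all values are illustrative:

    import nibabel as nib
    import numpy as np

    # Synthetic segmentation: label 2 fills a 2x2x2 corner block of a 10x10x10 grid.
    data = np.zeros((10, 10, 10), dtype=np.int16)
    data[:2, :2, :2] = 2
    img = nib.Nifti1Image(data, affine=np.diag([1.0, 2.0, 2.0, 1.0]))

    voxel_volume = np.prod(img.header.get_zooms())  # 1 * 2 * 2 = 4 mm^3 per voxel
    voxel_count = np.sum(img.get_fdata() == 2)      # 8 voxels carry label 2
    print(voxel_count * voxel_volume)               # -> 32.0 mm^3 for label 2
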
From 6da91d495cf989bfe4cb6d552e4758f56d042f2a Mon Sep 17 00:00:00 2001
From: lundq163 <102316699+lundq163@users.noreply.github.com>
Date: Mon, 5 Aug 2024 09:58:33 -0500
Subject: [PATCH 18/24] Update requirements.txt

changed back scipy version to avoid docker build errors

---
 requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index 4cdd795..cef6ab5 100755
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,5 @@
 nibabel==3.2.1
 nipype==1.8.6
 pandas==1.3.5
-scipy==0.9.3
+scipy==1.10.0
 niworkflows==1.9.0

From 846b1d6b2e732c5e8a3f304d219c334723543d5f Mon Sep 17 00:00:00 2001
From: lundq163 <102316699+lundq163@users.noreply.github.com>
Date: Tue, 6 Aug 2024 12:42:50 -0500
Subject: [PATCH 19/24] Update Dockerfile

loading in ANTs from tier2 destination

---
 Dockerfile | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 61f8130..5f1b485 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -66,9 +66,11 @@ ENV FSLDIR="/opt/fsl-6.0.5.1" \
 
 # Installing ANTs 2.3.3 (NeuroDocker build)
 # Note: the URL says 2.3.4 but it is actually 2.3.3
-RUN mkdir -p /opt/ants \
-    && curl -sSL --retry 5 "https://dl.dropbox.com/s/gwf51ykkk5bifyj/ants-Linux-centos6_x86_64-v2.3.4.tar.gz" \
-    | tar -xzC /opt/ants --strip-components 1
+# TESTING: installing ANTS from tier2 tar.gz
+RUN echo "Downloading ANTs ..." && \
+    mkdir -p /opt/ants && \
+    curl -sSL "https://s3.msi.umn.edu/bibsnet-data/ants-Linux-centos6_x86_64-v2.3.4.tar.gz" \
+    | tar -xzpf - ants-Linux-centos6_x86_64-v2.3.4.tar.gz -O | tar -xzpC /opt/ants --no-same-owner --strip-components 1
 
 # Create a shared $HOME directory
 RUN useradd -m -s /bin/bash -G users -u 1000 bibsnet

From a12fc0457604b8133da9bb37eab8e12e2c2f3d7e Mon Sep 17 00:00:00 2001
From: lundq163 <102316699+lundq163@users.noreply.github.com>
Date: Tue, 6 Aug 2024 12:55:57 -0500
Subject: [PATCH 20/24] Update Dockerfile

fixing source dir of ANTs tar on tier2

---
 Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index 5f1b485..3a2c120 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -69,7 +69,7 @@ ENV FSLDIR="/opt/fsl-6.0.5.1" \
 # TESTING: installing ANTS from tier2 tar.gz
 RUN echo "Downloading ANTs ..." && \
     mkdir -p /opt/ants && \
-    curl -sSL "https://s3.msi.umn.edu/bibsnet-data/ants-Linux-centos6_x86_64-v2.3.4.tar.gz" \
+    curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" \
     | tar -xzpf - ants-Linux-centos6_x86_64-v2.3.4.tar.gz -O | tar -xzpC /opt/ants --no-same-owner --strip-components 1
 
 # Create a shared $HOME directory
 RUN useradd -m -s /bin/bash -G users -u 1000 bibsnet

From 4e99b755675eb60f7101866087940868a5ab49a7 Mon Sep 17 00:00:00 2001
From: lundq163 <102316699+lundq163@users.noreply.github.com>
Date: Wed, 7 Aug 2024 10:23:20 -0500
Subject: [PATCH 21/24] Update Dockerfile

updating to get fsl and ants with wget

---
 Dockerfile | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 3a2c120..13cdb62 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -49,8 +49,10 @@ RUN apt-get update -qq \
     && rm -rf /var/lib/apt/lists/* \
     && echo "Downloading FSL ..." \
     && mkdir -p /opt/fsl-6.0.5.1 \
-    && curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" \
-    | tar -xzpf - fsl-6.0.5.1-centos7_64.tar.gz -O | tar -xzpC /opt/fsl-6.0.5.1 --no-same-owner --strip-components 1
+    && wget -O bibsnet-v3.3.0.tar.gz "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" \
+    && tar -xzf bibsnet-v3.3.0.tar.gz fsl-6.0.5.1-centos7_64.tar.gz \
+    && tar -xzf fsl-6.0.5.1-centos7_64.tar.gz -C /opt/fsl-6.0.5.1 --no-same-owner --strip-components 1 \
+    && rm bibsnet-v3.3.0.tar.gz fsl-6.0.5.1-centos7_64.tar.gz
 
 ENV FSLDIR="/opt/fsl-6.0.5.1" \
     PATH="/opt/afni-latest:/opt/ants:/opt/fsl-6.0.5.1/bin:$PATH" \
@@ -69,8 +71,10 @@ ENV FSLDIR="/opt/fsl-6.0.5.1" \
 # TESTING: installing ANTS from tier2 tar.gz
 RUN echo "Downloading ANTs ..." && \
     mkdir -p /opt/ants && \
-    curl -sSL "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" \
-    | tar -xzpf - ants-Linux-centos6_x86_64-v2.3.4.tar.gz -O | tar -xzpC /opt/ants --no-same-owner --strip-components 1
+    wget -O bibsnet-v3.3.0.tar.gz "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" && \
+    tar -xzf bibsnet-v3.3.0.tar.gz ants-Linux-centos6_x86_64-v2.3.4.tar.gz && \
+    tar -xzf ants-Linux-centos6_x86_64-v2.3.4.tar.gz -C /opt/ants --no-same-owner --strip-components 1 && \
+    rm bibsnet-v3.3.0.tar.gz ants-Linux-centos6_x86_64-v2.3.4.tar.gz
 
 # Create a shared $HOME directory
 RUN useradd -m -s /bin/bash -G users -u 1000 bibsnet

From bebdd4714cbec870cbb50e8e879e55f10323380e Mon Sep 17 00:00:00 2001
From: Timothy Hendrickson
Date: Fri, 9 Aug 2024 10:45:24 -0500
Subject: [PATCH 22/24] modifying how sub_ses is referred to within 'make_per_region_volume_from_segmentation'

---
 src/postbibsnet.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/postbibsnet.py b/src/postbibsnet.py
index 116556d..e664154 100755
--- a/src/postbibsnet.py
+++ b/src/postbibsnet.py
@@ -75,7 +75,7 @@ def run_postBIBSnet(j_args):
         # make per region volumes from segmentation
         make_per_region_volume_from_segmentation(path_to_aseg=aseg,
                                                  derivs_dir=derivs_dir,
-                                                 sub_ses=sub_ses,
+                                                 sub_ses=j_args(*sub_ses),
                                                  t=t,
                                                  desc='aseg_volumes')
 

From b66efcde9064cb0b802ce2bd83467bbbc23e2298 Mon Sep 17 00:00:00 2001
From: Timothy Hendrickson
Date: Wed, 14 Aug 2024 12:19:17 -0500
Subject: [PATCH 23/24] modified arguments to 'make_per_region_volume_from_segmentation' function.
--- src/postbibsnet.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/postbibsnet.py b/src/postbibsnet.py index e664154..912ccce 100755 --- a/src/postbibsnet.py +++ b/src/postbibsnet.py @@ -75,7 +75,7 @@ def run_postBIBSnet(j_args): # make per region volumes from segmentation make_per_region_volume_from_segmentation(path_to_aseg=aseg, derivs_dir=derivs_dir, - sub_ses=j_args(*sub_ses), + sub_ses='_'.join(sub_ses), t=t, desc='aseg_volumes') From b784e3031c98386292e24448b17da7b7fa147737 Mon Sep 17 00:00:00 2001 From: lundq163 <102316699+lundq163@users.noreply.github.com> Date: Wed, 14 Aug 2024 14:43:11 -0500 Subject: [PATCH 24/24] Update Dockerfile including additional production models --- Dockerfile | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 13cdb62..f135509 100755 --- a/Dockerfile +++ b/Dockerfile @@ -97,9 +97,19 @@ ENV nnUNet_preprocessed="/opt/nnUNet/nnUNet_raw_data_base/nnUNet_preprocessed" \ RUN mkdir -p /opt/nnUNet/nnUNet_raw_data_base/ /opt/nnUNet/nnUNet_raw_data_base/nnUNet_preprocessed /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet /home/bibsnet/data RUN wget -O bibsnet-v3.3.0.tar.gz "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" && \ - tar -xzf bibsnet-v3.3.0.tar.gz Task540_BIBSNet_Production_Model.tar.gz && \ - tar -xzf Task540_BIBSNet_Production_Model.tar.gz -C /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --strip-components 1 && \ - rm bibsnet-v3.3.0.tar.gz Task540_BIBSNet_Production_Model.tar.gz + tar -xzf bibsnet-v3.3.0.tar.gz Task540_BIBSnet_Production_T1T2_model.tar.gz && \ + tar -xzf Task540_BIBSnet_Production_T1T2_model.tar.gz -C /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --strip-components 1 && \ + rm bibsnet-v3.3.0.tar.gz Task540_BIBSnet_Production_T1T2_model.tar.gz + +RUN wget -O bibsnet-v3.3.0.tar.gz "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" && \ + tar -xzf bibsnet-v3.3.0.tar.gz Task541_BIBSnet_Production_T1only_model.tar.gz && \ + tar -xzf Task541_BIBSnet_Production_T1only_model.tar.gz -C /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --strip-components 1 && \ + rm bibsnet-v3.3.0.tar.gz Task541_BIBSnet_Production_T1only_model.tar.gz + +RUN wget -O bibsnet-v3.3.0.tar.gz "https://s3.msi.umn.edu/bibsnet-data/bibsnet-v3.3.0.tar.gz" && \ + tar -xzf bibsnet-v3.3.0.tar.gz Task542_BIBSnet_Production_T2only_model.tar.gz && \ + tar -xzf Task542_BIBSnet_Production_T2only_model.tar.gz -C /opt/nnUNet/nnUNet_raw_data_base/nnUNet_trained_models/nnUNet --strip-components 1 && \ + rm bibsnet-v3.3.0.tar.gz Task542_BIBSnet_Production_T2only_model.tar.gz COPY run.py /home/bibsnet/run.py COPY src /home/bibsnet/src
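A closing note on the sub_ses handling settled in patches 22 and 23 above: sub_ses is a list such as ['sub-01', 'ses-A'], while j_args is a plain dictionary (per the run_postBIBSnet docstring), so the intermediate j_args(*sub_ses) form would raise a TypeError the moment it ran, since dictionaries are not callable. Joining the list instead yields the BIDS-style prefix the TSV filename template expects. A two-line illustration with hypothetical values:

    sub_ses = ["sub-01", "ses-A"]   # hypothetical subject/session pair
    print("_".join(sub_ses))        # -> "sub-01_ses-A", the prefix used in output names
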