Merge branch 'master' into master

sarthakpati authored Sep 25, 2023
2 parents aab8d1a + b230e02 commit 0427482

Showing 6 changed files with 1,200 additions and 73 deletions.
26 changes: 14 additions & 12 deletions GANDLF/cli/generate_metrics.py
@@ -253,18 +253,20 @@ def __percentile_clip(input_tensor, reference_tensor=None, p_min=0.5, p_max=99.5
         ] = structural_similarity_index(gt_image_infill, output_infill, mask).item()
 
         # ncc metrics
-        overall_stats_dict[current_subject_id]["ncc_mean"] = ncc_mean(
-            gt_image_infill, output_infill
-        )
-        overall_stats_dict[current_subject_id]["ncc_std"] = ncc_std(
-            gt_image_infill, output_infill
-        )
-        overall_stats_dict[current_subject_id]["ncc_max"] = ncc_max(
-            gt_image_infill, output_infill
-        )
-        overall_stats_dict[current_subject_id]["ncc_min"] = ncc_min(
-            gt_image_infill, output_infill
-        )
+        compute_ncc = parameters.get("compute_ncc", True)
+        if compute_ncc:
+            overall_stats_dict[current_subject_id]["ncc_mean"] = ncc_mean(
+                gt_image_infill, output_infill
+            )
+            overall_stats_dict[current_subject_id]["ncc_std"] = ncc_std(
+                gt_image_infill, output_infill
+            )
+            overall_stats_dict[current_subject_id]["ncc_max"] = ncc_max(
+                gt_image_infill, output_infill
+            )
+            overall_stats_dict[current_subject_id]["ncc_min"] = ncc_min(
+                gt_image_infill, output_infill
+            )
 
         # only voxels that are to be inferred (-> flat array)
         # these are required for mse, psnr, etc.
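
This hunk makes the four normalized cross-correlation (NCC) statistics optional: a compute_ncc key is read from the parsed configuration, defaulting to True so that existing configs behave exactly as before. Below is a minimal, self-contained sketch of the same gating pattern; the _ncc_map helper and the sample arrays are illustrative stand-ins, not GaNDLF's real ncc_* implementations.

import numpy as np

def _ncc_map(a, b):
    # stand-in: per-voxel product of the z-scored inputs; its mean/std/max/min
    # summarize the normalized cross-correlation between the two images
    a = (a - a.mean()) / (a.std() + 1e-8)
    b = (b - b.mean()) / (b.std() + 1e-8)
    return a * b

def collect_ncc_stats(gt, pred, parameters):
    stats = {}
    if parameters.get("compute_ncc", True):  # default True preserves old behavior
        ncc = _ncc_map(gt, pred)
        stats["ncc_mean"] = float(ncc.mean())
        stats["ncc_std"] = float(ncc.std())
        stats["ncc_max"] = float(ncc.max())
        stats["ncc_min"] = float(ncc.min())
    return stats

# disabling the (potentially expensive) NCC pass is now a one-line config change
print(collect_ncc_stats(np.ones((4, 4)), np.ones((4, 4)), {"compute_ncc": False}))  # {}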
3 changes: 1 addition & 2 deletions GANDLF/logger.py
@@ -34,8 +34,7 @@ def write_header(self, mode="train"):
         if os.stat(self.filename).st_size == 0:
             mode_lower = mode.lower()
             row = "epoch_no," + mode_lower + "_loss,"
-            for metric in self.metrics:
-                row += mode_lower + "_" + metric + ","
+            row += ",".join([mode_lower + "_" + metric for metric in self.metrics]) + ","
             row = row[:-1]
             row += "\n"
             self.csv.write(row)
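
The loop that appended one header cell per metric is collapsed into a single str.join, the idiomatic way to build comma-separated strings in Python. A standalone sketch, with metrics standing in for the logger's self.metrics:

mode_lower = "train"
metrics = ["dice", "hausdorff"]  # stand-in for self.metrics

row = "epoch_no," + mode_lower + "_loss,"
row += ",".join([mode_lower + "_" + metric for metric in metrics]) + ","
row = row[:-1]  # drop the trailing comma, exactly as the original code did
print(row)  # epoch_no,train_loss,train_dice,train_hausdorff

Note that the appended trailing comma and the row[:-1] strip cancel out; joining without the extra comma would be simpler still, but the commit keeps the original output byte-for-byte.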
3 changes: 1 addition & 2 deletions GANDLF/utils/write_parse.py
@@ -22,8 +22,7 @@ def writeTrainingCSV(
     channelsID_list = channelsID.split(",")  # split into list
 
     outputToWrite = "SubjectID,"
-    for i, n in enumerate(channelsID_list):
-        outputToWrite += "Channel_" + str(i) + ","
+    outputToWrite += ",".join(["Channel_" + str(i) for i, n in enumerate(channelsID_list)]) + ","
    if labelID is not None:
        outputToWrite += "Label"
    outputToWrite += "\n"
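
The same join idiom replaces the channel-header loop here. The loop variable n is unused, so an equally valid variant (an alternative sketch, not what the commit does) could iterate over range(len(...)) instead:

channelsID_list = ["t1", "t2", "flair"]  # stand-in for the parsed channel IDs

header = "SubjectID,"
header += ",".join(["Channel_" + str(i) for i in range(len(channelsID_list))]) + ","
print(header)  # SubjectID,Channel_0,Channel_1,Channel_2,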
84 changes: 27 additions & 57 deletions testing/test_full.py
@@ -218,12 +218,15 @@ def sanitize_outputDir():
     Path(outputDir).mkdir(parents=True, exist_ok=True)
 
 
-def get_temp_config_path():
+def write_temp_config_path(parameters_to_write):
     print("02_2: Creating path for temporary config file")
     temp_config_path = os.path.join(outputDir, "config_temp.yaml")
     # if found in previous run, discard.
     if os.path.exists(temp_config_path):
         os.remove(temp_config_path)
+    if parameters_to_write is not None:
+        with open(temp_config_path, "w") as file:
+            yaml.dump(parameters_to_write, file)
     return temp_config_path
 
 
@@ -482,9 +485,7 @@ def test_train_regression_brainage_rad_2d(device):
         reset=True,
     )
 
-    file_config_temp = get_temp_config_path()
-    with open(file_config_temp, "w") as file:
-        yaml.dump(parameters_temp, file)
+    # file_config_temp = write_temp_config_path(parameters_temp)
     model_path = os.path.join(outputDir, "brain_age_best.pth.tar")
     config_path = os.path.join(outputDir, "parameters.pkl")
     optimization_result = post_training_model_optimization(model_path, config_path)
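
The renamed helper above now owns the whole temp-config workflow: compute the path, discard any file left over from a previous run, and, unless given None, dump the parameters as YAML before returning the path. Most call sites in this file therefore collapse from three lines (get path, open, yaml.dump) to one; in the two optimization tests, the write is commented out entirely, presumably because post_training_model_optimization reads its configuration from parameters.pkl instead. A standalone re-creation of the helper and both call shapes, assuming a writable scratch directory (the suite uses its shared outputDir):

import os
import tempfile
import yaml

outputDir = tempfile.mkdtemp()  # stand-in for the test suite's shared outputDir

def write_temp_config_path(parameters_to_write):
    temp_config_path = os.path.join(outputDir, "config_temp.yaml")
    if os.path.exists(temp_config_path):
        os.remove(temp_config_path)  # discard leftovers from a previous run
    if parameters_to_write is not None:
        with open(temp_config_path, "w") as file:
            yaml.dump(parameters_to_write, file)
    return temp_config_path

config_path = write_temp_config_path({"patch_size": [128, 128]})  # writes YAML, returns path
empty_path = write_temp_config_path(None)  # returns a clean path without writing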
@@ -755,10 +756,7 @@ def test_train_inference_optimize_classification_rad_3d(device):
         reset=True,
     )
 
-    file_config_temp = get_temp_config_path()
-    parameters_temp["model"]["onnx_export"] = True
-    with open(file_config_temp, "w") as file:
-        yaml.dump(parameters_temp, file)
+    # file_config_temp = write_temp_config_path(parameters_temp)
     model_path = os.path.join(outputDir, all_models_regression[0] + "_best.pth.tar")
     config_path = os.path.join(outputDir, "parameters.pkl")
     optimization_result = post_training_model_optimization(model_path, config_path)
@@ -964,10 +962,7 @@ def test_train_scheduler_classification_rad_2d(device):
     parameters["nested_training"]["validation"] = -5
     sanitize_outputDir()
     ## ensure parameters are parsed every single time
-    file_config_temp = get_temp_config_path()
-
-    with open(file_config_temp, "w") as file:
-        yaml.dump(parameters, file)
+    file_config_temp = write_temp_config_path(parameters)
 
     parameters = parseConfig(file_config_temp, version_check_flag=False)
     TrainingManager(
@@ -1090,9 +1085,7 @@ def test_train_normtype_segmentation_rad_3d(device):
     # these should raise exceptions
     for norm_type in ["none", None]:
         parameters["model"]["norm_type"] = norm_type
-        file_config_temp = get_temp_config_path()
-        with open(file_config_temp, "w") as file:
-            yaml.dump(parameters, file)
+        file_config_temp = write_temp_config_path(parameters)
         with pytest.raises(Exception) as exc_info:
             parameters = parseConfig(file_config_temp, version_check_flag=False)
 
@@ -1151,10 +1144,7 @@ def test_train_metrics_segmentation_rad_2d(device):
     parameters["model"]["architecture"] = "resunet"
     parameters["model"]["onnx_export"] = False
     parameters["model"]["print_summary"] = False
-    file_config_temp = get_temp_config_path()
-
-    with open(file_config_temp, "w") as file:
-        yaml.dump(parameters, file)
+    file_config_temp = write_temp_config_path(parameters)
 
     parameters = parseConfig(file_config_temp, version_check_flag=False)
     training_data, parameters["headers"] = parseTrainingCSV(
@@ -1220,9 +1210,7 @@ def get_parameters_after_alteration(loss_type: str) -> dict:
         testingDir + "/config_segmentation.yaml", version_check_flag=False
     )
     parameters["loss_function"] = loss_type
-    file_config_temp = get_temp_config_path()
-    with open(file_config_temp, "w") as file:
-        yaml.dump(parameters, file)
+    file_config_temp = write_temp_config_path(parameters)
     # read and parse csv
     parameters = parseConfig(file_config_temp, version_check_flag=True)
     parameters["nested_training"]["testing"] = -5
@@ -1274,15 +1262,13 @@ def get_parameters_after_alteration(loss_type: str) -> dict:
 
 def test_generic_config_read():
     print("24: Starting testing reading configuration")
-    file_config_temp = get_temp_config_path()
     parameters = parseConfig(
         os.path.join(baseConfigDir, "config_all_options.yaml"),
         version_check_flag=False,
     )
     parameters["data_preprocessing"]["resize_image"] = [128, 128]
 
-    with open(file_config_temp, "w") as file:
-        yaml.dump(parameters, file)
+    file_config_temp = write_temp_config_path(parameters)
 
     # read and parse csv
     parameters = parseConfig(file_config_temp, version_check_flag=True)
@@ -1361,7 +1347,6 @@ def test_generic_cli_function_preprocess():
     print("25: Starting testing cli function preprocess")
     file_config = os.path.join(testingDir, "config_segmentation.yaml")
     sanitize_outputDir()
-    file_config_temp = get_temp_config_path()
     file_data = os.path.join(inputDir, "train_2d_rad_segmentation.csv")
 
     input_data_df, _ = parseTrainingCSV(file_data, train=False)
@@ -1391,9 +1376,7 @@ def test_generic_cli_function_preprocess():
     parameters["data_preprocessing"]["to_canonical"] = None
     parameters["data_preprocessing"]["rgba_to_rgb"] = None
 
-    # store this separately for preprocess testing
-    with open(file_config_temp, "w") as outfile:
-        yaml.dump(parameters, outfile, default_flow_style=False)
+    file_config_temp = write_temp_config_path(parameters)
 
     preprocess_and_save(temp_csv, file_config_temp, outputDir)
     training_data, parameters["headers"] = parseTrainingCSV(
@@ -1459,7 +1442,6 @@ def test_generic_cli_function_mainrun(device):
     parameters = parseConfig(
         testingDir + "/config_segmentation.yaml", version_check_flag=False
     )
-    file_config_temp = get_temp_config_path()
 
     parameters["modality"] = "rad"
     parameters["patch_size"] = patch_size["2D"]
@@ -1475,8 +1457,7 @@ def test_generic_cli_function_mainrun(device):
     ]
     parameters["model"]["architecture"] = "unet"
 
-    with open(file_config_temp, "w") as file:
-        yaml.dump(parameters, file)
+    file_config_temp = write_temp_config_path(parameters)
 
     file_data = os.path.join(inputDir, "train_2d_rad_segmentation.csv")
 
@@ -2142,7 +2123,6 @@ def test_train_inference_segmentation_histology_2d(device):
     Path(output_dir_patches).mkdir(parents=True, exist_ok=True)
     output_dir_patches_output = os.path.join(output_dir_patches, "histo_patches_output")
     Path(output_dir_patches_output).mkdir(parents=True, exist_ok=True)
-    file_config_temp = get_temp_config_path()
 
     parameters_patch = {}
     # extracting minimal number of patches to ensure that the test does not take too long
@@ -2151,8 +2131,7 @@ def test_train_inference_segmentation_histology_2d(device):
     # define patches to be extracted in terms of microns
     parameters_patch["patch_size"] = ["1000m", "1000m"]
 
-    with open(file_config_temp, "w") as file:
-        yaml.dump(parameters_patch, file)
+    file_config_temp = write_temp_config_path(parameters_patch)
 
     patch_extraction(
         inputDir + "/train_2d_histo_segmentation.csv",
@@ -2218,7 +2197,6 @@ def test_train_inference_classification_histology_large_2d(device):
     Path(output_dir_patches).mkdir(parents=True, exist_ok=True)
     output_dir_patches_output = os.path.join(output_dir_patches, "histo_patches_output")
     Path(output_dir_patches_output).mkdir(parents=True, exist_ok=True)
-    file_config_temp = get_temp_config_path()
 
     for sub in ["1", "2"]:
         file_to_check = os.path.join(
@@ -2233,8 +2211,7 @@ def test_train_inference_classification_histology_large_2d(device):
     parameters_patch["patch_size"] = [128, 128]
     parameters_patch["value_map"] = {0: 0, 255: 255}
 
-    with open(file_config_temp, "w") as file:
-        yaml.dump(parameters_patch, file)
+    file_config_temp = write_temp_config_path(parameters_patch)
 
     patch_extraction(
         inputDir + "/train_2d_histo_classification.csv",
@@ -2315,9 +2292,7 @@ def resize_for_ci(filename, scale):
     )
     parameters["modality"] = "histo"
     parameters["patch_size"] = parameters_patch["patch_size"][0]
-    file_config_temp = get_temp_config_path()
-    with open(file_config_temp, "w") as file:
-        yaml.dump(parameters, file)
+    file_config_temp = write_temp_config_path(parameters)
     parameters = parseConfig(file_config_temp, version_check_flag=False)
     parameters["model"]["dimension"] = 2
     # read and parse csv
@@ -2394,16 +2369,14 @@ def test_train_inference_classification_histology_2d(device):
     shutil.rmtree(output_dir_patches)
     Path(output_dir_patches).mkdir(parents=True, exist_ok=True)
     output_dir_patches_output = os.path.join(output_dir_patches, "histo_patches_output")
-    file_config_temp = get_temp_config_path()
 
     parameters_patch = {}
     # extracting minimal number of patches to ensure that the test does not take too long
     parameters_patch["patch_size"] = [128, 128]
 
     for num_patches in [-1, 3]:
         parameters_patch["num_patches"] = num_patches
-        with open(file_config_temp, "w") as file:
-            yaml.dump(parameters_patch, file)
+        file_config_temp = write_temp_config_path(parameters_patch)
 
         if os.path.exists(output_dir_patches_output):
             shutil.rmtree(output_dir_patches_output)
@@ -2426,9 +2399,7 @@ def test_train_inference_classification_histology_2d(device):
     )
     parameters["modality"] = "histo"
     parameters["patch_size"] = 128
-    file_config_temp = get_temp_config_path()
-    with open(file_config_temp, "w") as file:
-        yaml.dump(parameters, file)
+    file_config_temp = write_temp_config_path(parameters)
     parameters = parseConfig(file_config_temp, version_check_flag=False)
     parameters["model"]["dimension"] = 2
     # read and parse csv
@@ -2846,9 +2817,7 @@ def test_generic_cli_function_configgenerator():
         "key_1": {"key_2": "value"}
     }
 
-    file_config_temp = get_temp_config_path()
-    with open(file_config_temp, "w") as file:
-        yaml.dump(generator_config, file)
+    file_config_temp = write_temp_config_path(generator_config)
 
     # test for failure
     with pytest.raises(Exception) as exc_info:
@@ -2892,7 +2861,7 @@ def test_generic_cli_function_recoverconfig():
         resume=False,
         reset=True,
     )
-    output_config_path = get_temp_config_path()
+    output_config_path = write_temp_config_path(None)
     assert recover_config(
         outputDir, output_config_path
     ), "recover_config returned false"
@@ -2960,6 +2929,7 @@ def test_generic_deploy_docker():
     if entrypoint_script:
         msg += " with custom entrypoint script"
     assert result, msg
+
     sanitize_outputDir()
 
     print("passed")
@@ -2970,7 +2940,6 @@ def test_collision_subjectid_test_segmentation_rad_2d(device):
     parameters = parseConfig(
         testingDir + "/config_segmentation.yaml", version_check_flag=False
     )
-    file_config_temp = get_temp_config_path()
 
     parameters["modality"] = "rad"
     parameters["patch_size"] = patch_size["2D"]
@@ -2987,8 +2956,7 @@ def test_collision_subjectid_test_segmentation_rad_2d(device):
     parameters["model"]["architecture"] = "unet"
     outputDir = os.path.join(testingDir, "data_output")
 
-    with open(file_config_temp, "w") as file:
-        yaml.dump(parameters, file)
+    file_config_temp = write_temp_config_path(parameters)
 
     # test the case where outputDir is explicitly provided to InferenceManager
     train_data_path = inputDir + "/train_2d_rad_segmentation.csv"
@@ -3092,9 +3060,7 @@ def test_generic_cli_function_metrics_cli_rad_nd():
 
     output_file = os.path.join(outputDir, "output.yaml")
 
-    temp_config = get_temp_config_path()
-    with open(temp_config, "w") as file:
-        yaml.dump(parameters, file)
+    temp_config = write_temp_config_path(parameters)
 
     # run the metrics calculation
     generate_metrics_dict(temp_infer_csv, temp_config, output_file)
@@ -3121,3 +3087,7 @@ def test_generic_deploy_metrics_docker():
     sanitize_outputDir()
 
     print("passed")
+
+    sanitize_outputDir()
+
+    print("passed")