[project]
name = "nnunetv2"
version = "2.5.1"
requires-python = ">=3.9"
description = "nnU-Net is a framework for out-of-the-box image segmentation."
readme = "readme.md"
license = { file = "LICENSE" }
authors = [
    { name = "Fabian Isensee", email = "[email protected]" },
    { name = "Helmholtz Imaging Applied Computer Vision Lab" }
]
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"Intended Audience :: Healthcare Industry",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
"Topic :: Scientific/Engineering :: Image Recognition",
"Topic :: Scientific/Engineering :: Medical Science Apps.",
]
keywords = [
    "deep learning",
    "image segmentation",
    "semantic segmentation",
    "medical image analysis",
    "medical image segmentation",
    "nnU-Net",
    "nnunet"
]
dependencies = [
"torch>=2.1.2",
"acvl-utils>=0.2,<0.3", # 0.3 may bring breaking changes. Careful!
"dynamic-network-architectures>=0.3.1,<0.4", # 0.3.1 and lower are supported, 0.4 may have breaking changes. Let's be careful here
"tqdm",
"dicom2nifti",
"scipy",
"batchgenerators>=0.25",
"numpy>=1.24",
"scikit-learn",
"scikit-image>=0.19.3",
"SimpleITK>=2.2.1",
"pandas",
"graphviz",
'tifffile',
'requests',
"nibabel",
"matplotlib",
"seaborn",
"imagecodecs",
"yacs",
"batchgeneratorsv2>=0.2",
"einops"
]
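# The runtime dependencies above are resolved by pip at install time; an illustrative
# install (package name taken from [project] above) is `pip install nnunetv2`, or
# `pip install -e .` from a repository checkout.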

[project.urls]
homepage = "https://github.com/MIC-DKFZ/nnUNet"
repository = "https://github.com/MIC-DKFZ/nnUNet"

[project.scripts]
nnUNetv2_plan_and_preprocess = "nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:plan_and_preprocess_entry"
nnUNetv2_extract_fingerprint = "nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:extract_fingerprint_entry"
nnUNetv2_plan_experiment = "nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:plan_experiment_entry"
nnUNetv2_preprocess = "nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:preprocess_entry"
nnUNetv2_train = "nnunetv2.run.run_training:run_training_entry"
nnUNetv2_predict_from_modelfolder = "nnunetv2.inference.predict_from_raw_data:predict_entry_point_modelfolder"
nnUNetv2_predict = "nnunetv2.inference.predict_from_raw_data:predict_entry_point"
nnUNetv2_convert_old_nnUNet_dataset = "nnunetv2.dataset_conversion.convert_raw_dataset_from_old_nnunet_format:convert_entry_point"
nnUNetv2_find_best_configuration = "nnunetv2.evaluation.find_best_configuration:find_best_configuration_entry_point"
nnUNetv2_determine_postprocessing = "nnunetv2.postprocessing.remove_connected_components:entry_point_determine_postprocessing_folder"
nnUNetv2_apply_postprocessing = "nnunetv2.postprocessing.remove_connected_components:entry_point_apply_postprocessing"
nnUNetv2_ensemble = "nnunetv2.ensembling.ensemble:entry_point_ensemble_folders"
nnUNetv2_accumulate_crossval_results = "nnunetv2.evaluation.find_best_configuration:accumulate_crossval_results_entry_point"
nnUNetv2_plot_overlay_pngs = "nnunetv2.utilities.overlay_plots:entry_point_generate_overlay"
nnUNetv2_download_pretrained_model_by_url = "nnunetv2.model_sharing.entry_points:download_by_url"
nnUNetv2_install_pretrained_model_from_zip = "nnunetv2.model_sharing.entry_points:install_from_zip_entry_point"
nnUNetv2_export_model_to_zip = "nnunetv2.model_sharing.entry_points:export_pretrained_model_entry"
nnUNetv2_move_plans_between_datasets = "nnunetv2.experiment_planning.plans_for_pretraining.move_plans_between_datasets:entry_point_move_plans_between_datasets"
nnUNetv2_evaluate_folder = "nnunetv2.evaluation.evaluate_predictions:evaluate_folder_entry_point"
nnUNetv2_evaluate_simple = "nnunetv2.evaluation.evaluate_predictions:evaluate_simple_entry_point"
nnUNetv2_convert_MSD_dataset = "nnunetv2.dataset_conversion.convert_MSD_dataset:entry_point"
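# Each entry above exposes an installed console command that dispatches to the listed
# "module:function" entry point. Illustrative call (arguments follow the nnU-Net
# documentation; run any command with -h for its exact interface):
#   nnUNetv2_train DATASET_NAME_OR_ID UNET_CONFIGURATION FOLD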

[project.optional-dependencies]
dev = [
"black",
"ruff",
"pre-commit"
]
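# The optional "dev" extra only adds formatting/linting tooling; it can be installed
# alongside the package with, for example:
#   pip install -e ".[dev]"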

[build-system]
requires = ["setuptools>=67.8.0"]
build-backend = "setuptools.build_meta"
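# With setuptools declared as the PEP 517 build backend, a standard build or install
# works without extra configuration, for example `pip install .` or `python -m build`
# (the latter requires the separate `build` package).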

[tool.codespell]
skip = '.git,*.pdf,*.svg'
#
# ignore-words-list = ''
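# codespell reads this [tool.codespell] section when invoked from the repository root
# (for example, plain `codespell`); depending on the codespell and Python versions,
# TOML support may require installing `codespell[toml]`.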