Skip to content

Commit

Permalink
Merge pull request #11 from Australian-Imaging-Service/add-data-project-check
Browse files Browse the repository at this point in the history

Added new "user-training" test data and correctly pulled DICOM metadata
  • Loading branch information
tclose authored Apr 12, 2023
2 parents 6c4db15 + b3ff3bf commit 047a12e
Show file tree
Hide file tree
Showing 3 changed files with 92 additions and 18 deletions.
4 changes: 2 additions & 2 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,12 @@ __pycache__
.DS_Store
.coverage
.settings/
*.swp
*.swp
/build/
/dist/
.pytest_cache
/.history
.env
.vscode
*.egg-info
/.venv
*.venv
4 changes: 4 additions & 0 deletions scripts/add_data.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
from xnat4tests.data import add_data


# Populate the running test-XNAT instance with the "user-training" dataset
# (one of the names in AVAILABLE_DATASETS in xnat4tests/data.py).
# NOTE(review): assumes a test XNAT container is already up and reachable
# with the default config — confirm before running standalone.
add_data("user-training")
102 changes: 86 additions & 16 deletions xnat4tests/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
from .utils import logger


AVAILABLE_DATASETS = ["dummydicom"]
AVAILABLE_DATASETS = ["dummydicom", "user-training"]


@contextmanager
Expand Down Expand Up @@ -72,6 +72,72 @@ def add_data(dataset: str, config_name: str or dict = "default"):
session_id="dummydicomsession",
)

elif dataset == "user-training":

_upload_dicom_data(
[t1w_syngo(), fmap_syngo()],
config,
project_id="TRAINING",
subject_id="CONT01",
session_id="CONT01_MR01",
)

_upload_dicom_data(
[t1w_syngo(), fmap_syngo()],
config,
project_id="TRAINING",
subject_id="CONT02",
session_id="CONT02_MR01",
)

_upload_dicom_data(
[t1w_syngo(), fmap_syngo()],
config,
project_id="TRAINING",
subject_id="CONT01",
session_id="CONT01_MR02",
)

_upload_dicom_data(
[t1w_syngo(), fmap_syngo()],
config,
project_id="TRAINING",
subject_id="CONT02",
session_id="CONT02_MR02",
)

_upload_dicom_data(
[t1w_syngo(), fmap_syngo()],
config,
project_id="TRAINING",
subject_id="TEST01",
session_id="TEST01_MR01",
)

_upload_dicom_data(
[t1w_syngo(), fmap_syngo()],
config,
project_id="TRAINING",
subject_id="TEST01",
session_id="TEST01_MR02",
)

_upload_dicom_data(
[t1w_syngo(), fmap_syngo()],
config,
project_id="TRAINING",
subject_id="TEST02",
session_id="TEST02_MR01",
)

_upload_dicom_data(
[t1w_syngo(), fmap_syngo()],
config,
project_id="TRAINING",
subject_id="TEST02",
session_id="TEST02_MR02",
)

else:
raise RuntimeError(
f"Unrecognised dataset '{dataset}', can be one of {AVAILABLE_DATASETS}"
Expand Down Expand Up @@ -100,15 +166,12 @@ def _upload_dicom_data(
try:
login.get(project_uri)
except XNATResponseError:
pass
login.put(project_uri)
else:
logger.warning(
"%s project already exists in test XNAT, skipping add data step",
logger.debug(
"'%s' project already exists in test XNAT, skipping add data project",
project_id,
)
return

login.put(project_uri)

# Create subject
query = {
Expand All @@ -118,6 +181,18 @@ def _upload_dicom_data(
}
login.put(f"{project_uri}/subjects/{subject_id}", query=query)

try:
login.get(f"{project_uri}/subjects/{subject_id}/experiments/{session_id}")
except XNATResponseError:
pass
else:
logger.info(
"'%s' session in '%s' project already exists in test XNAT, skipping",
session_id,
project_id,
)
return

dicoms_dir = work_dir / "dicoms"
dicoms_dir.mkdir()

Expand Down Expand Up @@ -152,13 +227,8 @@ def _upload_dicom_data(
content_type="application/zip",
method="post",
)
experiment_id = login.projects[project_id].experiments[session_id].id
# Pull headers and create OHIF headers
# login.put(
# f"/data/experiments/{session_id}?pullDataFromHeaders=true"
# )
# login.put(
# f"/data/experiments/{session_id}?fixScanTypes=true"
# )
# login.put(
# f"/data/experiments/{session_id}?triggerPipelines=true"
# )
login.put(f"/data/experiments/{experiment_id}?pullDataFromHeaders=true")
login.put(f"/data/experiments/{experiment_id}?fixScanTypes=true")
login.put(f"/data/experiments/{experiment_id}?triggerPipelines=true")

0 comments on commit 047a12e

Please sign in to comment.