diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 44b2d6d..e8a7020 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -13,10 +13,11 @@ jobs:
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        # os: [ubuntu-latest]
-        # python-version: ["3.12"]
         os: [ubuntu-latest, macos-latest, macos-14]
-        python-version: ["3.12", "3.11"]
+        python-version: ["3.12", "3.11", "3.10"]
+        # include:
+        #   - os: ubuntu-latest
+        #     python-version: "3.9"

     steps:
       - uses: actions/checkout@v3
@@ -122,12 +123,6 @@
         with:
           fetch-depth: 0

-      - name: Install poetry
-        uses: snok/install-poetry@v1
-
-      - name: Install package
-        run: poetry install
-
       # This action uses Python Semantic Release v8
       # What this action does:
       # - Determines the next version number based on the commit history
@@ -140,154 +135,97 @@
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}

-      - name: Test Outputs of semantic release step
-        run: |
-          echo "${{ steps.release.outputs.released }}"
-          echo "${{ steps.release.outputs.version }}"
-          echo "${{ steps.release.outputs.tag }}"
-
-      - name: Install packaging-related tool
-        run:
-          python3 -m pip install build twine
-
-      # Build Package step:
-      # After semantic release, we should have a new tag if the commit history
-      # has been updated. If there isnt a new tag, then we dont need to build
-      # a new package. If there is a new tag, then we need to build a new package
-      # and publish it to PyPI
-      - name: Build package
-        if: steps.release.outputs.released == 'true'
-        run: |
-          poetry version ${{ steps.release.outputs.version }}
-          python -m build --sdist --wheel --outdir dist/ .
-
-      - name: Publish package distributions to PyPI
-        if: steps.release.outputs.released == 'true'
-        uses: pypa/gh-action-pypi-publish@release/v1
-        with:
-          verbose: true
-          user: __token__
-          password: ${{ secrets.PYPI_API_TOKEN }}
-
-      - name: Set up QEMU
-        if: steps.release.outputs.released == 'true'
-        uses: docker/setup-qemu-action@v3
-
-      - name: Set up Docker Buildx
-        if: steps.release.outputs.released == 'true'
-        uses: docker/setup-buildx-action@v3
-
-      - name: Login to Docker Hub
-        if: steps.release.outputs.released == 'true'
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Login to the GitHub Container Registry
-        if: steps.release.outputs.released == 'true'
-        uses: docker/login-action@v3
-        with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Extract metadata (tags, labels) for Docker
-        id: meta
-        uses: docker/metadata-action@v3
-        with:
-          images: |
-            ${{ secrets.DOCKERHUB_USERNAME }}/nbiatoolkit:${{ steps.release.outputs.tag }}
-            ghcr.io/${{ github.repository }}/nbiatoolkit:${{ steps.release.outputs.tag }}
-
-      - name: Build
-        if: steps.release.outputs.released == 'true'
-        uses: docker/build-push-action@v5
-        with:
-          context: .
-          platforms: linux/amd64,linux/arm64
-          file: ./Dockerfile
-          push: true
-          tags: |
-            ${{ secrets.DOCKERHUB_USERNAME }}/nbiatoolkit:${{ steps.release.outputs.tag }}
-            ${{ secrets.DOCKERHUB_USERNAME }}/nbiatoolkit:latest
-            ghcr.io/${{ github.repository }}/nbiatoolkit:${{ steps.release.outputs.tag }}
-            ghcr.io/${{ github.repository }}/nbiatoolkit:latest
-          labels: ${{ steps.meta.outputs.labels }}
-
-  test_install_withPyPi:
+  Publish-To-PyPi:
     needs: Continuous-Deployment
-    runs-on: ${{ matrix.os }}
-    strategy:
-      matrix:
-        os: [ubuntu-latest, macos-latest, macos-14]
-        python-version: ["3.12", "3.11", "3.10"]
-
+    if: needs.Continuous-Deployment.outputs.released == 'true'
+    runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - name: Checkout code
+        uses: actions/checkout@v3

-      - name: Set up Python ${{ matrix.python-version }}
+      - name: Set up Python 3.12
         uses: actions/setup-python@v4
         with:
-          python-version: ${{ matrix.python-version }}
+          python-version: 3.12

-      - name: Install using PyPi
-        run: |
-          pip install nbiatoolkit;
-          NBIAToolkit
+      - name: Install Package-Building Tool
+        run:
+          python3 -m pip install build twine

+      - name: Build package
+        run:
+          python3 -m build --sdist --wheel --outdir dist/

-  test_image_with_new_tag:
+      - name: Publish to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true
+          user: __token__
+          password: ${{ secrets.PYPI_API_TOKEN }}
+
+  Build-Docker-Images:
     needs: Continuous-Deployment
+    if: needs.Continuous-Deployment.outputs.released == 'true'
     runs-on: ubuntu-latest
     steps:
       - name: Checkout code
         uses: actions/checkout@v3

-      - name: Test Image With new Tag
-        run: |
-          # test image with latest tag
-          docker run --rm \
-            ${{ secrets.DOCKERHUB_USERNAME }}/nbiatoolkit:${{ needs.Continuous-Deployment.outputs.tag }} \
-            NBIAToolkit
+      - name: Set up QEMU
+        if: needs.Continuous-Deployment.outputs.released == 'true'
+        uses: docker/setup-qemu-action@v3

+      - name: Set up Docker Buildx
+        if: needs.Continuous-Deployment.outputs.released == 'true'
+        uses: docker/setup-buildx-action@v3

-  test_image_with_latest_tag:
-    needs: Continuous-Deployment
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v2
-
-      - name: Test Image with "latest" Tag
-        run: |
-          docker run --rm \
-            ${{ secrets.DOCKERHUB_USERNAME }}/nbiatoolkit:latest \
-            NBIAToolkit
+      - name: Login to Docker Hub
+        if: needs.Continuous-Deployment.outputs.released == 'true'
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}

-  Update-README:
-    needs: Continuous-Deployment
-    runs-on: ubuntu-latest
-    # if: jobs.Continuous-Deployment.outputs.released == 'true'
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v3
+      - name: Login to the GitHub Container Registry
+        if: needs.Continuous-Deployment.outputs.released == 'true'
+        uses: docker/login-action@v3
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}

-      - name: Get Current branch
-        run: |
-          echo "Current branch is: ${{ github.ref }}"
-          echo "Current branch is: ${{ github.head_ref }}"
-          echo "Current branch is: ${{ github.base_ref }}"
-          echo "Current branch is: ${{ github.event_name }}"
+      - name: Extract metadata (tags, labels) for Docker
+        id: meta
+        uses: docker/metadata-action@v3
+        with:
+          images: |
+            ${{ secrets.DOCKERHUB_USERNAME }}/nbiatoolkit:${{ needs.Continuous-Deployment.outputs.tag }}
+            ghcr.io/${{ github.repository }}/nbiatoolkit:${{ needs.Continuous-Deployment.outputs.tag }}

-          # if main, then git pull main
"refs/heads/main" ]; then - git pull origin main - fi + - name: Build and push Docker images + uses: docker/build-push-action@v5 + with: + context: . + platforms: linux/amd64,linux/arm64 + file: ./Dockerfile + push: true + tags: | + ${{ secrets.DOCKERHUB_USERNAME }}/nbiatoolkit:${{ needs.Continuous-Deployment.outputs.tag }} + ${{ secrets.DOCKERHUB_USERNAME }}/nbiatoolkit:latest + ghcr.io/${{ github.repository }}/nbiatoolkit:${{ needs.Continuous-Deployment.outputs.tag }} + ghcr.io/${{ github.repository }}/nbiatoolkit:latest + labels: ${{ steps.meta.outputs.labels }} + + Test-PyPi-Install: + needs: Publish-To-PyPi + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest, macos-latest, macos-14] + python-version: ["3.12", "3.11", "3.10"] - # fix diverged branch - git fetch origin ${{ github.head_ref }} - git checkout ${{ github.head_ref }} + steps: + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 @@ -296,57 +234,21 @@ jobs: - name: Install using PyPi run: | - # update pip - pip install . + pip install nbiatoolkit; NBIAToolkit - - name: Update README code block - run: | - awk '/``` bash NBIAToolkit-Output/ { - print "``` bash NBIAToolkit-Output"; - print "> NBIAToolkit --version"; - system("NBIAToolkit --version"); - f=1; - next - } f && /```/ { - print "```"; - f=0; - next - } !f' README.md > temp && mv temp README.md - - - name: Commit and push changes + Test-Docker-Image: + needs: Build-Docker-Images + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest, macos-latest] + docker_tag: ["latest", "${{ needs.Continuous-Deployment.outputs.tag }}"] + + steps: + - uses: actions/checkout@v3 + + - name: Install using Docker run: | - LATEST_TAG=$(curl -s \ - "https://api.github.com/repos/${{ github.repository }}/releases/latest" \ - | jq -r .tag_name | sed 's/^v//') - echo "LATEST_TAG=${LATEST_TAG}" - # Check for changes - if [[ $(git status --porcelain) ]]; then - # Changes are present - echo "Changes found. Committing and pushing..." - - git config --global user.name 'jjjermiah' - git config --global user.email 'jjjermiah@users.noreply.github.com' - - # Add all changes - git add . - - # Commit with a timestamp - git commit -m "chore: Update README: $LATEST_TAG" - - # Push changes to the remote repository - # if github.head_ref is not null - # then push to the branch - # else push to the base branch - if [ -n "${{ github.head_ref }}" ]; then - git push origin HEAD:${{ github.head_ref }} - else - git push origin HEAD:${{ github.ref }} - fi - - - echo "Changes committed and pushed successfully." - else - # No changes - echo "No changes found. Nothing to commit or push." 
-          fi
+          docker pull ${{ secrets.DOCKERHUB_USERNAME }}/nbiatoolkit:${{ matrix.docker_tag }};
+          docker run --rm ${{ secrets.DOCKERHUB_USERNAME }}/nbiatoolkit:${{ matrix.docker_tag }} NBIAToolkit
diff --git a/README.md b/README.md
index b38b701..d4a2c73 100644
--- a/README.md
+++ b/README.md
@@ -16,11 +16,11 @@
 ![GitHub repo size](https://img.shields.io/github/repo-size/jjjermiah/nbia-toolkit)
 [![Docker Pulls](https://img.shields.io/docker/pulls/jjjermiah/nbiatoolkit)](https://hub.docker.com/r/jjjermiah/nbiatoolkit)
-![GitHub milestone details](https://img.shields.io/github/milestones/progress-percent/jjjermiah/nbia-toolkit/1?style=flat-square&label=1.0.0%20Stable%20Release%20Milestone&link=https%3A%2F%2Fgithub.com%2Fjjjermiah%2Fnbia-toolkit%2Fmilestone%2F1)![GitHub milestone details](https://img.shields.io/github/milestones/progress/jjjermiah/nbia-toolkit/1?style=flat-square&label=%20&link=https%3A%2F%2Fgithub.com%2Fjjjermiah%2Fnbia-toolkit%2Fmilestone%2F1)
 [![GitHub issues](https://img.shields.io/github/issues/jjjermiah/nbia-toolkit)](https://github.com/jjjermiah/nbia-toolkit/issues)
 ![GitHub last commit](https://img.shields.io/github/last-commit/jjjermiah/nbia-toolkit)

 ## Table of Contents
+
 - [Features](#features)
 - [Installation](#installation)
 - [Python Usage](#python-usage)
@@ -43,7 +43,7 @@
 - ***Validate downloads with MD5 checksums*** for downloaded images
 - **Auto-sort** DICOM files using a user-defined pattern of DICOM tags with specialized ***DICOMSorter class***

-![SequenceDiagram](https://www.mermaidchart.com/raw/ce7f489f-bf58-4827-aedb-e379ed7bffd3?theme=dark&version=v0.1&format=svg)
+See [Developer Notes](devnotes/README.md) for more details on the features and the development process.

 ## Installation

@@ -93,10 +93,10 @@ For quick access to the NBIA, the toolkit also provides a command line interface
   / |/ / __ |/ // /| | / / / __ \/ __ \/ / //_/ / __/
  / /|  / /_/ // // ___ |/ / / /_/ / /_/ / / ,< / / /_
 /_/ |_/_____/___/_/  |_/_/  \____/\____/_/_/|_/_/\__/
-
+
 Version: 1.0.1

-Available CLI tools:
+Available CLI tools:

 getCollections [-h] [-u USERNAME] [-pw PASSWORD] [-p PREFIX] [-o OUTPUTFILE] [--version]
diff --git a/devnotes/CI-CD_2024-03-17.md b/devnotes/CI-CD_2024-03-17.md
new file mode 100644
index 0000000..c9ee4ce
--- /dev/null
+++ b/devnotes/CI-CD_2024-03-17.md
@@ -0,0 +1,14 @@
+# Continuous Integration and Continuous Deployment Diagram
+
+```mermaid
+graph LR
+    A[Push or Pull Request\nto main] --> B[Unit-Tests]
+    B --> C[Codecov]
+    B --> D[Continuous-Deployment]
+    D --> E[Publish-To-PyPi]
+    D --> F[Build-Docker-Images]
+    E --> G[Test-PyPi-Install]
+    F --> H[Test-Docker-Image]
+    A --> I[Build-Documentation]
+
+```
diff --git a/devnotes/README.md b/devnotes/README.md
new file mode 100644
index 0000000..8bf1b16
--- /dev/null
+++ b/devnotes/README.md
@@ -0,0 +1,4 @@
+# Developer Notes
+
+[CI-CD](CI-CD_2024-03-17.md)
+[Sequence Diagram](sequence-diagram_2024-03-17.md)
diff --git a/devnotes/class.svg b/devnotes/class.svg
new file mode 100644
index 0000000..205beca
[devnotes/class.svg: pyreverse-generated class-diagram SVG. It renders DICOMSorter, MD5HashMismatchError, NBIAClient, NBIA_BASE_URLS, NBIA_ENDPOINTS, OAuth2, and ReturnType with the same attributes, methods, and composition arrows as classes_nbiatoolkit.dot below; the SVG markup is omitted here.]
diff --git a/devnotes/classes_nbiatoolkit.dot b/devnotes/classes_nbiatoolkit.dot
new file mode 100644
index 0000000..36e8385
--- /dev/null
+++ b/devnotes/classes_nbiatoolkit.dot
@@ -0,0 +1,12 @@
+digraph "classes_nbiatoolkit" {
+rankdir=BT
+charset="utf-8"
+"nbiatoolkit.nbia.NBIAClient" [color="#77AADD", fontcolor="black", label=<{NBIAClient|OAuth_client<br ALIGN="LEFT"/>base_url<br ALIGN="LEFT"/>headers<br ALIGN="LEFT"/>logger<br ALIGN="LEFT"/>return_type<br ALIGN="LEFT"/>|downloadSeries(SeriesInstanceUID: Union[str, list], downloadDir: str, filePattern: str, overwrite: bool, nParallel: int, Progressbar: bool): bool<br ALIGN="LEFT"/>getBodyPartCounts(Collection: str, Modality: str, return_type: Optional[Union[ReturnType, str]]): List[dict[Any, Any]] \| pd.DataFrame<br ALIGN="LEFT"/>getCollectionDescriptions(collectionName: str, return_type: Optional[Union[ReturnType, str]]): List[dict[Any, Any]] \| pd.DataFrame<br ALIGN="LEFT"/>getCollectionPatientCount(prefix: str, return_type: Optional[Union[ReturnType, str]]): List[dict[Any, Any]] \| pd.DataFrame<br ALIGN="LEFT"/>getCollections(prefix: str, return_type: Optional[Union[ReturnType, str]]): List[dict[Any, Any]] \| pd.DataFrame<br ALIGN="LEFT"/>getDICOMTags(SeriesInstanceUID: str, return_type: Optional[Union[ReturnType, str]]): List[dict[Any, Any]] \| pd.DataFrame<br ALIGN="LEFT"/>getModalityValues(Collection: str, BodyPartExamined: str, Counts: bool, return_type: Optional[Union[ReturnType, str]]): List[dict[Any, Any]] \| pd.DataFrame<br ALIGN="LEFT"/>getNewPatients(Collection: str, Date: Union[str, datetime], return_type: Optional[Union[ReturnType, str]]): List[dict[Any, Any]] \| pd.DataFrame<br ALIGN="LEFT"/>getNewSeries(Date: Union[str, datetime], return_type: Optional[Union[ReturnType, str]]): List[dict[Any, Any]] \| pd.DataFrame<br ALIGN="LEFT"/>getPatients(Collection: str, return_type: Optional[Union[ReturnType, str]]): List[dict[Any, Any]] \| pd.DataFrame<br ALIGN="LEFT"/>getPatientsByCollectionAndModality(Collection: str, Modality: str, return_type: Optional[Union[ReturnType, str]]): List[dict[Any, Any]] \| pd.DataFrame<br ALIGN="LEFT"/>getSeries(Collection: str, PatientID: str, StudyInstanceUID: str, Modality: str, SeriesInstanceUID: str, BodyPartExamined: str, ManufacturerModelName: str, Manufacturer: str, return_type: Optional[Union[ReturnType, str]]): List[dict[Any, Any]] \| pd.DataFrame<br ALIGN="LEFT"/>getSeriesMetadata(SeriesInstanceUID: Union[str, list[str]], return_type: Optional[Union[ReturnType, str]]): List[dict[Any, Any]] \| pd.DataFrame<br ALIGN="LEFT"/>getStudies(Collection: str, PatientID: str, StudyInstanceUID: str, return_type: Optional[Union[ReturnType, str]]): List[dict[Any, Any]] \| pd.DataFrame<br ALIGN="LEFT"/>parsePARAMS(params: dict): dict<br ALIGN="LEFT"/>query_api(endpoint: NBIA_ENDPOINTS, params: dict): List[dict[Any, Any]]<br ALIGN="LEFT"/>}>, shape="record", style="filled"];
+"nbiatoolkit.utils.nbia_endpoints.NBIA_BASE_URLS" [color="#BBCC33", fontcolor="black", label=<{NBIA_BASE_URLS|name<br ALIGN="LEFT"/>|}>, shape="record", style="filled"];
+"nbiatoolkit.auth.OAuth2" [color="#77AADD", fontcolor="black", label=<{OAuth2|access_token<br ALIGN="LEFT"/>api_headers<br ALIGN="LEFT"/>base_url : NBIA, str<br ALIGN="LEFT"/>client_id : str<br ALIGN="LEFT"/>expiry_time : NoneType<br ALIGN="LEFT"/>fernet_key<br ALIGN="LEFT"/>password : str<br ALIGN="LEFT"/>refresh_expiration_time<br ALIGN="LEFT"/>refresh_expiry : NoneType<br ALIGN="LEFT"/>refresh_token : str<br ALIGN="LEFT"/>scope : NoneType<br ALIGN="LEFT"/>token_expiration_time<br ALIGN="LEFT"/>token_scope<br ALIGN="LEFT"/>username : str<br ALIGN="LEFT"/>|is_logged_out(): bool<br ALIGN="LEFT"/>is_token_expired(): bool<br ALIGN="LEFT"/>logout(): None<br ALIGN="LEFT"/>refresh_token_or_request_new(): None<br ALIGN="LEFT"/>request_new_access_token()<br ALIGN="LEFT"/>set_token_data(token_data: dict)<br ALIGN="LEFT"/>}>, shape="record", style="filled"];
+"nbiatoolkit.utils.nbia_endpoints.ReturnType" [color="#BBCC33", fontcolor="black", label=<{ReturnType|name<br ALIGN="LEFT"/>|}>, shape="record", style="filled"];
+"nbiatoolkit.auth.OAuth2" -> "nbiatoolkit.nbia.NBIAClient" [arrowhead="diamond", arrowtail="none", fontcolor="green", label="_oauth2_client", style="solid"];
+"nbiatoolkit.utils.nbia_endpoints.NBIA_BASE_URLS" -> "nbiatoolkit.nbia.NBIAClient" [arrowhead="diamond", arrowtail="none", fontcolor="green", label="_base_url", style="solid"];
+"nbiatoolkit.utils.nbia_endpoints.ReturnType" -> "nbiatoolkit.nbia.NBIAClient" [arrowhead="diamond", arrowtail="none", fontcolor="green", label="_return_type", style="solid"];
+"nbiatoolkit.utils.nbia_endpoints.ReturnType" -> "nbiatoolkit.nbia.NBIAClient" [arrowhead="diamond", arrowtail="none", fontcolor="green", label="_return_type", style="solid"];
+}
style="filled"]; +"nbiatoolkit.dicomsort.helper_functions" [color="#99DDFF", label=, shape="box", style="filled"]; +"nbiatoolkit.logger" [color="#44BB99", label=, shape="box", style="filled"]; +"nbiatoolkit.logger.logger" [color="#44BB99", label=, shape="box", style="filled"]; +"nbiatoolkit.nbia" [color="#77AADD", label=, shape="box", style="filled"]; +"nbiatoolkit.nbia_cli" [color="#77AADD", label=, shape="box", style="filled"]; +"nbiatoolkit.utils" [color="#BBCC33", label=, shape="box", style="filled"]; +"nbiatoolkit.utils.conv_response_list" [color="#BBCC33", label=, shape="box", style="filled"]; +"nbiatoolkit.utils.md5" [color="#BBCC33", label=, shape="box", style="filled"]; +"nbiatoolkit.utils.nbia_endpoints" [color="#BBCC33", label=, shape="box", style="filled"]; +"nbiatoolkit.utils.parsers" [color="#BBCC33", label=, shape="box", style="filled"]; +"nbiatoolkit" -> "nbiatoolkit.auth" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit" -> "nbiatoolkit.logger.logger" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit" -> "nbiatoolkit.nbia" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit" -> "nbiatoolkit.nbia_cli" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit" -> "nbiatoolkit.utils.nbia_endpoints" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit.auth" -> "nbiatoolkit.utils" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit.dicomsort" -> "nbiatoolkit.dicomsort" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit.dicomsort" -> "nbiatoolkit.dicomsort.helper_functions" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit.dicomsort.dicomsort" -> "nbiatoolkit.dicomsort.helper_functions" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit.logger" -> "nbiatoolkit.logger" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit.nbia" -> "nbiatoolkit.auth" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit.nbia" -> "nbiatoolkit.dicomsort" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit.nbia" -> "nbiatoolkit.logger.logger" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit.nbia" -> "nbiatoolkit.utils" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit.nbia" -> "nbiatoolkit.utils.conv_response_list" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit.nbia_cli" -> "nbiatoolkit.dicomsort" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit.nbia_cli" -> "nbiatoolkit.nbia" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit.utils" -> "nbiatoolkit.utils.conv_response_list" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit.utils" -> "nbiatoolkit.utils.md5" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit.utils" -> "nbiatoolkit.utils.nbia_endpoints" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit.utils" -> "nbiatoolkit.utils.parsers" [arrowhead="open", arrowtail="none"]; +"nbiatoolkit.utils.conv_response_list" -> "nbiatoolkit.utils.nbia_endpoints" [arrowhead="open", arrowtail="none"]; +} diff --git a/devnotes/sequence-diagram_2024-03-17.md b/devnotes/sequence-diagram_2024-03-17.md new file mode 100644 index 0000000..c284411 --- /dev/null +++ b/devnotes/sequence-diagram_2024-03-17.md @@ -0,0 +1,26 @@ +# Sequence Diagram +## Date: 2024-03-17 + +```mermaid +sequenceDiagram + participant User + participant NBIAClient + participant OAuth2 + participant NBIA_API + User->>NBIAClient: Initialize NBIAClient(username, password) + NBIAClient->>OAuth2: Initialize OAuth2Client(username, password, client_id, base_url) + OAuth2->>NBIA_API: POST request to TOKEN_URL + NBIA_API-->>OAuth2: Return access_token, refresh_token, expires_in + OAuth2-->>NBIAClient: Return OAuth2 instance + 
User->>NBIAClient: getCollections() + NBIAClient->>OAuth2: Access access_token + OAuth2->>OAuth2: Check if token is expired + OAuth2-->>NBIAClient: Return access_token + NBIAClient->>NBIA_API: GET request to COLLECTIONS_URL with access_token in headers + NBIA_API-->>NBIAClient: Return collections data + User->>NBIAClient: logout() + NBIAClient->>OAuth2: logout() + OAuth2->>NBIA_API: GET request to LOGOUT_URL + NBIA_API-->>OAuth2: Response + OAuth2->>OAuth2: Clear all properties +``` diff --git a/devnotes/updateREADME.sh b/devnotes/updateREADME.sh new file mode 100644 index 0000000..a71b493 --- /dev/null +++ b/devnotes/updateREADME.sh @@ -0,0 +1,17 @@ +# This script updates the version information in the README.md file. +# It uses awk to search for a code block starting with "```bash" in the README.md file. +# When it finds the code block, it prints "```bash" and executes the command "NBIAToolkit --version" using the system function. +# After executing the command, it prints "```" to close the code block. +# The script then redirects the modified content to a temporary file named "temp" and renames it back to README.md. +# This ensures that the version information is updated in the README.md file. +awk '/``` bash NBIAToolkit-Output/ { + print "``` bash NBIAToolkit-Output"; + print "> NBIAToolkit --version"; + system("NBIAToolkit --version"); + f=1; + next +} f && /```/ { + print "```"; + f=0; + next +} !f' README.md > temp && mv temp README.md diff --git a/poetry.lock b/poetry.lock index 43fa731..245b1d7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -964,6 +964,9 @@ files = [ {file = "importlib_resources-6.1.1.tar.gz", hash = "sha256:3893a00122eafde6894c59914446a512f728a0c1a45f9bb9b63721b6bacf0b4a"}, ] +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] @@ -1014,13 +1017,13 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio [[package]] name = "ipython" -version = "8.21.0" +version = "8.18.1" description = "IPython: Productive Interactive Computing" optional = false -python-versions = ">=3.10" +python-versions = ">=3.9" files = [ - {file = "ipython-8.21.0-py3-none-any.whl", hash = "sha256:1050a3ab8473488d7eee163796b02e511d0735cf43a04ba2a8348bd0f2eaf8a5"}, - {file = "ipython-8.21.0.tar.gz", hash = "sha256:48fbc236fbe0e138b88773fa0437751f14c3645fb483f1d4c5dee58b37e5ce73"}, + {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, + {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, ] [package.dependencies] @@ -1034,19 +1037,20 @@ prompt-toolkit = ">=3.0.41,<3.1.0" pygments = ">=2.4.0" stack-data = "*" traitlets = ">=5" +typing-extensions = {version = "*", markers = "python_version < \"3.10\""} [package.extras] -all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.23)", "pandas", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", 
"testpath", "trio", "typing-extensions"] +all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] black = ["black"] -doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] -test = ["pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "testpath", "trio"] +test = ["pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath", "trio"] [[package]] name = "ipywidgets" @@ -1246,6 +1250,7 @@ files = [ ] [package.dependencies] +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" @@ -1337,6 +1342,7 @@ files = [ ] [package.dependencies] +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} jupyter-server = ">=1.1.2" [[package]] @@ -1407,6 +1413,7 @@ files = [ [package.dependencies] async-lru = ">=1.0.0" +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} ipykernel = "*" jinja2 = ">=3.0.3" jupyter-core = "*" @@ -1449,6 +1456,7 @@ files = [ [package.dependencies] babel = ">=2.10" +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} jinja2 = ">=3.0.3" json5 = ">=0.9.0" jsonschema = ">=4.18.0" @@ -1483,6 +1491,9 @@ files = [ {file = "Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8"}, ] +[package.dependencies] +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} + [package.extras] docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] testing = ["coverage", "pyyaml"] @@ -1661,6 +1672,7 @@ files = [ click = ">=7.0" colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} ghp-import = ">=1.0" +importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""} jinja2 = ">=2.11.1" markdown = ">=3.2.1" markupsafe = ">=2.0.1" @@ -1767,6 +1779,7 @@ files = [ beautifulsoup4 = "*" bleach = "!=5.0.0" defusedxml = "*" +importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} jinja2 = ">=3.0" jupyter-core = ">=4.7" jupyterlab-pygments = "*" @@ -3134,6 +3147,7 @@ babel = ">=2.9" colorama = {version = ">=0.4.5", markers = "sys_platform == 
\"win32\""} docutils = ">=0.18.1,<0.21" imagesize = ">=1.3" +importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} Jinja2 = ">=3.0" packaging = ">=21.0" Pygments = ">=2.14" @@ -3755,5 +3769,5 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" -python-versions = ">=3.10 || 3.12" -content-hash = "08a55ea078d191fe9148c91630c8792ddf3b032223713050ac6c06d5b342e40f" +python-versions = ">=3.09 || 3.12" +content-hash = "d53786b72809319147f84cd80957abcc0235c847ba90cfe64bf1cf847115088b" diff --git a/pyproject.toml b/pyproject.toml index baf670c..3304de1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,7 +19,7 @@ getNewSeries = "nbiatoolkit.nbia_cli:getNewSeries_cli" [tool.poetry.dependencies] -python = ">=3.10 || 3.12" +python = ">=3.09 || 3.12" requests = "2.31.0" pydicom = "^2.4.0" tqdm = "^4.66.1" diff --git a/src/nbiatoolkit/dicomtags/tags.py b/src/nbiatoolkit/dicomtags/tags.py new file mode 100644 index 0000000..41adb5d --- /dev/null +++ b/src/nbiatoolkit/dicomtags/tags.py @@ -0,0 +1,222 @@ +from pydicom.datadict import dictionary_VR +from pydicom.datadict import tag_for_keyword +from pydicom._dicom_dict import DicomDictionary +import pandas as pd +from typing import Any, Union, List + + +def convert_element_to_int(element_str: str) -> int: + """ + Converts a DICOM element string representation to an integer. + + Args: + element_str (str): The DICOM element string representation. + + Returns: + int: The converted integer value. + + Examples: + >>> convert_element_to_int('(0028,0010)') + 2621456 + + Raises: + ValueError: If the element format is invalid. + + """ + if element_str.startswith(">"): + return -1 + + elements: list[str] = element_str.strip("()'").split(",") + + # Check if the element has the correct structure + if len(elements) != 2: + raise ValueError( + f"Invalid element format. Element must have the structure '(,)': {element_str}" + ) + + # Convert each element from string to integer + elements_int: list[int] + elements_int = [int(elem, base=16) for elem in elements] + + # Combine the integers into a single integer + combined_int: int = (elements_int[0] << 16) + elements_int[1] + + return combined_int + + +def convert_int_to_element(combined_int: int) -> str: + """ + Converts a combined integer into a DICOM element string representation. + + Args: + combined_int (int): The combined integer to be converted. + + Returns: + str: The DICOM element string representation of the combined integer. + + Examples: + >>> convert_int_to_element(0x00080060) + (0008,0060) + >>> convert_int_to_element(524384) + (0008,0060) + """ + assert isinstance(combined_int, int), "combined_int must be an integer." + + if combined_int == -1: + return "Unknown" + + # Split the integer into two parts + # i.e 131073 should become (2,1) + part1: int = combined_int >> 16 # shift right by 16 bits + part2: int = combined_int & 0xFFFF # bitwise AND with 0xFFFF (16 bits) + + # Convert the integers to hex strings + part1_str: str = hex(part1)[2:] + part2_str: str = hex(part2)[2:] + + # (2,1) should become (0002,0001) + part1_str = part1_str.zfill(4) + part2_str = part2_str.zfill(4) + + # capitalize any lowercase letters + part1_str = part1_str.upper() + part2_str = part2_str.upper() + + # Combine the hex strings into a single string + combined_str: str = f"({part1_str},{part2_str})" + + return combined_str + + +def LOOKUP_TAG(keyword: str) -> int: + """ + Looks up a DICOM tag based on the keyword. 
+def LOOKUP_TAG(keyword: str) -> int:
+    """
+    Looks up a DICOM tag based on the keyword.
+    A wrapper around pydicom's `tag_for_keyword` function.
+    """
+    tag: int | None = tag_for_keyword(keyword=keyword)
+    if tag is None:
+        raise ValueError(f"Tag not found for keyword: {keyword}")
+    return tag
+
+
+def element_VR_lookup(element_str: str) -> tuple[int, str]:
+    """
+    Looks up the VR (Value Representation) for a given DICOM element.
+
+    Args:
+        element_str (str): The DICOM element as a string.
+
+    Returns:
+        tuple[int, str]: A tuple containing the combined integer representation of the element and its VR.
+    """
+
+    combined_int: int = convert_element_to_int(element_str=element_str)
+    if combined_int == -1:
+        return (-1, "Unknown")
+
+    VR: str
+    try:
+        VR = dictionary_VR(tag=combined_int)
+    except KeyError:
+        VR = "Unknown,KeyError"
+
+    return (combined_int, VR)
+
+
+def getSeriesModality(series_tags_df: pd.DataFrame) -> str:
+    """
+    Retrieves the modality of a DICOM series.
+
+    Args:
+        series_tags_df (pd.DataFrame): A DataFrame containing DICOM series tags.
+
+    Returns:
+        str: The modality of the DICOM series.
+
+    Raises:
+        ValueError: If the modality tag is not found in the DICOM dictionary.
+    """
+    modality_tag: int | None
+    modality_tag = LOOKUP_TAG(keyword="Modality")
+
+    if modality_tag is None:
+        raise ValueError("Modality tag not found in the DICOM dictionary.")
+
+    modality_tag_element: str = convert_int_to_element(combined_int=modality_tag)
+
+    modality_row: pd.DataFrame = series_tags_df[
+        series_tags_df["element"] == modality_tag_element
+    ]
+    modality: str = modality_row["data"].values[0]
+
+    return modality
+
+
+def subsetSeriesTags(series_tags_df: pd.DataFrame, element: str) -> pd.DataFrame:
+    """
+    Subsets a DataFrame containing DICOM series tags to the rows between the opening
+    and closing occurrences of the given element.
+    """
+
+    locs: pd.DataFrame
+    locs = series_tags_df[series_tags_df["element"].str.contains(element, regex=False)]
+
+    if len(locs) == 0:
+        raise ValueError("Element not found in the series tags.")
+
+    if len(locs) != 2:
+        raise ValueError("Expected exactly two matching elements (sequence open and close) in the series tags.")
+
+    return series_tags_df.iloc[locs.index[0] : locs.index[1]]
+
+
+def getReferencedFrameOfReferenceSequence(series_tags_df: pd.DataFrame) -> pd.DataFrame:
+    modality = getSeriesModality(series_tags_df=series_tags_df)
+    if modality != "RTSTRUCT":
+        raise ValueError("Series is not an RTSTRUCT.")
+
+    tag: int = LOOKUP_TAG(keyword="ReferencedFrameOfReferenceSequence")
+
+    ReferencedFrameOfReferenceSequence_element: str = convert_int_to_element(
+        combined_int=tag
+    )
+
+    df: pd.DataFrame = subsetSeriesTags(
+        series_tags_df=series_tags_df,
+        element=ReferencedFrameOfReferenceSequence_element,
+    )
+
+    return df
+
+
+def getReferencedSeriesUIDS(series_tags_df: pd.DataFrame) -> List[str]:
+    """
+    Given a DataFrame containing DICOM series tags, retrieves the SeriesInstanceUIDs of the referenced series.
+    Useful for RTSTRUCT DICOM files to find the series that the RTSTRUCT references.
+    TODO: implement SEG and RTDOSE
+
+    Args:
+        series_tags_df (pd.DataFrame): A DataFrame containing DICOM series tags.
+
+    Returns:
+        List[str]: A list of SeriesInstanceUIDs of the referenced series.
+
+    Raises:
+        ValueError: If the series is not an RTSTRUCT.
+    """
+
+    # "SeriesInstanceUID" ---LOOKUP_TAG--> 2097166 ---convert_int_to_element--> (0020,000E)
+    SeriesInstanceUIDtag: int = LOOKUP_TAG(keyword="SeriesInstanceUID")
+    SeriesInstanceUID_element: str = convert_int_to_element(
+        combined_int=SeriesInstanceUIDtag
+    )
+
+    search_space: pd.DataFrame = getReferencedFrameOfReferenceSequence(
+        series_tags_df=series_tags_df
+    )
+
+    value: pd.DataFrame = search_space[
+        search_space["element"].str.contains(SeriesInstanceUID_element, regex=False)
+    ]
+
+    UIDS: list[str] = value["data"].to_list()
+
+    return UIDS
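The two converters above are just 16-bit packing of the DICOM (group, element) pair. A quick self-contained check of the arithmetic the docstrings describe, plus the pydicom lookups the module wraps (`tag_for_keyword` and `dictionary_VR` are the same `pydicom.datadict` functions imported by `tags.py`):

```python
from pydicom.datadict import dictionary_VR, tag_for_keyword

# (0028,0010) "Rows": (group << 16) + element packs the pair into one int
group, element = 0x0028, 0x0010
combined = (group << 16) + element
assert combined == 2621456 == tag_for_keyword("Rows")

# ...and shifting/masking unpacks it again, as convert_int_to_element does
assert (combined >> 16, combined & 0xFFFF) == (group, element)
assert f"({combined >> 16:04X},{combined & 0xFFFF:04X})" == "(0028,0010)"

# VR lookup as used by element_VR_lookup
assert dictionary_VR(tag_for_keyword("Modality")) == "CS"  # (0008,0060)
```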
+ """ + + # "SeriesInstanceUID" ---LOOKUP_TAG--> 2097166 ---convert_int_to_element--> (0020,000E) + SeriesInstanceUIDtag: int = LOOKUP_TAG(keyword="SeriesInstanceUID") + SeriesInstanceUID_element: str = convert_int_to_element( + combined_int=SeriesInstanceUIDtag + ) + + search_space: pd.DataFrame = getReferencedFrameOfReferenceSequence( + series_tags_df=series_tags_df + ) + + value: pd.DataFrame = search_space[ + search_space["element"].str.contains(SeriesInstanceUID_element) + ] + + UIDS: list[str] = value["data"].to_list() + + return UIDS diff --git a/src/nbiatoolkit/nbia.py b/src/nbiatoolkit/nbia.py index 324a77a..7da54ed 100644 --- a/src/nbiatoolkit/nbia.py +++ b/src/nbiatoolkit/nbia.py @@ -146,11 +146,6 @@ def __init__( self._log.debug("Setting up OAuth2 client... with username %s", username) self._oauth2_client = OAuth2(username=username, password=password) - self._api_headers: dict[str, str] = { - "Authorization": f"Bearer {self._oauth2_client.access_token}", - "Content-Type": "application/json", - } - self._base_url: NBIA_BASE_URLS = NBIA_BASE_URLS.NBIA self._return_type: ReturnType = ( return_type @@ -170,7 +165,13 @@ def OAuth_client(self) -> OAuth2: @property def headers(self): - return self._api_headers + + API_HEADERS: dict[str, str] = { + "Authorization": f"Bearer {self.OAuth_client.access_token}", + "Content-Type": "application/json", + } + + return API_HEADERS # create a setter for the base_url in case user want to use NLST @property @@ -639,7 +640,7 @@ def downloadSeries( downloadDir, filePattern, overwrite, - self._api_headers, + self.headers, self._base_url, self._log, Progressbar, diff --git a/tests/test_tags.py b/tests/test_tags.py new file mode 100644 index 0000000..b0c6be8 --- /dev/null +++ b/tests/test_tags.py @@ -0,0 +1,156 @@ +import pytest +from src.nbiatoolkit import NBIAClient +from src.nbiatoolkit.dicomtags.tags import convert_int_to_element +from src.nbiatoolkit.dicomtags.tags import convert_element_to_int +from src.nbiatoolkit.dicomtags.tags import LOOKUP_TAG +from src.nbiatoolkit.dicomtags.tags import * + + +def test_convert_int_to_element(): + # Test case 1: combined_int = 0x00080060 + result = convert_int_to_element(0x00080060) + assert result == "(0008,0060)", f"Expected (0008,0060), but got {result}" + + # Test case 2: combined_int = 524384 + result = convert_int_to_element(524384) + assert result == "(0008,0060)", f"Expected (0008,0060), but got {result}" + + # Test case 3: combined_int = -1 + result = convert_int_to_element(-1) + assert result == "Unknown", f"Expected Unknown, but got {result}" + + # Test case 4: combined_int = 0 + result = convert_int_to_element(0) + assert result == "(0000,0000)", f"Expected (0000,0000), but got {result}" + + # Test case 5: combined_int = 65535 + result = convert_int_to_element(65535) + assert result == "(0000,FFFF)", f"Expected (0000,FFFF), but got {result}" + + with pytest.raises(ValueError) as e: + convert_element_to_int("(1,1,1)") + + print("All test cases passed!") + + +def test_convert_element_to_int(): + # Test case 1: element_str = '(0028,0010)' + result = convert_element_to_int("(0028,0010)") + assert result == 2621456, f"Expected 2621456, but got {result}" + + # Test case 2: element_str = '(0008,0060)' + result = convert_element_to_int("(0008,0060)") + assert result == 524384, f"Expected 524384, but got {result}" + + # Test case 3: element_str = '(0000,0000)' + result = convert_element_to_int("(0000,0000)") + assert result == 0, f"Expected 0, but got {result}" + + # Test case 4: element_str = '(0000,FFFF)' 
diff --git a/tests/test_tags.py b/tests/test_tags.py
new file mode 100644
index 0000000..b0c6be8
--- /dev/null
+++ b/tests/test_tags.py
@@ -0,0 +1,156 @@
+import pytest
+from src.nbiatoolkit import NBIAClient
+from src.nbiatoolkit.dicomtags.tags import convert_int_to_element
+from src.nbiatoolkit.dicomtags.tags import convert_element_to_int
+from src.nbiatoolkit.dicomtags.tags import LOOKUP_TAG
+from src.nbiatoolkit.dicomtags.tags import *
+
+
+def test_convert_int_to_element():
+    # Test case 1: combined_int = 0x00080060
+    result = convert_int_to_element(0x00080060)
+    assert result == "(0008,0060)", f"Expected (0008,0060), but got {result}"
+
+    # Test case 2: combined_int = 524384
+    result = convert_int_to_element(524384)
+    assert result == "(0008,0060)", f"Expected (0008,0060), but got {result}"
+
+    # Test case 3: combined_int = -1
+    result = convert_int_to_element(-1)
+    assert result == "Unknown", f"Expected Unknown, but got {result}"
+
+    # Test case 4: combined_int = 0
+    result = convert_int_to_element(0)
+    assert result == "(0000,0000)", f"Expected (0000,0000), but got {result}"
+
+    # Test case 5: combined_int = 65535
+    result = convert_int_to_element(65535)
+    assert result == "(0000,FFFF)", f"Expected (0000,FFFF), but got {result}"
+
+    with pytest.raises(ValueError) as e:
+        convert_element_to_int("(1,1,1)")
+
+    print("All test cases passed!")
+
+
+def test_convert_element_to_int():
+    # Test case 1: element_str = '(0028,0010)'
+    result = convert_element_to_int("(0028,0010)")
+    assert result == 2621456, f"Expected 2621456, but got {result}"
+
+    # Test case 2: element_str = '(0008,0060)'
+    result = convert_element_to_int("(0008,0060)")
+    assert result == 524384, f"Expected 524384, but got {result}"
+
+    # Test case 3: element_str = '(0000,0000)'
+    result = convert_element_to_int("(0000,0000)")
+    assert result == 0, f"Expected 0, but got {result}"
+
+    # Test case 4: element_str = '(0000,FFFF)'
+    result = convert_element_to_int("(0000,FFFF)")
+    assert result == 65535, f"Expected 65535, but got {result}"
+
+    # Test case 5: element_str = '>Unknown'
+    result = convert_element_to_int(">Unknown")
+    assert result == -1, f"Expected -1, but got {result}"
+
+    print("All test cases passed!")
+
+
+def test_LOOKUP_TAG():
+    # Test case 1: keyword = "PatientName"
+    result = LOOKUP_TAG("PatientName")
+    assert result == 0x00100010, f"Expected 0x00100010, but got {result}"
+
+    # Test case 2: keyword = "StudyDate"
+    result = LOOKUP_TAG("StudyDate")
+    assert result == 0x00080020, f"Expected 0x00080020, but got {result}"
+
+    # Test case 3: keyword = "UnknownKeyword"
+    try:
+        LOOKUP_TAG("UnknownKeyword")
+    except ValueError as e:
+        assert (
+            str(e) == "Tag not found for keyword: UnknownKeyword"
+        ), f"Expected 'Tag not found for keyword: UnknownKeyword', but got {str(e)}"
+    else:
+        assert False, "Expected ValueError to be raised for unknown keyword"
+
+    print("All test cases passed!")
+
+
+def test_element_VR_lookup():
+    # Test case 1: element_str = '(0028,0010)'
+    result = element_VR_lookup("(0028,0010)")
+    assert result == (2621456, "US"), f"Expected (2621456, 'US'), but got {result}"
+
+    # Test case 2: element_str = '(0008,0060)'
+    result = element_VR_lookup("(0008,0060)")
+    assert result == (524384, "CS"), f"Expected (524384, 'CS'), but got {result}"
+
+    # Test case 3: element_str = '(0000,0000)'
+    result = element_VR_lookup("(0000,0000)")
+    assert result == (0, "UL"), f"Expected (0, 'UL'), but got {result}"
+
+    # Test case 4: element_str = '(0000,FFFF)'
+    result = element_VR_lookup("(0000,FFFF)")
+    assert result == (
+        65535,
+        "Unknown,KeyError",
+    ), f"Expected (65535, 'Unknown,KeyError'), but got {result}"
+
+    # Test case 5: element_str = '>Unknown'
+    result = element_VR_lookup(">Unknown")
+    assert result == (-1, "Unknown"), f"Expected (-1, 'Unknown'), but got {result}"
+
+    print("All test cases passed!")
+
+
+@pytest.fixture(scope="session")
+def client():
+    return NBIAClient(return_type="dataframe")
+
+
+@pytest.fixture(scope="session")
+def series(client):
+    return client.getSeries(Collection="Pediatric-CT-SEG", Modality="RTSTRUCT")
+
+
+@pytest.fixture(scope="session")
+def RTSTRUCT_Series(series):
+    return series[series["Modality"] == "RTSTRUCT"]
+
+
+@pytest.fixture(scope="session")
+def RTSTRUCT_Tags(client, RTSTRUCT_Series):
+    seriesUID = RTSTRUCT_Series["SeriesInstanceUID"].values[0]
+    assert seriesUID is not None, "Expected seriesUID to be returned, but got None"
+
+    tags_df = client.getDICOMTags(seriesUID)
+    assert tags_df is not None, "Expected tags to be returned, but got None"
+
+    return tags_df
+
+
+def test_getReferencedSeriesUIDS(RTSTRUCT_Tags):
+    seriesUIDS = getReferencedSeriesUIDS(RTSTRUCT_Tags)
+    expected = ["1.3.6.1.4.1.14519.5.2.1.133742245714270925254982946723351496764"]
+    assert seriesUIDS == expected, f"Expected {expected}, but got {seriesUIDS}"
+
+
+def test_getSeriesModality(RTSTRUCT_Tags):
+    modality = getSeriesModality(RTSTRUCT_Tags)
+    assert modality == "RTSTRUCT", f"Expected 'RTSTRUCT', but got {modality}"
+
+    # Test case 2: modality tag not found
+    RTSTRUCT_Tags.drop(
+        RTSTRUCT_Tags[RTSTRUCT_Tags["element"] == "(0008,0060)"].index, inplace=True
+    )
+
+    with pytest.raises(IndexError) as e:
+        getSeriesModality(RTSTRUCT_Tags)
+
+
+def test_failsubsetSeriesTags(RTSTRUCT_Series):
+    with pytest.raises(KeyError) as e:
+        subsetSeriesTags(RTSTRUCT_Series, "(0008,0060)")
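Tying the new `dicomtags` module and its tests together, the intended end-to-end use looks like this sketch. The collection name, import paths, and the resolved UID mirror the fixtures above; live network access to the NBIA API is assumed:

```python
from src.nbiatoolkit import NBIAClient
from src.nbiatoolkit.dicomtags.tags import getReferencedSeriesUIDS

client = NBIAClient(return_type="dataframe")

# Find an RTSTRUCT series, fetch its tag table, then resolve which image
# series the structure set references via its ReferencedFrameOfReferenceSequence
series = client.getSeries(Collection="Pediatric-CT-SEG", Modality="RTSTRUCT")
tags_df = client.getDICOMTags(series["SeriesInstanceUID"].values[0])
referenced = getReferencedSeriesUIDS(tags_df)
print(referenced)  # e.g. ['1.3.6.1.4.1.14519.5.2.1.133742245714270925254982946723351496764']
```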