diff --git a/docs/generate.sh b/docs/generate.sh index a70952e41..fad20e0d4 100755 --- a/docs/generate.sh +++ b/docs/generate.sh @@ -17,10 +17,10 @@ fi export PDOC_CLASS_MODULES="$(poetry run python get_keyword_mapping.py)" if [ $# -eq 1 ] && [ "$1" = "--api" ]; then - poetry run pdoc ../webknossos/webknossos -t pdoc_templates/pure_pdoc -h 0.0.0.0 -p 8196 + poetry run pdoc ../webknossos/webknossos !webknossos.dataset._utils -t pdoc_templates/pure_pdoc -h 0.0.0.0 -p 8196 else rm -rf src/api - poetry run pdoc ../webknossos/webknossos -t pdoc_templates/with_mkdocs -o src/api + poetry run pdoc ../webknossos/webknossos !webknossos.dataset._utils -t pdoc_templates/with_mkdocs -o src/api # rename .html files to .md find src/api -iname "*.html" -exec sh -c 'mv "$0" "${0%.html}.md"' {} \; # assert that API docs are written diff --git a/webknossos/Changelog.md b/webknossos/Changelog.md index 035750ad3..a29746dfd 100644 --- a/webknossos/Changelog.md +++ b/webknossos/Changelog.md @@ -10,99 +10,115 @@ and this project adheres to [Semantic Versioning](http://semver.org/) `MAJOR.MIN For upgrade instructions, please check the respective _Breaking Changes_ sections. ## Unreleased + [Commits](https://github.com/scalableminds/webknossos-libs/compare/v0.14.0...HEAD) ### Breaking Changes ### Added +- Adds support for Zarr3-based volume annotations as introduced in [webknossos#7288](https://github.com/scalableminds/webknossos/pull/7288). [#952](https://github.com/scalableminds/webknossos-libs/pull/952) + ### Changed -### Fixed +- The `WK_USE_ZARRITA` env variable is no longer required. [`zarrita`](https://github.com/scalableminds/zarrita) is always installed and now the default for Zarr and Zarr3 datasets. [#950](https://github.com/scalableminds/webknossos-libs/issues/950) +- Updates various dependencies. [#943](https://github.com/scalableminds/webknossos-libs/pull/943) +### Fixed ## [0.14.0](https://github.com/scalableminds/webknossos-libs/releases/tag/v0.14.0) - 2023-10-11 + [Commits](https://github.com/scalableminds/webknossos-libs/compare/v0.13.7...v0.14.0) ### Breaking Changes + - `wait_and_ensure_success` from `webknossos.utils` now requires an `executor` argument. [#943](https://github.com/scalableminds/webknossos-libs/pull/943) ### Changed -- Updates various dependencies. [#943](https://github.com/scalableminds/webknossos-libs/pull/943) +- Updates various dependencies. [#943](https://github.com/scalableminds/webknossos-libs/pull/943) ## [0.13.7](https://github.com/scalableminds/webknossos-libs/releases/tag/v0.13.7) - 2023-10-07 + [Commits](https://github.com/scalableminds/webknossos-libs/compare/v0.13.6...v0.13.7) ### Fixed -- Fixed a bug in writing compressed data. [#942](https://github.com/scalableminds/webknossos-libs/pull/942) - +- Fixed a bug in writing compressed data. [#942](https://github.com/scalableminds/webknossos-libs/pull/942) ## [0.13.6](https://github.com/scalableminds/webknossos-libs/releases/tag/v0.13.6) - 2023-08-17 -[Commits](https://github.com/scalableminds/webknossos-libs/compare/v0.13.5...v0.13.6) +[Commits](https://github.com/scalableminds/webknossos-libs/compare/v0.13.5...v0.13.6) ## [0.13.5](https://github.com/scalableminds/webknossos-libs/releases/tag/v0.13.5) - 2023-08-15 + [Commits](https://github.com/scalableminds/webknossos-libs/compare/v0.13.4...v0.13.5) ### Added + - Added `task_type` property to `Task` class. 
[#938](https://github.com/scalableminds/webknossos-libs/pull/938) ### Fixed -- Fixed a bug where parallel access to the properties json leads to an JsonDecodeError in the webknossos CLI [#919](https://github.com/scalableminds/webknossos-libs/issues/919) - +- Fixed a bug where parallel access to the properties json leads to an JsonDecodeError in the webknossos CLI [#919](https://github.com/scalableminds/webknossos-libs/issues/919) ## [0.13.4](https://github.com/scalableminds/webknossos-libs/releases/tag/v0.13.4) - 2023-08-14 + [Commits](https://github.com/scalableminds/webknossos-libs/compare/v0.13.3...v0.13.4) ### Breaking Changes + - Task/Project management: `open` tasks have been renamed to `pending`. Use `Task.status.pending_instance_count` instead of `Task.status.open_instance_count`. [#930](https://github.com/scalableminds/webknossos-libs/pull/930) ### Fixed -- Fixed an infinite loop in the mag calculation during anisotropic downsampling in situations where the target mag cannot possibly be reached while adhering to the anisotropic downsampling scheme. [#934](https://github.com/scalableminds/webknossos-libs/pull/934) - +- Fixed an infinite loop in the mag calculation during anisotropic downsampling in situations where the target mag cannot possibly be reached while adhering to the anisotropic downsampling scheme. [#934](https://github.com/scalableminds/webknossos-libs/pull/934) ## [0.13.3](https://github.com/scalableminds/webknossos-libs/releases/tag/v0.13.3) - 2023-08-08 + [Commits](https://github.com/scalableminds/webknossos-libs/compare/v0.13.2...v0.13.3) ### Added + - `View` has a `map_chunk` method now to run a function on each chunk and collect the results in a list. ### Changed + - As WEBKNOSSOS does not require the largest segment id. It is also not mandatory in the WEBKNOSSOS libs anymore. [#917](https://github.com/scalableminds/webknossos-libs/issues/917) The method `SegmentationLayer.refresh_largest_segment_id` was added to lookup the highest value in segmentation data and set `largest_segment_id` accordingly. - The `convert` command of the cli now has a `--category` flag, to select the LayerCategoryType. - ## [0.13.2](https://github.com/scalableminds/webknossos-libs/releases/tag/v0.13.2) - 2023-07-26 + [Commits](https://github.com/scalableminds/webknossos-libs/compare/v0.13.1...v0.13.2) ### Changed -- The `convert` command of the cli now has a `--category` flag, to select the LayerCategoryType. +- The `convert` command of the cli now has a `--category` flag, to select the LayerCategoryType. ## [0.13.1](https://github.com/scalableminds/webknossos-libs/releases/tag/v0.13.1) - 2023-07-17 + [Commits](https://github.com/scalableminds/webknossos-libs/compare/v0.13.0...v0.13.1) ### Changed + - The conversion folder structures to layer names does not allow slashes in the layer name anymore. [#918](https://github.com/scalableminds/webknossos-libs/issues/918) ### Fixed -- Fixed a bug where compression in add_layer_from_images uses too much memory [#900](https://github.com/scalableminds/webknossos-libs/issues/900) - +- Fixed a bug where compression in add_layer_from_images uses too much memory [#900](https://github.com/scalableminds/webknossos-libs/issues/900) ## [0.13.0](https://github.com/scalableminds/webknossos-libs/releases/tag/v0.13.0) - 2023-06-21 + [Commits](https://github.com/scalableminds/webknossos-libs/compare/v0.12.6...v0.13.0) ### Added + - Added `duration_in_seconds` and `modified` to `AnnotationInfo`. 
[#914](https://github.com/scalableminds/webknossos-libs/pull/914) - Added [`zarrita`](https://github.com/scalableminds/zarrita) storage backend for arrays. zarrita supports Zarr v2 and v3 including sharding. To activate zarrita, the environment variable `WK_USE_ZARRITA` must be set. [#912](https://github.com/scalableminds/webknossos-libs/pull/912) - Added a `Zarr3` data format which supports sharding. [#912](https://github.com/scalableminds/webknossos-libs/pull/912) ### Changed + - Integrated the `wkcuber` CLI tool into `webknossos` package. [#903](https://github.com/scalableminds/webknossos-libs/pull/903) - To get an overview of all webknossos subcommands type `webknossos --help`. If the usage of a single subcommand is of interest type `webknossos --help` - These commands were changed: @@ -123,55 +139,61 @@ For upgrade instructions, please check the respective _Breaking Changes_ section - `python -m wkcuber.recubing` ### Fixed -- Fixed a bug where upsampling of a layer would fail, if the layer had a bounding box that doesn't align with the from_mag mag. [#915](https://github.com/scalableminds/webknossos-libs/pull/915) - +- Fixed a bug where upsampling of a layer would fail, if the layer had a bounding box that doesn't align with the from_mag mag. [#915](https://github.com/scalableminds/webknossos-libs/pull/915) ## [0.12.6](https://github.com/scalableminds/webknossos-libs/releases/tag/v0.12.6) - 2023-06-09 + [Commits](https://github.com/scalableminds/webknossos-libs/compare/v0.12.5...v0.12.6) ### Changed -- Upgrades `wkw`. [#911](https://github.com/scalableminds/webknossos-libs/pull/911) +- Upgrades `wkw`. [#911](https://github.com/scalableminds/webknossos-libs/pull/911) ## [0.12.5](https://github.com/scalableminds/webknossos-libs/releases/tag/v0.12.5) - 2023-06-01 + [Commits](https://github.com/scalableminds/webknossos-libs/compare/v0.12.4...v0.12.5) ### Added -- Added support for Python 3.11. [#843](https://github.com/scalableminds/webknossos-libs/pull/843) +- Added support for Python 3.11. [#843](https://github.com/scalableminds/webknossos-libs/pull/843) ## [0.12.4](https://github.com/scalableminds/webknossos-libs/releases/tag/v0.12.4) - 2023-05-25 + [Commits](https://github.com/scalableminds/webknossos-libs/compare/v0.12.3...v0.12.4) ### Added + - `Group.add_tree` now also accepts a tree object as a first parameter (instead of only a string). [#891](https://github.com/scalableminds/webknossos-libs/pull/891) - `Group.remove_tree_by_id` was added. [#891](https://github.com/scalableminds/webknossos-libs/pull/891) ### Changed + - Upgrades `black`, `mypy`, `pylint`, `pytest`. [#873](https://github.com/scalableminds/webknossos-libs/pull/873) ### Fixed + - Fixed poetry build backend for new versions of Poetry. [#899](https://github.com/scalableminds/webknossos-libs/pull/899) - Added axis_order fields for Zarr data format. [#902](https://github.com/scalableminds/webknossos-libs/issues/902) - - ## [0.12.3](https://github.com/scalableminds/webknossos-libs/releases/tag/v0.12.3) - 2023-02-22 + [Commits](https://github.com/scalableminds/webknossos-libs/compare/v0.12.2...v0.12.3) ### Added + - Added support to import ImageJ Hyperstack tiff files via `Dataset.from_images` and `dataset.add_layer_from_images`. [#877](https://github.com/scalableminds/webknossos-libs/pull/877) ### Changed + - `Dataset.from_images` and `dataset.add_layer_from_images` now automatically convert big endian dtypes to their little endian counterparts by default. 
[#877](https://github.com/scalableminds/webknossos-libs/pull/877) ### Fixed -- Fixed reading czi files with non-zero axis offsets. [#876](https://github.com/scalableminds/webknossos-libs/pull/876) - +- Fixed reading czi files with non-zero axis offsets. [#876](https://github.com/scalableminds/webknossos-libs/pull/876) ## [0.12.2](https://github.com/scalableminds/webknossos-libs/releases/tag/v0.12.2) - 2023-02-20 + [Commits](https://github.com/scalableminds/webknossos-libs/compare/v0.12.1...v0.12.2) [Commits](https://github.com/scalableminds/webknossos-libs/compare/v0.12.1...HEAD) @@ -179,7 +201,6 @@ For upgrade instructions, please check the respective _Breaking Changes_ section - Added `RemoteFolder` for assigning remote datasets to a WEBKNOSSOS folder. [#868](https://github.com/scalableminds/webknossos-libs/pull/868) - ## [0.12.1](https://github.com/scalableminds/webknossos-libs/releases/tag/v0.12.1) - 2023-02-16 [Commits](https://github.com/scalableminds/webknossos-libs/compare/v0.12.0...v0.12.1) diff --git a/webknossos/poetry.lock b/webknossos/poetry.lock index c136e82be..b82c5a2fa 100644 --- a/webknossos/poetry.lock +++ b/webknossos/poetry.lock @@ -3657,7 +3657,7 @@ cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\ cffi = ["cffi (>=1.11)"] [extras] -all = ["JPype1", "imagecodecs", "pims", "pylibCZIrw", "tifffile", "zarrita"] +all = ["JPype1", "imagecodecs", "pims", "pylibCZIrw", "tifffile"] bioformats = ["JPype1", "pims"] czi = ["pims", "pylibCZIrw"] imagecodecs = ["imagecodecs", "pims"] @@ -3667,4 +3667,4 @@ tifffile = ["pims", "tifffile"] [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.12" -content-hash = "a59ed18ce55c4fe7970fea028c54557540687cd1c6aee565d4fa2e779e43f9fc" +content-hash = "fe01697d45931df23b05355d7d9b06a73853cb2381160a5d414b170f1a1ab452" diff --git a/webknossos/pyproject.toml b/webknossos/pyproject.toml index 0bc64a6f6..b7886a410 100644 --- a/webknossos/pyproject.toml +++ b/webknossos/pyproject.toml @@ -54,6 +54,7 @@ typing-extensions = "^4.0" universal-pathlib = "0.1.3" wkw = "1.1.22" zarr = "^2.16.0" +zarrita = "0.2.1" zipp = "^3.5.0" # A list of all of the optional dependencies, some of which are included in the @@ -63,7 +64,6 @@ JPype1 = { version = "^1.3.0", optional = true } pims = { version = "^0.6.0", optional = true } tifffile = { version = ">=2021.11.2", optional = true } pylibCZIrw = { version = "3.5.1", source = "scm", optional = true } -zarrita = "0.2.1" [tool.poetry.extras] pims = ["pims"] @@ -71,7 +71,7 @@ tifffile = ["pims", "tifffile"] imagecodecs = ["pims", "imagecodecs"] bioformats = ["pims","JPype1"] czi = ["pims","pylibCZIrw"] -all = ["pims","tifffile","imagecodecs","JPype1","pylibCZIrw","zarrita"] +all = ["pims","tifffile","imagecodecs","JPype1","pylibCZIrw"] [tool.poetry.dev-dependencies] # autoflake diff --git a/webknossos/test.sh b/webknossos/test.sh index 35658eb6d..63e2eaddf 100755 --- a/webknossos/test.sh +++ b/webknossos/test.sh @@ -11,7 +11,6 @@ export_vars # (which is standard python behavior). This is necessary so that the imports # refer to the checked out (and potentially modified) code. 
PYTEST="poetry run python -m pytest --suppress-no-test-exit-code" -export WK_USE_ZARRITA=True if [ $# -gt 0 ] && [ "$1" = "--refresh-snapshots" ]; then diff --git a/webknossos/testdata/annotations/l4_sample__explorational__suser__94b271.zip b/webknossos/testdata/annotations/l4_sample__explorational__suser__94b271.zip new file mode 100644 index 000000000..2302210bb Binary files /dev/null and b/webknossos/testdata/annotations/l4_sample__explorational__suser__94b271.zip differ diff --git a/webknossos/tests/test_annotation.py b/webknossos/tests/test_annotation.py index cd37cb8f7..655e4e918 100644 --- a/webknossos/tests/test_annotation.py +++ b/webknossos/tests/test_annotation.py @@ -1,9 +1,11 @@ import tempfile from pathlib import Path +import numpy as np import pytest import webknossos as wk +from webknossos.dataset import DataFormat from webknossos.geometry import BoundingBox, Vec3Int from .constants import TESTDATA_DIR, TESTOUTPUT_DIR @@ -11,7 +13,7 @@ pytestmark = [pytest.mark.with_vcr] -def test_annotation_from_zip_file() -> None: +def test_annotation_from_wkw_zip_file() -> None: annotation = wk.Annotation.load( TESTDATA_DIR / "annotations" @@ -57,6 +59,24 @@ def test_annotation_from_zip_file() -> None: assert voxel_id == 2504698 +def test_annotation_from_zarr3_zip_file() -> None: + annotation = wk.Annotation.load( + TESTDATA_DIR / "annotations" / "l4_sample__explorational__suser__94b271.zip" + ) + + with annotation.temporary_volume_layer_copy() as volume_layer: + assert volume_layer.data_format == DataFormat.Zarr3 + assert volume_layer.bounding_box == BoundingBox( + (3072, 3072, 512), (1024, 1024, 1024) + ) + input_annotation_mag = volume_layer.get_mag("2-2-1") + voxel_id = input_annotation_mag.read( + absolute_offset=Vec3Int(3630, 3502, 1024), size=Vec3Int(2, 2, 1) + ) + + assert np.array_equiv(voxel_id, 1) + + def test_annotation_from_nml_file() -> None: snapshot_path = TESTDATA_DIR / "nmls" / "generated_annotation_snapshot.nml" diff --git a/webknossos/webknossos/__init__.py b/webknossos/webknossos/__init__.py index 91348ec57..86b76deaf 100644 --- a/webknossos/webknossos/__init__.py +++ b/webknossos/webknossos/__init__.py @@ -24,9 +24,9 @@ # The table above contains zero-width spaces in the code examples after each dot to enforce correct line-breaks. 
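The test-runner change above drops the `export WK_USE_ZARRITA=True` line because, per the changelog entry, zarrita is now always installed and backs both Zarr and Zarr3 layers. As a minimal sketch (dataset path, layer name, and voxel size are illustrative placeholders, not taken from this diff), creating a sharding-capable Zarr3 layer no longer needs any environment variable:

```python
import webknossos as wk
from webknossos.dataset import COLOR_CATEGORY, DataFormat

# No WK_USE_ZARRITA env var required anymore: zarrita is the backend for
# both DataFormat.Zarr and DataFormat.Zarr3.
ds = wk.Dataset("testoutput/zarr3_example", voxel_size=(11.2, 11.2, 25.0))
layer = ds.add_layer(
    "color",
    COLOR_CATEGORY,
    dtype_per_channel="uint8",
    data_format=DataFormat.Zarr3,  # sharding-capable Zarr v3 format
)
layer.add_mag(1)
```
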
-from webknossos.administration import * -from webknossos.annotation import * -from webknossos.client import * -from webknossos.dataset import * -from webknossos.geometry import * -from webknossos.skeleton import * +from .administration import * +from .annotation import * +from .client import * +from .dataset import * +from .geometry import * +from .skeleton import * diff --git a/webknossos/webknossos/_nml/parameters.py b/webknossos/webknossos/_nml/parameters.py index dceef15b5..f7bd4745e 100644 --- a/webknossos/webknossos/_nml/parameters.py +++ b/webknossos/webknossos/_nml/parameters.py @@ -3,9 +3,8 @@ from loxun import XmlWriter -from webknossos.geometry import BoundingBox -from webknossos.geometry.bounding_box import _DEFAULT_BBOX_NAME - +from ..geometry import BoundingBox +from ..geometry.bounding_box import _DEFAULT_BBOX_NAME from .utils import Vector3, enforce_not_null, filter_none_values DEFAULT_BOUNDING_BOX_COLOR = [0.2, 0.5, 0.1, 1] diff --git a/webknossos/webknossos/_nml/segment.py b/webknossos/webknossos/_nml/segment.py index 949ec6eda..b2b2831b3 100644 --- a/webknossos/webknossos/_nml/segment.py +++ b/webknossos/webknossos/_nml/segment.py @@ -3,8 +3,7 @@ from loxun import XmlWriter -from webknossos.geometry import Vec3Int - +from ..geometry import Vec3Int from .utils import Vector4, enforce_not_null, filter_none_values diff --git a/webknossos/webknossos/_nml/volume.py b/webknossos/webknossos/_nml/volume.py index b91b0917e..29faadf11 100644 --- a/webknossos/webknossos/_nml/volume.py +++ b/webknossos/webknossos/_nml/volume.py @@ -17,6 +17,8 @@ class Volume(NamedTuple): # older wk versions did not serialize the name which is why the name is optional: name: Optional[str] segments: List[Segment] + format: Optional[str] = None + largest_segment_id: Optional[int] = None def _dump(self, xf: XmlWriter) -> None: xf.startTag( @@ -27,6 +29,10 @@ def _dump(self, xf: XmlWriter) -> None: "location": self.location, "fallbackLayer": self.fallback_layer, "name": self.name, + "format": self.format, + "largestSegmentId": str(self.largest_segment_id) + if self.largest_segment_id + else None, } ), ) @@ -39,10 +45,15 @@ def _dump(self, xf: XmlWriter) -> None: @classmethod def _parse(cls, nml_volume: Element) -> "Volume": + largest_segment_id_str = nml_volume.get("largestSegmentId", default=None) return cls( id=int(enforce_not_null(nml_volume.get("id"))), location=nml_volume.get("location"), fallback_layer=nml_volume.get("fallbackLayer", default=None), name=nml_volume.get("name", default=None), + format=nml_volume.get("format", default=None), segments=[], + largest_segment_id=None + if largest_segment_id_str is None + else int(largest_segment_id_str), ) diff --git a/webknossos/webknossos/administration/__init__.py b/webknossos/webknossos/administration/__init__.py index deca9b3bd..e0804a6a4 100644 --- a/webknossos/webknossos/administration/__init__.py +++ b/webknossos/webknossos/administration/__init__.py @@ -1,3 +1,3 @@ -from webknossos.administration.project import Project -from webknossos.administration.task import Task, TaskType -from webknossos.administration.user import Team, User +from .project import Project +from .task import Task, TaskType +from .user import Team, User diff --git a/webknossos/webknossos/administration/project.py b/webknossos/webknossos/administration/project.py index 9a7a40df9..d9860cbcc 100644 --- a/webknossos/webknossos/administration/project.py +++ b/webknossos/webknossos/administration/project.py @@ -3,23 +3,23 @@ import attr -from webknossos.administration.user import 
User -from webknossos.client._generated.api.default import ( +from ..client._generated.api.default import ( project_info_by_id, project_info_by_name, task_infos_by_project_id, ) -from webknossos.client._generated.types import Unset -from webknossos.client.context import _get_generated_client +from ..client._generated.types import Unset +from ..client.context import _get_generated_client +from .user import User if TYPE_CHECKING: - from webknossos.administration import Task - from webknossos.client._generated.models.project_info_by_id_response_200 import ( + from ..client._generated.models.project_info_by_id_response_200 import ( ProjectInfoByIdResponse200, ) - from webknossos.client._generated.models.project_info_by_name_response_200 import ( + from ..client._generated.models.project_info_by_name_response_200 import ( ProjectInfoByNameResponse200, ) + from .task import Task @attr.frozen @@ -59,7 +59,7 @@ def get_tasks(self, fetch_all: bool = False) -> List["Task"]: set parameter pass fetch_all=True to use pagination to fetch all tasks iteratively with pagination. """ - from webknossos.administration import Task + from .task import Task PAGINATION_LIMIT = 1000 pagination_page = 0 diff --git a/webknossos/webknossos/administration/task.py b/webknossos/webknossos/administration/task.py index 08fdc99b7..bf819658b 100644 --- a/webknossos/webknossos/administration/task.py +++ b/webknossos/webknossos/administration/task.py @@ -5,30 +5,25 @@ import attr import httpx -from webknossos.administration import Project -from webknossos.annotation import Annotation, AnnotationInfo -from webknossos.client._generated.api.default import ( - annotation_infos_by_task_id, - task_info, -) -from webknossos.client.context import _get_generated_client -from webknossos.dataset.dataset import RemoteDataset -from webknossos.geometry import BoundingBox, Vec3Int -from webknossos.utils import warn_deprecated +from ..annotation import Annotation, AnnotationInfo +from ..client._generated.api.default import annotation_infos_by_task_id, task_info +from ..client.context import _get_generated_client +from ..dataset.dataset import RemoteDataset +from ..geometry import BoundingBox, Vec3Int +from ..utils import warn_deprecated +from .project import Project logger = logging.getLogger(__name__) if TYPE_CHECKING: - from webknossos.client._generated.models.task_info_response_200 import ( - TaskInfoResponse200, - ) - from webknossos.client._generated.models.task_info_response_200_type import ( + from ..client._generated.models.task_info_response_200 import TaskInfoResponse200 + from ..client._generated.models.task_info_response_200_type import ( TaskInfoResponse200Type, ) - from webknossos.client._generated.models.task_infos_by_project_id_response_200_item import ( + from ..client._generated.models.task_infos_by_project_id_response_200_item import ( TaskInfosByProjectIdResponse200Item, ) - from webknossos.client._generated.models.task_infos_by_project_id_response_200_item_type import ( + from ..client._generated.models.task_infos_by_project_id_response_200_item_type import ( TaskInfosByProjectIdResponse200ItemType, ) @@ -194,7 +189,7 @@ def create( @classmethod def _from_dict(cls, response_dict: Dict) -> "Task": - from webknossos.client._generated.models.task_info_response_200 import ( + from ..client._generated.models.task_info_response_200 import ( TaskInfoResponse200, ) diff --git a/webknossos/webknossos/administration/user.py b/webknossos/webknossos/administration/user.py index 910df18c4..176725b43 100644 --- 
a/webknossos/webknossos/administration/user.py +++ b/webknossos/webknossos/administration/user.py @@ -2,27 +2,27 @@ import attr -from webknossos.client._generated.api.default import ( +from ..client._generated.api.default import ( current_user_info, team_list, user_info_by_id, user_list, user_logged_time, ) -from webknossos.client._generated.types import Unset +from ..client._generated.types import Unset if TYPE_CHECKING: - from webknossos.client._generated.models.current_user_info_response_200 import ( + from ..client._generated.models.current_user_info_response_200 import ( CurrentUserInfoResponse200, ) - from webknossos.client._generated.models.user_list_response_200_item import ( + from ..client._generated.models.user_list_response_200_item import ( UserListResponse200Item, ) - from webknossos.client._generated.models.user_info_by_id_response_200 import ( + from ..client._generated.models.user_info_by_id_response_200 import ( UserInfoByIdResponse200, ) -from webknossos.client.context import _get_generated_client +from ..client.context import _get_generated_client @attr.frozen diff --git a/webknossos/webknossos/annotation/__init__.py b/webknossos/webknossos/annotation/__init__.py index 411702cd3..6c2950332 100644 --- a/webknossos/webknossos/annotation/__init__.py +++ b/webknossos/webknossos/annotation/__init__.py @@ -1,8 +1,8 @@ -from webknossos.annotation.annotation import ( +from .annotation import ( Annotation, AnnotationState, AnnotationType, SegmentInformation, open_annotation, ) -from webknossos.annotation.annotation_info import AnnotationInfo +from .annotation_info import AnnotationInfo diff --git a/webknossos/webknossos/annotation/_nml_conversion.py b/webknossos/webknossos/annotation/_nml_conversion.py index ccd970bfa..0afdc4b1a 100644 --- a/webknossos/webknossos/annotation/_nml_conversion.py +++ b/webknossos/webknossos/annotation/_nml_conversion.py @@ -7,15 +7,16 @@ import numpy as np import webknossos._nml as wknml -from webknossos.geometry import Vec3Int + +from ..geometry import Vec3Int if TYPE_CHECKING: - from webknossos.annotation import Annotation - from webknossos.skeleton import Group, Skeleton, Tree + from ..annotation import Annotation + from ..skeleton import Group, Skeleton, Tree def nml_to_skeleton(nml: wknml.Nml) -> "Skeleton": - from webknossos.skeleton import Skeleton + from ..skeleton import Skeleton skeleton = Skeleton( dataset_name=nml.parameters.name, @@ -183,6 +184,8 @@ def annotation_to_nml( # pylint: disable=dangerous-default-value ) for segment_id, segment_info in volume.segments.items() ], + format=str(volume.data_format), + largest_segment_id=volume.largest_segment_id, ) ) diff --git a/webknossos/webknossos/annotation/annotation.py b/webknossos/webknossos/annotation/annotation.py index 1ac79991d..f1b5a478c 100644 --- a/webknossos/webknossos/annotation/annotation.py +++ b/webknossos/webknossos/annotation/annotation.py @@ -18,6 +18,7 @@ annotation data programmatically is discouraged therefore. 
""" +import json import re import warnings from contextlib import contextmanager, nullcontext @@ -25,6 +26,7 @@ from io import BytesIO from os import PathLike from pathlib import Path +from shutil import copyfileobj from tempfile import TemporaryDirectory from typing import ( BinaryIO, @@ -48,13 +50,22 @@ from zipp import Path as ZipPath import webknossos._nml as wknml -from webknossos.annotation._nml_conversion import annotation_to_nml, nml_to_skeleton -from webknossos.client._generated.api.default import dataset_info -from webknossos.dataset import SEGMENTATION_CATEGORY, Dataset, Layer, SegmentationLayer -from webknossos.dataset.dataset import RemoteDataset -from webknossos.geometry import BoundingBox, Vec3Int -from webknossos.skeleton import Skeleton -from webknossos.utils import time_since_epoch_in_ms, warn_deprecated + +from ..client._generated.api.default import dataset_info +from ..dataset import ( + SEGMENTATION_CATEGORY, + DataFormat, + Dataset, + Layer, + RemoteDataset, + SegmentationLayer, +) +from ..dataset.defaults import PROPERTIES_FILE_NAME +from ..dataset.properties import DatasetProperties, dataset_converter +from ..geometry import BoundingBox, Vec3Int +from ..skeleton import Skeleton +from ..utils import time_since_epoch_in_ms, warn_deprecated +from ._nml_conversion import annotation_to_nml, nml_to_skeleton Vector3 = Tuple[float, float, float] Vector4 = Tuple[float, float, float, float] @@ -78,13 +89,26 @@ class _VolumeLayer: id: int name: str fallback_layer_name: Optional[str] + data_format: DataFormat zip: Optional[ZipPath] segments: Dict[int, SegmentInformation] + largest_segment_id: Optional[int] def _default_zip_name(self) -> str: return f"data_{self.id}_{self.name}.zip" +def _extract_zip_folder(zip_file: ZipFile, out_path: Path, prefix: str) -> None: + for zip_entry in zip_file.filelist: + if zip_entry.filename.startswith(prefix) and not zip_entry.is_dir(): + out_file_path = out_path / (zip_entry.filename[len(prefix) :]) + out_file_path.parent.mkdir(parents=True, exist_ok=True) + with zip_file.open(zip_entry, "r") as zip_f, out_file_path.open( + "wb" + ) as out_f: + copyfileobj(zip_f, out_f) + + @attr.define class Annotation: name: str @@ -270,9 +294,9 @@ def download( They can still be streamed from WEBKNOSSOS using `annotation.get_remote_annotation_dataset()`. * `_return_context` should not be set. 
""" - from webknossos.client._generated.api.default import annotation_download - from webknossos.client._resolve_short_link import resolve_short_link - from webknossos.client.context import ( + from ..client._generated.api.default import annotation_download + from ..client._resolve_short_link import resolve_short_link + from ..client.context import ( _get_context, _get_generated_client, webknossos_context, @@ -431,8 +455,12 @@ def _parse_volumes( id=volume.id, name="Volume" if volume.name is None else volume.name, fallback_layer_name=volume.fallback_layer, + data_format=DataFormat(volume.format) + if volume.format is not None + else DataFormat.WKW, zip=volume_path, segments=segments, + largest_segment_id=volume.largest_segment_id, ) ) assert len(set(i.id for i in volume_layers)) == len( @@ -491,7 +519,7 @@ def save(self, path: Union[str, PathLike]) -> None: def upload(self) -> str: """Uploads the annotation to your current `webknossos_context`.""" - from webknossos.client.context import _get_generated_client + from ..client.context import _get_generated_client client = _get_generated_client(enforce_auth=True) url = f"{client.base_url}/api/annotations/upload" @@ -549,7 +577,7 @@ def get_remote_annotation_dataset(self) -> Dataset: as the first volume editing action is done. Note that this behavior might change in the future. """ - from webknossos.client.context import _get_context + from ..client.context import _get_context if self.annotation_id is None: raise ValueError( @@ -641,8 +669,10 @@ def add_volume_layer( id=volume_layer_id, name=name, fallback_layer_name=fallback_layer_name, + data_format=DataFormat.Zarr3, zip=None, segments={}, + largest_segment_id=None, ) ) @@ -721,7 +751,6 @@ def export_volume_layer_to_dataset( self, dataset: Dataset, layer_name: str = "volume_layer", - largest_segment_id: Optional[int] = None, volume_layer_name: Optional[str] = None, volume_layer_id: Optional[int] = None, ) -> SegmentationLayer: @@ -736,10 +765,13 @@ def export_volume_layer_to_dataset( if the annotation contains multiple volume layers. Use `get_volume_layer_names()` to look up available layers. 
""" - volume_zip_path = self._get_volume_layer( + volume_layer = self._get_volume_layer( volume_layer_name=volume_layer_name, volume_layer_id=volume_layer_id, - ).zip + ) + volume_zip_path = volume_layer.zip + + largest_segment_id = volume_layer.largest_segment_id assert ( volume_zip_path is not None @@ -747,21 +779,44 @@ def export_volume_layer_to_dataset( with volume_zip_path.open(mode="rb") as f: data_zip = ZipFile(f) - wrong_files = [ - i.filename - for i in data_zip.filelist - if ANNOTATION_WKW_PATH_RE.search(i.filename) is None - ] - assert ( - len(wrong_files) == 0 - ), f"The annotation contains unexpected files: {wrong_files}" - data_zip.extractall(dataset.path / layer_name) - layer = cast( - SegmentationLayer, - dataset.add_layer_for_existing_files( - layer_name, category="segmentation", largest_segment_id=0 - ), - ) + if volume_layer.data_format == DataFormat.WKW: + wrong_files = [ + i.filename + for i in data_zip.filelist + if ANNOTATION_WKW_PATH_RE.search(i.filename) is None + ] + assert ( + len(wrong_files) == 0 + ), f"The annotation contains unexpected files: {wrong_files}" + data_zip.extractall(dataset.path / layer_name) + layer = cast( + SegmentationLayer, + dataset.add_layer_for_existing_files( + layer_name, + category=SEGMENTATION_CATEGORY, + largest_segment_id=largest_segment_id, + ), + ) + elif volume_layer.data_format == DataFormat.Zarr3: + datasource_properties = dataset_converter.structure( + json.loads(data_zip.read(PROPERTIES_FILE_NAME)), DatasetProperties + ) + assert ( + len(datasource_properties.data_layers) == 1 + ), f"Volume data zip must contain exactly one layer, got {len(datasource_properties.data_layers)}" + layer_properties = datasource_properties.data_layers[0] + internal_layer_name = layer_properties.name + layer_properties.name = layer_name + + _extract_zip_folder( + data_zip, dataset.path / layer_name, f"{internal_layer_name}/" + ) + + layer = cast( + SegmentationLayer, + dataset._add_existing_layer(layer_properties), + ) + best_mag_view = layer.get_finest_mag() if largest_segment_id is None: diff --git a/webknossos/webknossos/annotation/annotation_info.py b/webknossos/webknossos/annotation/annotation_info.py index 07bd11aa3..080c8bc93 100644 --- a/webknossos/webknossos/annotation/annotation_info.py +++ b/webknossos/webknossos/annotation/annotation_info.py @@ -2,15 +2,15 @@ import attr -from webknossos.annotation.annotation import Annotation, AnnotationState, AnnotationType -from webknossos.client._generated.types import Unset -from webknossos.utils import warn_deprecated +from ..client._generated.types import Unset +from ..utils import warn_deprecated +from .annotation import Annotation, AnnotationState, AnnotationType if TYPE_CHECKING: - from webknossos.client._generated.models.annotation_info_response_200 import ( + from ..client._generated.models.annotation_info_response_200 import ( AnnotationInfoResponse200, ) - from webknossos.client._generated.models.annotation_infos_by_task_id_response_200_item import ( + from ..client._generated.models.annotation_infos_by_task_id_response_200_item import ( AnnotationInfosByTaskIdResponse200Item, ) diff --git a/webknossos/webknossos/cli/_utils.py b/webknossos/webknossos/cli/_utils.py index 8db31b4c6..bd69eb535 100644 --- a/webknossos/webknossos/cli/_utils.py +++ b/webknossos/webknossos/cli/_utils.py @@ -8,8 +8,7 @@ import numpy as np from upath import UPath -from webknossos import BoundingBox, Mag -from webknossos.geometry.vec3_int import Vec3Int +from ..geometry import BoundingBox, Mag, Vec3Int VoxelSize = 
namedtuple("VoxelSize", ("x", "y", "z")) Vec2Int = namedtuple("Vec2Int", ("x", "y")) diff --git a/webknossos/webknossos/cli/check_equality.py b/webknossos/webknossos/cli/check_equality.py index e95654d09..3b10fd50f 100644 --- a/webknossos/webknossos/cli/check_equality.py +++ b/webknossos/webknossos/cli/check_equality.py @@ -8,10 +8,9 @@ import typer from typing_extensions import Annotated -from webknossos import Dataset -from webknossos.cli._utils import DistributionStrategy, parse_path -from webknossos.dataset.layer import Layer -from webknossos.utils import get_executor_for_args +from ..dataset import Dataset, Layer +from ..utils import get_executor_for_args +from ._utils import DistributionStrategy, parse_path logger = logging.getLogger(__name__) diff --git a/webknossos/webknossos/cli/compress.py b/webknossos/webknossos/cli/compress.py index d0a3d96b8..b4c45593d 100644 --- a/webknossos/webknossos/cli/compress.py +++ b/webknossos/webknossos/cli/compress.py @@ -7,9 +7,9 @@ import typer from typing_extensions import Annotated -from webknossos import Dataset -from webknossos.cli._utils import DistributionStrategy, parse_path -from webknossos.utils import get_executor_for_args +from ..dataset import Dataset +from ..utils import get_executor_for_args +from ._utils import DistributionStrategy, parse_path def main( diff --git a/webknossos/webknossos/cli/convert.py b/webknossos/webknossos/cli/convert.py index 33681cb3c..e61a18b30 100644 --- a/webknossos/webknossos/cli/convert.py +++ b/webknossos/webknossos/cli/convert.py @@ -7,15 +7,15 @@ import typer from typing_extensions import Annotated -from webknossos import DataFormat, Dataset -from webknossos.cli._utils import ( +from ..dataset import DataFormat, Dataset +from ..utils import get_executor_for_args +from ._utils import ( DistributionStrategy, LayerCategory, VoxelSize, parse_path, parse_voxel_size, ) -from webknossos.utils import get_executor_for_args def main( diff --git a/webknossos/webknossos/cli/convert_knossos.py b/webknossos/webknossos/cli/convert_knossos.py index 44b5b7bcb..51184c342 100644 --- a/webknossos/webknossos/cli/convert_knossos.py +++ b/webknossos/webknossos/cli/convert_knossos.py @@ -15,16 +15,11 @@ import typer from typing_extensions import Annotated -from webknossos import ( - COLOR_CATEGORY, - BoundingBox, - DataFormat, - Dataset, - Mag, - Vec3Int, - View, -) -from webknossos.cli._utils import ( +from ..dataset import COLOR_CATEGORY, DataFormat, Dataset, View +from ..dataset.defaults import DEFAULT_CHUNK_SHAPE, DEFAULT_CHUNKS_PER_SHARD +from ..geometry import BoundingBox, Mag, Vec3Int +from ..utils import get_executor_for_args, time_start, time_stop +from ._utils import ( DistributionStrategy, VoxelSize, parse_mag, @@ -32,8 +27,6 @@ parse_vec3int, parse_voxel_size, ) -from webknossos.dataset.defaults import DEFAULT_CHUNK_SHAPE, DEFAULT_CHUNKS_PER_SHARD -from webknossos.utils import get_executor_for_args, time_start, time_stop KNOSSOS_CUBE_EDGE_LEN = 128 KNOSSOS_CUBE_SIZE = KNOSSOS_CUBE_EDGE_LEN**3 diff --git a/webknossos/webknossos/cli/convert_nifti.py b/webknossos/webknossos/cli/convert_nifti.py index 3e104f3b2..823537da2 100644 --- a/webknossos/webknossos/cli/convert_nifti.py +++ b/webknossos/webknossos/cli/convert_nifti.py @@ -10,8 +10,11 @@ from sklearn.preprocessing import LabelEncoder from typing_extensions import Annotated -from webknossos import BoundingBox, DataFormat, Dataset, LayerCategoryType, Vec3Int -from webknossos.cli._utils import ( +from ..dataset import DataFormat, Dataset, LayerCategoryType +from 
..dataset.defaults import DEFAULT_CHUNK_SHAPE, DEFAULT_CHUNKS_PER_SHARD +from ..geometry import BoundingBox, Vec3Int +from ..utils import time_start, time_stop +from ._utils import ( Vec2Int, VoxelSize, pad_or_crop_to_size_and_topleft, @@ -20,8 +23,6 @@ parse_vec3int, parse_voxel_size, ) -from webknossos.dataset.defaults import DEFAULT_CHUNK_SHAPE, DEFAULT_CHUNKS_PER_SHARD -from webknossos.utils import time_start, time_stop def to_target_datatype( diff --git a/webknossos/webknossos/cli/convert_raw.py b/webknossos/webknossos/cli/convert_raw.py index a75b02a53..36d5f398a 100644 --- a/webknossos/webknossos/cli/convert_raw.py +++ b/webknossos/webknossos/cli/convert_raw.py @@ -11,8 +11,16 @@ import typer from typing_extensions import Annotated -from webknossos import BoundingBox, DataFormat, Dataset, Mag, MagView, Vec3Int -from webknossos.cli._utils import ( +from ..dataset import DataFormat, Dataset, MagView, SamplingModes +from ..dataset.defaults import DEFAULT_CHUNK_SHAPE, DEFAULT_CHUNKS_PER_SHARD +from ..geometry import BoundingBox, Mag, Vec3Int +from ..utils import ( + get_executor_for_args, + time_start, + time_stop, + wait_and_ensure_success, +) +from ._utils import ( DistributionStrategy, Order, SamplingMode, @@ -22,14 +30,6 @@ parse_vec3int, parse_voxel_size, ) -from webknossos.dataset.defaults import DEFAULT_CHUNK_SHAPE, DEFAULT_CHUNKS_PER_SHARD -from webknossos.dataset.sampling_modes import SamplingModes -from webknossos.utils import ( - get_executor_for_args, - time_start, - time_stop, - wait_and_ensure_success, -) logger = logging.getLogger(__name__) diff --git a/webknossos/webknossos/cli/convert_zarr.py b/webknossos/webknossos/cli/convert_zarr.py index bdabbe36c..cd3140767 100644 --- a/webknossos/webknossos/cli/convert_zarr.py +++ b/webknossos/webknossos/cli/convert_zarr.py @@ -13,16 +13,12 @@ import zarr from typing_extensions import Annotated -from webknossos import ( - BoundingBox, - DataFormat, - Dataset, - Mag, - MagView, - SegmentationLayer, - Vec3Int, -) -from webknossos.cli._utils import ( +from ..dataset import DataFormat, Dataset, MagView, SegmentationLayer +from ..dataset._array import _fsstore_from_path +from ..dataset.defaults import DEFAULT_CHUNK_SHAPE, DEFAULT_CHUNKS_PER_SHARD +from ..geometry import BoundingBox, Mag, Vec3Int +from ..utils import get_executor_for_args, wait_and_ensure_success +from ._utils import ( DistributionStrategy, SamplingMode, VoxelSize, @@ -31,9 +27,6 @@ parse_vec3int, parse_voxel_size, ) -from webknossos.dataset._array import _fsstore_from_path -from webknossos.dataset.defaults import DEFAULT_CHUNK_SHAPE, DEFAULT_CHUNKS_PER_SHARD -from webknossos.utils import get_executor_for_args, wait_and_ensure_success logger = logging.getLogger(__name__) diff --git a/webknossos/webknossos/cli/download.py b/webknossos/webknossos/cli/download.py index 4e9970f6f..15fd7fa61 100644 --- a/webknossos/webknossos/cli/download.py +++ b/webknossos/webknossos/cli/download.py @@ -5,8 +5,11 @@ import typer from typing_extensions import Annotated -from webknossos import Annotation, BoundingBox, Dataset, Mag, webknossos_context -from webknossos.cli._utils import parse_bbox, parse_mag, parse_path +from ..annotation import Annotation +from ..client import webknossos_context +from ..dataset import Dataset +from ..geometry import BoundingBox, Mag +from ._utils import parse_bbox, parse_mag, parse_path def main( diff --git a/webknossos/webknossos/cli/downsample.py b/webknossos/webknossos/cli/downsample.py index 260c1cef8..776f2c308 100644 --- 
a/webknossos/webknossos/cli/downsample.py +++ b/webknossos/webknossos/cli/downsample.py @@ -7,9 +7,9 @@ import typer from typing_extensions import Annotated -from webknossos import Dataset, SamplingModes -from webknossos.cli._utils import DistributionStrategy, SamplingMode, parse_path -from webknossos.utils import get_executor_for_args +from ..dataset import Dataset, SamplingModes +from ..utils import get_executor_for_args +from ._utils import DistributionStrategy, SamplingMode, parse_path def main( diff --git a/webknossos/webknossos/cli/export_wkw_as_tiff.py b/webknossos/webknossos/cli/export_wkw_as_tiff.py index 4e907aec6..f52ace872 100644 --- a/webknossos/webknossos/cli/export_wkw_as_tiff.py +++ b/webknossos/webknossos/cli/export_wkw_as_tiff.py @@ -14,8 +14,11 @@ from scipy.ndimage.interpolation import zoom from typing_extensions import Annotated -from webknossos import BoundingBox, Dataset, Mag, MagView -from webknossos.cli._utils import ( +from ..dataset import Dataset, MagView, View +from ..dataset.defaults import DEFAULT_CHUNK_SHAPE +from ..geometry import BoundingBox, Mag, Vec3Int +from ..utils import get_executor_for_args, wait_and_ensure_success +from ._utils import ( DistributionStrategy, Vec2Int, parse_bbox, @@ -23,10 +26,6 @@ parse_path, parse_vec2int, ) -from webknossos.dataset.defaults import DEFAULT_CHUNK_SHAPE -from webknossos.dataset.view import View -from webknossos.geometry.vec3_int import Vec3Int -from webknossos.utils import get_executor_for_args, wait_and_ensure_success def _make_tiff_name(name: str, slice_index: int) -> str: diff --git a/webknossos/webknossos/cli/main.py b/webknossos/webknossos/cli/main.py index 567116002..973cbf3cc 100644 --- a/webknossos/webknossos/cli/main.py +++ b/webknossos/webknossos/cli/main.py @@ -2,7 +2,7 @@ import typer -from webknossos.cli import ( +from . 
import ( check_equality, compress, convert, diff --git a/webknossos/webknossos/cli/upload.py b/webknossos/webknossos/cli/upload.py index 8d820f73e..d324612c0 100644 --- a/webknossos/webknossos/cli/upload.py +++ b/webknossos/webknossos/cli/upload.py @@ -5,10 +5,11 @@ import typer from typing_extensions import Annotated -from webknossos import Dataset, webknossos_context -from webknossos.cli._utils import parse_path -from webknossos.client._defaults import DEFAULT_WEBKNOSSOS_URL -from webknossos.client._upload_dataset import DEFAULT_SIMULTANEOUS_UPLOADS +from ..client import webknossos_context +from ..client._defaults import DEFAULT_WEBKNOSSOS_URL +from ..client._upload_dataset import DEFAULT_SIMULTANEOUS_UPLOADS +from ..dataset import Dataset +from ._utils import parse_path def main( diff --git a/webknossos/webknossos/cli/upsample.py b/webknossos/webknossos/cli/upsample.py index c0a59afa7..fa84add00 100644 --- a/webknossos/webknossos/cli/upsample.py +++ b/webknossos/webknossos/cli/upsample.py @@ -7,15 +7,10 @@ import typer from typing_extensions import Annotated -from webknossos import Dataset, Mag -from webknossos.cli._utils import ( - DistributionStrategy, - SamplingMode, - parse_mag, - parse_path, -) -from webknossos.dataset.sampling_modes import SamplingModes -from webknossos.utils import get_executor_for_args +from ..dataset import Dataset, SamplingModes +from ..geometry import Mag +from ..utils import get_executor_for_args +from ._utils import DistributionStrategy, SamplingMode, parse_mag, parse_path def main( diff --git a/webknossos/webknossos/client/_download_dataset.py b/webknossos/webknossos/client/_download_dataset.py index a8142861c..10fcfb243 100644 --- a/webknossos/webknossos/client/_download_dataset.py +++ b/webknossos/webknossos/client/_download_dataset.py @@ -6,13 +6,13 @@ import numpy as np from rich.progress import track -from webknossos.client._generated.api.datastore import dataset_download -from webknossos.client._generated.api.default import dataset_info -from webknossos.client._generated.types import Unset -from webknossos.client.context import _get_context -from webknossos.dataset import Dataset, LayerCategoryType -from webknossos.dataset.properties import LayerViewConfiguration, dataset_converter -from webknossos.geometry import BoundingBox, Mag, Vec3Int +from ..dataset import Dataset, LayerCategoryType +from ..dataset.properties import LayerViewConfiguration, dataset_converter +from ..geometry import BoundingBox, Mag, Vec3Int +from ._generated.api.datastore import dataset_download +from ._generated.api.default import dataset_info +from ._generated.types import Unset +from .context import _get_context logger = logging.getLogger(__name__) diff --git a/webknossos/webknossos/client/_resolve_short_link.py b/webknossos/webknossos/client/_resolve_short_link.py index 350deb8a8..47ee1e7e1 100644 --- a/webknossos/webknossos/client/_resolve_short_link.py +++ b/webknossos/webknossos/client/_resolve_short_link.py @@ -2,8 +2,8 @@ import re from urllib.parse import urlparse -from webknossos.client._generated.api.default import short_link_by_key -from webknossos.client.context import _get_generated_client, webknossos_context +from ._generated.api.default import short_link_by_key +from .context import _get_generated_client, webknossos_context logger = logging.getLogger(__name__) diff --git a/webknossos/webknossos/client/_upload_dataset.py b/webknossos/webknossos/client/_upload_dataset.py index 4ca8ccc06..56196925d 100644 --- a/webknossos/webknossos/client/_upload_dataset.py +++ 
b/webknossos/webknossos/client/_upload_dataset.py @@ -9,19 +9,12 @@ import httpx -from webknossos.client._generated.api.datastore import ( - dataset_finish_upload, - dataset_reserve_upload, -) -from webknossos.client._generated.api.default import ( - datastore_list, - new_dataset_name_is_valid, -) -from webknossos.client._resumable import Resumable -from webknossos.client.context import _get_context, _WebknossosContext -from webknossos.dataset import Dataset, Layer -from webknossos.dataset.dataset import RemoteDataset -from webknossos.utils import get_rich_progress +from ..dataset import Dataset, Layer, RemoteDataset +from ..utils import get_rich_progress +from ._generated.api.datastore import dataset_finish_upload, dataset_reserve_upload +from ._generated.api.default import datastore_list, new_dataset_name_is_valid +from ._resumable import Resumable +from .context import _get_context, _WebknossosContext DEFAULT_SIMULTANEOUS_UPLOADS = 5 MAXIMUM_RETRY_COUNT = 5 @@ -88,7 +81,7 @@ def upload_dataset( layers_to_link: Optional[List[LayerToLink]] = None, jobs: Optional[int] = None, ) -> str: - from webknossos.client._generated.models import ( + from ._generated.models import ( DatasetFinishUploadJsonBody, DatasetReserveUploadJsonBody, ) diff --git a/webknossos/webknossos/client/context.py b/webknossos/webknossos/client/context.py index f635881e6..c17f472c1 100644 --- a/webknossos/webknossos/client/context.py +++ b/webknossos/webknossos/client/context.py @@ -56,8 +56,8 @@ from dotenv import load_dotenv from rich.prompt import Prompt -from webknossos.client._defaults import DEFAULT_HTTP_TIMEOUT, DEFAULT_WEBKNOSSOS_URL -from webknossos.client._generated import Client as GeneratedClient +from ._defaults import DEFAULT_HTTP_TIMEOUT, DEFAULT_WEBKNOSSOS_URL +from ._generated import Client as GeneratedClient load_dotenv() @@ -78,7 +78,7 @@ def _cached_ask_for_token(webknossos_url: str) -> str: @lru_cache(maxsize=None) def _cached_get_org(context: "_WebknossosContext") -> str: - from webknossos.client._generated.api.default import current_user_info + from ._generated.api.default import current_user_info current_user_info_response = current_user_info.sync( client=context.generated_auth_client @@ -90,7 +90,7 @@ def _cached_get_org(context: "_WebknossosContext") -> str: # TODO reset invalid tokens e.g. 
using cachetools pylint: disable=fixme @lru_cache(maxsize=None) def _cached_get_datastore_token(context: "_WebknossosContext") -> str: - from webknossos.client._generated.api.default import generate_token_for_data_store + from ._generated.api.default import generate_token_for_data_store generate_token_for_data_store_response = generate_token_for_data_store.sync( client=context.generated_auth_client diff --git a/webknossos/webknossos/client/download_dataset.py b/webknossos/webknossos/client/download_dataset.py index de57642bb..97f9fe862 100644 --- a/webknossos/webknossos/client/download_dataset.py +++ b/webknossos/webknossos/client/download_dataset.py @@ -1,10 +1,9 @@ from os import PathLike from typing import List, Optional, Union -from webknossos.dataset import Dataset -from webknossos.geometry import BoundingBox, Mag -from webknossos.utils import warn_deprecated - +from ..dataset import Dataset +from ..geometry import BoundingBox, Mag +from ..utils import warn_deprecated from ._download_dataset import download_dataset as actual_download_dataset diff --git a/webknossos/webknossos/dataset/__init__.py b/webknossos/webknossos/dataset/__init__.py index b405e93fc..64cacf6cf 100644 --- a/webknossos/webknossos/dataset/__init__.py +++ b/webknossos/webknossos/dataset/__init__.py @@ -9,7 +9,7 @@ Each dataset consists of one or more layers (webknossos.dataset.layer.Layer), which themselves can comprise multiple magnifications (webknossos.dataset.mag_view.MagView). """ -from ._array import DataFormat +from .data_format import DataFormat from .dataset import Dataset, RemoteDataset from .layer import Layer, SegmentationLayer from .layer_categories import COLOR_CATEGORY, SEGMENTATION_CATEGORY, LayerCategoryType diff --git a/webknossos/webknossos/dataset/_array.py b/webknossos/webknossos/dataset/_array.py index c3e8f2d99..51cbf56c2 100644 --- a/webknossos/webknossos/dataset/_array.py +++ b/webknossos/webknossos/dataset/_array.py @@ -3,7 +3,6 @@ from abc import ABC, abstractmethod from contextlib import contextmanager from dataclasses import dataclass -from enum import Enum from os.path import relpath from pathlib import Path from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional, Type, Union @@ -15,10 +14,9 @@ from upath import UPath from zarr.storage import FSStore -from webknossos.dataset.defaults import WK_USE_ZARRITA - from ..geometry import BoundingBox, Vec3Int, Vec3IntLike from ..utils import warn_deprecated +from .data_format import DataFormat if TYPE_CHECKING: import zarrita @@ -55,15 +53,6 @@ class ArrayException(Exception): pass -class DataFormat(Enum): - WKW = "wkw" - Zarr = "zarr" - Zarr3 = "zarr3" - - def __str__(self) -> str: - return self.value - - @dataclass class ArrayInfo: data_format: DataFormat @@ -99,11 +88,7 @@ def info(self) -> ArrayInfo: @classmethod @abstractmethod def open(_cls, path: Path) -> "BaseArray": - classes = ( - (WKWArray, ZarritaArray, ZarrArray) - if WK_USE_ZARRITA - else (WKWArray, ZarrArray) - ) + classes = (WKWArray, ZarritaArray, ZarrArray) for cls in classes: try: array = cls.open(path) @@ -143,10 +128,10 @@ def close(self) -> None: def get_class(data_format: DataFormat) -> Type["BaseArray"]: if data_format == DataFormat.WKW: return WKWArray - if WK_USE_ZARRITA and data_format in (DataFormat.Zarr, DataFormat.Zarr3): + if data_format == DataFormat.Zarr3: return ZarritaArray if data_format == DataFormat.Zarr: - return ZarrArray + return ZarritaArray raise ValueError(f"Array format `{data_format}` is invalid.") @@ -559,12 +544,13 @@ def create(cls, 
path: Path, array_info: ArrayInfo) -> "ZarritaArray": + array_info.shard_shape.to_tuple(), chunk_key_encoding=("default", "/"), dtype=array_info.voxel_type, + dimension_names=["c", "x", "y", "z"], codecs=[ zarrita.codecs.sharding_codec( chunk_shape=(array_info.num_channels,) + array_info.chunk_shape.to_tuple(), codecs=[ - zarrita.codecs.transpose_codec("F"), + zarrita.codecs.transpose_codec([3, 2, 1, 0]), zarrita.codecs.bytes_codec(), zarrita.codecs.blosc_codec( typesize=array_info.voxel_type.itemsize @@ -572,7 +558,7 @@ def create(cls, path: Path, array_info: ArrayInfo) -> "ZarritaArray": ] if array_info.compression_mode else [ - zarrita.codecs.transpose_codec("F"), + zarrita.codecs.transpose_codec([3, 2, 1, 0]), zarrita.codecs.bytes_codec(), ], ) diff --git a/webknossos/webknossos/dataset/_downsampling_utils.py b/webknossos/webknossos/dataset/_downsampling_utils.py index 268cf61dd..ba88c413d 100644 --- a/webknossos/webknossos/dataset/_downsampling_utils.py +++ b/webknossos/webknossos/dataset/_downsampling_utils.py @@ -11,9 +11,7 @@ if TYPE_CHECKING: from .dataset import Dataset, Layer -from webknossos.geometry import Mag, Vec3Int, Vec3IntLike -from webknossos.geometry.bounding_box import BoundingBox - +from ..geometry import BoundingBox, Mag, Vec3Int, Vec3IntLike from ._array import ArrayInfo from .layer_categories import LayerCategoryType from .view import View diff --git a/webknossos/webknossos/dataset/_upsampling_utils.py b/webknossos/webknossos/dataset/_upsampling_utils.py index 8fbe3a625..cd6ea56b1 100644 --- a/webknossos/webknossos/dataset/_upsampling_utils.py +++ b/webknossos/webknossos/dataset/_upsampling_utils.py @@ -5,8 +5,7 @@ import numpy as np -from webknossos.geometry import BoundingBox, Vec3Int - +from ..geometry import BoundingBox, Vec3Int from .view import View diff --git a/webknossos/webknossos/dataset/_utils/__init__.py b/webknossos/webknossos/dataset/_utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/webknossos/webknossos/dataset/_utils/buffered_slice_reader.py b/webknossos/webknossos/dataset/_utils/buffered_slice_reader.py index 42b1fb6ec..d1329e95f 100644 --- a/webknossos/webknossos/dataset/_utils/buffered_slice_reader.py +++ b/webknossos/webknossos/dataset/_utils/buffered_slice_reader.py @@ -7,10 +7,10 @@ import numpy as np if TYPE_CHECKING: - from webknossos.dataset import View + from ..view import View -from webknossos.geometry import BoundingBox, Vec3IntLike -from webknossos.utils import get_chunks +from ...geometry import BoundingBox, Vec3IntLike +from ...utils import get_chunks class BufferedSliceReader: diff --git a/webknossos/webknossos/dataset/_utils/buffered_slice_writer.py b/webknossos/webknossos/dataset/_utils/buffered_slice_writer.py index 14095ca43..a3c179d80 100644 --- a/webknossos/webknossos/dataset/_utils/buffered_slice_writer.py +++ b/webknossos/webknossos/dataset/_utils/buffered_slice_writer.py @@ -9,10 +9,10 @@ import numpy as np import psutil -from webknossos.geometry import BoundingBox, Vec3Int, Vec3IntLike +from ...geometry import BoundingBox, Vec3Int, Vec3IntLike if TYPE_CHECKING: - from webknossos.dataset import View + from ..view import View def log_memory_consumption(additional_output: str = "") -> None: diff --git a/webknossos/webknossos/dataset/_utils/infer_bounding_box_existing_files.py b/webknossos/webknossos/dataset/_utils/infer_bounding_box_existing_files.py index 291ae9bfa..177042e01 100644 --- a/webknossos/webknossos/dataset/_utils/infer_bounding_box_existing_files.py +++ 
b/webknossos/webknossos/dataset/_utils/infer_bounding_box_existing_files.py @@ -1,7 +1,7 @@ from functools import reduce -from webknossos.dataset.mag_view import MagView -from webknossos.geometry import BoundingBox +from ...geometry import BoundingBox +from ..mag_view import MagView def infer_bounding_box_existing_files(mag_view: MagView) -> BoundingBox: diff --git a/webknossos/webknossos/dataset/_utils/pims_images.py b/webknossos/webknossos/dataset/_utils/pims_images.py index 41bda3455..5dd0a8914 100644 --- a/webknossos/webknossos/dataset/_utils/pims_images.py +++ b/webknossos/webknossos/dataset/_utils/pims_images.py @@ -26,23 +26,28 @@ # pylint: disable=unused-import try: - from webknossos.dataset._utils.pims_czi_reader import PimsCziReader + from .pims_czi_reader import PimsCziReader except ImportError: PimsCziReader = type(None) # type: ignore[misc,assignment] try: - import webknossos.dataset._utils.pims_dm_readers + from .pims_dm_readers import ( # pylint: disable=unused-import + PimsDm3Reader, + PimsDm4Reader, + ) except ImportError: pass try: - import webknossos.dataset._utils.pims_imagej_tiff_reader + from .pims_imagej_tiff_reader import ( # pylint: disable=unused-import + PimsImagejTiffReader, + ) except ImportError: pass # pylint: enable=unused-import -from webknossos.dataset.mag_view import MagView -from webknossos.geometry.vec3_int import Vec3Int +from ...geometry.vec3_int import Vec3Int +from ..mag_view import MagView try: import pims diff --git a/webknossos/webknossos/dataset/data_format.py b/webknossos/webknossos/dataset/data_format.py new file mode 100644 index 000000000..d19cba281 --- /dev/null +++ b/webknossos/webknossos/dataset/data_format.py @@ -0,0 +1,10 @@ +from enum import Enum + + +class DataFormat(Enum): + WKW = "wkw" + Zarr = "zarr" + Zarr3 = "zarr3" + + def __str__(self) -> str: + return self.value diff --git a/webknossos/webknossos/dataset/dataset.py b/webknossos/webknossos/dataset/dataset.py index 7a1d1409a..e895cad1f 100644 --- a/webknossos/webknossos/dataset/dataset.py +++ b/webknossos/webknossos/dataset/dataset.py @@ -35,14 +35,20 @@ from numpy.typing import DTypeLike from upath import UPath -from webknossos.dataset.defaults import ( +from ..geometry.vec3_int import Vec3Int, Vec3IntLike +from ._array import ArrayException, ArrayInfo, BaseArray +from .defaults import ( + DEFAULT_BIT_DEPTH, DEFAULT_CHUNK_SHAPE, DEFAULT_CHUNKS_PER_SHARD_FROM_IMAGES, DEFAULT_CHUNKS_PER_SHARD_ZARR, + DEFAULT_DATA_FORMAT, + PROPERTIES_FILE_NAME, + ZARR_JSON_FILE_NAME, + ZATTRS_FILE_NAME, + ZGROUP_FILE_NAME, ) - -from ..geometry.vec3_int import Vec3Int, Vec3IntLike -from ._array import ArrayException, ArrayInfo, BaseArray, DataFormat +from .ome_metadata import write_ome_0_4_metadata from .remote_dataset_registry import RemoteDatasetRegistry from .remote_folder import RemoteFolder from .sampling_modes import SamplingModes @@ -67,6 +73,7 @@ ) from ._utils.from_images import guess_if_segmentation_path from ._utils.infer_bounding_box_existing_files import infer_bounding_box_existing_files +from .data_format import DataFormat from .layer import ( Layer, SegmentationLayer, @@ -90,11 +97,6 @@ logger = logging.getLogger(__name__) -DEFAULT_BIT_DEPTH = 8 -DEFAULT_DATA_FORMAT = DataFormat.WKW -PROPERTIES_FILE_NAME = "datasource-properties.json" -ZGROUP_FILE_NAME = ".zgroup" -ZATTRS_FILE_NAME = ".zattrs" _DATASET_URL_REGEX = re.compile( r"^(?Phttps?://.*)/datasets/" @@ -382,8 +384,8 @@ def _parse_remote( * organization_id, * sharing_token. 
""" - from webknossos.client._resolve_short_link import resolve_short_link - from webknossos.client.context import _get_context, webknossos_context + from ..client._resolve_short_link import resolve_short_link + from ..client.context import _get_context, webknossos_context caller = inspect.stack()[1].function @@ -454,8 +456,8 @@ def open_remote( and allows to specifiy in which webknossos instance to search for the dataset. It defaults to the url from your current `webknossos_context`, using https://webknossos.org as a fallback. """ - from webknossos.client._generated.api.default import dataset_info - from webknossos.client.context import _get_context + from ..client._generated.api.default import dataset_info + from ..client.context import _get_context ( context_manager, @@ -521,7 +523,7 @@ def download( if the `path` exists. """ - from webknossos.client._download_dataset import download_dataset + from ..client._download_dataset import download_dataset ( context_manager, @@ -729,7 +731,7 @@ def upload( Returns the `RemoteDataset` upon successful upload. """ - from webknossos.client._upload_dataset import LayerToLink, upload_dataset + from ..client._upload_dataset import LayerToLink, upload_dataset converted_layers_to_link = ( None @@ -764,6 +766,7 @@ def add_layer( dtype_per_channel: Optional[DTypeLike] = None, num_channels: Optional[int] = None, data_format: Union[str, DataFormat] = DEFAULT_DATA_FORMAT, + bounding_box: Optional[BoundingBox] = None, **kwargs: Any, ) -> Layer: """ @@ -823,7 +826,7 @@ def add_layer( layer_properties = LayerProperties( name=layer_name, category=category, - bounding_box=BoundingBox((0, 0, 0), (0, 0, 0)), + bounding_box=bounding_box or BoundingBox((0, 0, 0), (0, 0, 0)), element_class=_dtype_per_channel_to_element_class( dtype_per_channel, num_channels ), @@ -952,6 +955,21 @@ def add_layer_like(self, other_layer: Layer, layer_name: str) -> Layer: self._export_as_json() return self._layers[layer_name] + def _add_existing_layer(self, layer_properties: LayerProperties) -> Layer: + self._ensure_writable() + + assert layer_properties.name not in self.layers, ( + f"Cannot import layer `{layer_properties.name}` into dataset, " + + "as a layer with this name is already present." + ) + + self._properties.data_layers.append(layer_properties) + layer = self._initialize_layer_from_properties(layer_properties) + self.layers[layer.name] = layer + + self._export_as_json() + return self.layers[layer.name] + def add_layer_for_existing_files( self, layer_name: str, @@ -965,18 +983,32 @@ def add_layer_for_existing_files( assert ( array_info is not None ), f"Could not find any valid mags in {self.path /layer_name}. Cannot add layer." + + num_channels = kwargs.pop("num_channels", array_info.num_channels) + dtype_per_channel = kwargs.pop("dtype_per_channel", array_info.voxel_type) + data_format = kwargs.pop("data_format", array_info.data_format) + layer = self.add_layer( layer_name, category=category, - num_channels=array_info.num_channels, - dtype_per_channel=array_info.voxel_type, - data_format=array_info.data_format, + num_channels=num_channels, + dtype_per_channel=dtype_per_channel, + data_format=data_format, **kwargs, ) for mag_dir in layer.path.iterdir(): + try: + # Tests if directory entry is a valid mag. + # Metadata files such as zarr.json are filtered out by this. 
+ Mag(mag_dir.name) + except ValueError: + continue layer.add_mag_for_existing_files(mag_dir.name) finest_mag_view = layer.mags[min(layer.mags)] - layer.bounding_box = infer_bounding_box_existing_files(finest_mag_view) + if "bounding_box" not in kwargs: + layer.bounding_box = infer_bounding_box_existing_files(finest_mag_view) + else: + layer.bounding_box = kwargs["bounding_box"] return layer def add_layer_from_images( @@ -1565,7 +1597,12 @@ def shallow_copy_dataset( layer.path, new_layer.path, ignore=[str(mag) for mag in layer.mags] - + [PROPERTIES_FILE_NAME, ZGROUP_FILE_NAME, ZATTRS_FILE_NAME], + + [ + PROPERTIES_FILE_NAME, + ZGROUP_FILE_NAME, + ZATTRS_FILE_NAME, + ZARR_JSON_FILE_NAME, + ], make_relative=make_relative, ) @@ -1682,63 +1719,16 @@ def _export_as_json(self) -> None: # Write out Zarr and OME-Ngff metadata if there is a Zarr layer if any(layer.data_format == DataFormat.Zarr for layer in self.layers.values()): - zgroup_content = {"zarr_format": "2"} with (self.path / ZGROUP_FILE_NAME).open("w", encoding="utf-8") as outfile: - json.dump(zgroup_content, outfile, indent=4) - for layer in self.layers.values(): - if layer.data_format == DataFormat.Zarr: - with (layer.path / ZGROUP_FILE_NAME).open( - "w", encoding="utf-8" - ) as outfile: - json.dump(zgroup_content, outfile, indent=4) - with (layer.path / ZATTRS_FILE_NAME).open( - "w", encoding="utf-8" - ) as outfile: - json.dump( - { - "multiscales": [ - { - "version": "0.4", - "axes": [ - {"name": "c", "type": "channel"}, - { - "name": "x", - "type": "space", - "unit": "nanometer", - }, - { - "name": "y", - "type": "space", - "unit": "nanometer", - }, - { - "name": "z", - "type": "space", - "unit": "nanometer", - }, - ], - "datasets": [ - { - "path": mag.path.name, - "coordinateTransformations": [ - { - "type": "scale", - "scale": [1.0] - + ( - np.array(self.voxel_size) - * mag.mag.to_np() - ).tolist(), - } - ], - } - for mag in layer.mags.values() - ], - } - ] - }, - outfile, - indent=4, - ) + json.dump({"zarr_format": "2"}, outfile, indent=4) + if any(layer.data_format == DataFormat.Zarr3 for layer in self.layers.values()): + with (self.path / ZARR_JSON_FILE_NAME).open( + "w", encoding="utf-8" + ) as outfile: + json.dump({"zarr_format": 3, "node_type": "group"}, outfile, indent=4) + + for layer in self.layers.values(): + write_ome_0_4_metadata(self, layer) def _initialize_layer_from_properties(self, properties: LayerProperties) -> Layer: if properties.category == COLOR_CATEGORY: @@ -1817,15 +1807,15 @@ def __repr__(self) -> str: @property def url(self) -> str: - from webknossos.client.context import _get_context + from ..client.context import _get_context with self._context: wk_url = _get_context().url return f"{wk_url}/datasets/{self._organization_id}/{self._dataset_name}" def _get_dataset_info(self) -> "DatasetInfoResponse200": - from webknossos.client._generated.api.default import dataset_info - from webknossos.client.context import _get_generated_client + from ..client._generated.api.default import dataset_info + from ..client.context import _get_generated_client with self._context: dataset_info_response = dataset_info.sync_detailed( @@ -1848,11 +1838,11 @@ def _update_dataset_info( folder_id: str = _UNSET, tags: List[str] = _UNSET, ) -> None: - from webknossos.client._generated.api.default import dataset_update - from webknossos.client._generated.models.dataset_update_json_body import ( + from ..client._generated.api.default import dataset_update + from ..client._generated.models.dataset_update_json_body import ( 
DatasetUpdateJsonBody, ) - from webknossos.client.context import _get_generated_client + from ..client.context import _get_generated_client # Atm, the wk backend needs to get previous parameters passed # (this is a race-condition with parallel updates). @@ -1924,8 +1914,8 @@ def is_public(self, is_public: bool) -> None: @property def sharing_token(self) -> str: - from webknossos.client._generated.api.default import dataset_sharing_token - from webknossos.client.context import _get_generated_client + from ..client._generated.api.default import dataset_sharing_token + from ..client.context import _get_generated_client with self._context: dataset_sharing_token_response = dataset_sharing_token.sync_detailed( @@ -1941,7 +1931,7 @@ def sharing_token(self) -> str: @property def allowed_teams(self) -> Tuple["Team", ...]: - from webknossos.administration.user import Team + from ..administration.user import Team return tuple( Team(id=i.id, name=i.name, organization_id=i.organization) @@ -1951,9 +1941,9 @@ def allowed_teams(self) -> Tuple["Team", ...]: @allowed_teams.setter def allowed_teams(self, allowed_teams: Sequence[Union[str, "Team"]]) -> None: """Assign the teams that are allowed to access the dataset. Specify the teams like this `[Team.get_by_name("Lab_A"), ...]`.""" - from webknossos.administration.user import Team - from webknossos.client._generated.api.default import dataset_update_teams - from webknossos.client.context import _get_generated_client + from ..administration.user import Team + from ..client._generated.api.default import dataset_update_teams + from ..client.context import _get_generated_client team_ids = [i.id if isinstance(i, Team) else i for i in allowed_teams] diff --git a/webknossos/webknossos/dataset/defaults.py b/webknossos/webknossos/dataset/defaults.py index 86413038f..d8c08ab5e 100644 --- a/webknossos/webknossos/dataset/defaults.py +++ b/webknossos/webknossos/dataset/defaults.py @@ -1,10 +1,14 @@ -from os import environ +from ..geometry import Vec3Int +from .data_format import DataFormat -from webknossos.geometry import Vec3Int - -WK_USE_ZARRITA = environ.get("WK_USE_ZARRITA") is not None DEFAULT_WKW_FILE_LEN = 32 DEFAULT_CHUNK_SHAPE = Vec3Int.full(32) DEFAULT_CHUNKS_PER_SHARD = Vec3Int.full(32) DEFAULT_CHUNKS_PER_SHARD_ZARR = Vec3Int.full(1) DEFAULT_CHUNKS_PER_SHARD_FROM_IMAGES = Vec3Int(128, 128, 1) +DEFAULT_BIT_DEPTH = 8 +DEFAULT_DATA_FORMAT = DataFormat.WKW +PROPERTIES_FILE_NAME = "datasource-properties.json" +ZGROUP_FILE_NAME = ".zgroup" +ZATTRS_FILE_NAME = ".zattrs" +ZARR_JSON_FILE_NAME = "zarr.json" diff --git a/webknossos/webknossos/dataset/layer.py b/webknossos/webknossos/dataset/layer.py index 23c85bcfc..b1c8774b0 100644 --- a/webknossos/webknossos/dataset/layer.py +++ b/webknossos/webknossos/dataset/layer.py @@ -13,10 +13,7 @@ from numpy.typing import DTypeLike from upath import UPath -from webknossos.dataset.sampling_modes import SamplingModes -from webknossos.dataset.view import View, _copy_job -from webknossos.geometry import BoundingBox, Mag, Vec3Int, Vec3IntLike - +from ..geometry import BoundingBox, Mag, Vec3Int, Vec3IntLike from ._array import ArrayException, BaseArray, DataFormat from ._downsampling_utils import ( calculate_default_coarsest_mag, @@ -36,6 +33,8 @@ _properties_floating_type_to_python_type, _python_floating_type_to_properties_type, ) +from .sampling_modes import SamplingModes +from .view import View, _copy_job if TYPE_CHECKING: from .dataset import Dataset diff --git a/webknossos/webknossos/dataset/mag_view.py 
b/webknossos/webknossos/dataset/mag_view.py index f76b3fccd..c3e9f7e98 100644 --- a/webknossos/webknossos/dataset/mag_view.py +++ b/webknossos/webknossos/dataset/mag_view.py @@ -305,7 +305,7 @@ def compress( Compressing mags on remote file systems requires a `target_path`. """ - from webknossos.dataset.dataset import Dataset + from .dataset import Dataset if args is not None: warn_deprecated( diff --git a/webknossos/webknossos/dataset/ome_metadata.py b/webknossos/webknossos/dataset/ome_metadata.py new file mode 100644 index 000000000..09e304f21 --- /dev/null +++ b/webknossos/webknossos/dataset/ome_metadata.py @@ -0,0 +1,79 @@ +import json +from typing import TYPE_CHECKING, Any, Dict + +import numpy as np + +from .data_format import DataFormat +from .defaults import ZARR_JSON_FILE_NAME, ZATTRS_FILE_NAME, ZGROUP_FILE_NAME + +if TYPE_CHECKING: + from .dataset import Dataset + from .layer import Layer + + +def get_ome_0_4_multiscale_metadata( + dataset: "Dataset", layer: "Layer" +) -> Dict[str, Any]: + return { + "multiscales": [ + { + "version": "0.4", + "axes": [ + {"name": "c", "type": "channel"}, + { + "name": "x", + "type": "space", + "unit": "nanometer", + }, + { + "name": "y", + "type": "space", + "unit": "nanometer", + }, + { + "name": "z", + "type": "space", + "unit": "nanometer", + }, + ], + "datasets": [ + { + "path": mag.path.name, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [1.0] + + ( + np.array(dataset.voxel_size) * mag.mag.to_np() + ).tolist(), + } + ], + } + for mag in layer.mags.values() + ], + } + ] + } + + +def write_ome_0_4_metadata(dataset: "Dataset", layer: "Layer") -> None: + if layer.data_format == DataFormat.Zarr3: + with (layer.path / ZARR_JSON_FILE_NAME).open("w", encoding="utf-8") as outfile: + json.dump( + { + "zarr_format": 3, + "node_type": "group", + "attributes": get_ome_0_4_multiscale_metadata(dataset, layer), + }, + outfile, + indent=4, + ) + if layer.data_format == DataFormat.Zarr: + with (layer.path / ZGROUP_FILE_NAME).open("w", encoding="utf-8") as outfile: + json.dump({"zarr_format": "2"}, outfile, indent=4) + with (layer.path / ZATTRS_FILE_NAME).open("w", encoding="utf-8") as outfile: + json.dump( + get_ome_0_4_multiscale_metadata(dataset, layer), + outfile, + indent=4, + ) diff --git a/webknossos/webknossos/dataset/properties.py b/webknossos/webknossos/dataset/properties.py index 49fcf605a..069743511 100644 --- a/webknossos/webknossos/dataset/properties.py +++ b/webknossos/webknossos/dataset/properties.py @@ -6,9 +6,8 @@ import numpy as np from cattr.gen import make_dict_structure_fn, make_dict_unstructure_fn, override -from webknossos.geometry import BoundingBox, Mag, Vec3Int -from webknossos.utils import snake_to_camel_case, warn_deprecated - +from ..geometry import BoundingBox, Mag, Vec3Int +from ..utils import snake_to_camel_case, warn_deprecated from ._array import ArrayException, BaseArray, DataFormat from .layer_categories import LayerCategoryType diff --git a/webknossos/webknossos/dataset/remote_dataset_registry.py b/webknossos/webknossos/dataset/remote_dataset_registry.py index 06c60f755..73a5470b3 100644 --- a/webknossos/webknossos/dataset/remote_dataset_registry.py +++ b/webknossos/webknossos/dataset/remote_dataset_registry.py @@ -1,9 +1,9 @@ from typing import TYPE_CHECKING, Optional, Sequence, TypeVar, Union -from webknossos.utils import LazyReadOnlyDict +from ..utils import LazyReadOnlyDict if TYPE_CHECKING: - from webknossos.dataset.dataset import RemoteDataset + from .dataset import RemoteDataset K = TypeVar("K") 
# key @@ -19,10 +19,10 @@ def __init__( organization_id: Optional[str], tags: Optional[Union[str, Sequence[str]]], ) -> None: - from webknossos.administration.user import User - from webknossos.client._generated.api.default import dataset_list - from webknossos.client.context import _get_generated_client - from webknossos.dataset.dataset import Dataset + from ..administration.user import User + from ..client._generated.api.default import dataset_list + from ..client.context import _get_generated_client + from .dataset import Dataset client = _get_generated_client(enforce_auth=True) diff --git a/webknossos/webknossos/dataset/remote_folder.py b/webknossos/webknossos/dataset/remote_folder.py index 683238dfd..b9b44378d 100644 --- a/webknossos/webknossos/dataset/remote_folder.py +++ b/webknossos/webknossos/dataset/remote_folder.py @@ -3,7 +3,7 @@ import attr if TYPE_CHECKING: - from webknossos.client._generated.models.folder_tree_response_200_item import ( + from ..client._generated.models.folder_tree_response_200_item import ( FolderTreeResponse200Item, ) @@ -25,8 +25,8 @@ class RemoteFolder: @classmethod def get_by_id(cls, folder_id: str) -> "RemoteFolder": - from webknossos.client._generated.api.default import folder_tree - from webknossos.client.context import _get_generated_client + from ..client._generated.api.default import folder_tree + from ..client.context import _get_generated_client client = _get_generated_client(enforce_auth=True) @@ -41,8 +41,8 @@ def get_by_id(cls, folder_id: str) -> "RemoteFolder": @classmethod def get_by_path(cls, path: str) -> "RemoteFolder": - from webknossos.client._generated.api.default import folder_tree - from webknossos.client.context import _get_generated_client + from ..client._generated.api.default import folder_tree + from ..client.context import _get_generated_client client = _get_generated_client(enforce_auth=True) diff --git a/webknossos/webknossos/dataset/view.py b/webknossos/webknossos/dataset/view.py index 4de5bd391..b9606f25a 100644 --- a/webknossos/webknossos/dataset/view.py +++ b/webknossos/webknossos/dataset/view.py @@ -703,7 +703,7 @@ def get_buffered_slice_writer( writer.send(data_slice) ``` """ - from webknossos.dataset._utils.buffered_slice_writer import BufferedSliceWriter + from ._utils.buffered_slice_writer import BufferedSliceWriter assert ( not self._read_only @@ -756,7 +756,7 @@ def get_buffered_slice_reader( ... ``` """ - from webknossos.dataset._utils.buffered_slice_reader import BufferedSliceReader + from ._utils.buffered_slice_reader import BufferedSliceReader return BufferedSliceReader( view=self, diff --git a/webknossos/webknossos/geometry/mag.py b/webknossos/webknossos/geometry/mag.py index 72a183697..76671ef45 100644 --- a/webknossos/webknossos/geometry/mag.py +++ b/webknossos/webknossos/geometry/mag.py @@ -31,7 +31,7 @@ def _import_mag(mag_like: Any) -> Vec3Int: if as_vec3_int is None: raise ValueError( - "Mag must be int or a vector3 of ints or a string shaped like e.g. 2-2-1" + f"Mag must be int or a vector3 of ints or a string shaped like e.g. 2-2-1. 
Got: {mag_like}" ) for m in as_vec3_int: assert ( diff --git a/webknossos/webknossos/skeleton/__init__.py b/webknossos/webknossos/skeleton/__init__.py index cab7ebe33..fd8143645 100644 --- a/webknossos/webknossos/skeleton/__init__.py +++ b/webknossos/webknossos/skeleton/__init__.py @@ -1,11 +1,11 @@ from os import PathLike from typing import Union -from webknossos.skeleton.group import Group -from webknossos.skeleton.node import Node -from webknossos.skeleton.skeleton import Skeleton -from webknossos.skeleton.tree import Graph, Tree -from webknossos.utils import warn_deprecated +from ..utils import warn_deprecated +from .group import Group +from .node import Node +from .skeleton import Skeleton +from .tree import Graph, Tree def open_nml(file_path: Union[PathLike, str]) -> Skeleton: diff --git a/webknossos/webknossos/skeleton/group.py b/webknossos/webknossos/skeleton/group.py index edd9fbe0e..01ee8c35a 100644 --- a/webknossos/webknossos/skeleton/group.py +++ b/webknossos/webknossos/skeleton/group.py @@ -5,12 +5,13 @@ from boltons.strutils import unit_len import webknossos._nml as wknml -from webknossos.utils import warn_deprecated +from ..utils import warn_deprecated from .tree import Tree if TYPE_CHECKING: - from webknossos.skeleton import Node, Skeleton + from .node import Node + from .skeleton import Skeleton Vector3 = Tuple[float, float, float] diff --git a/webknossos/webknossos/skeleton/node.py b/webknossos/webknossos/skeleton/node.py index 40872d0bc..50587e357 100644 --- a/webknossos/webknossos/skeleton/node.py +++ b/webknossos/webknossos/skeleton/node.py @@ -2,10 +2,10 @@ import attr -from webknossos.geometry import Vec3Int +from ..geometry import Vec3Int if TYPE_CHECKING: - from webknossos.skeleton import Skeleton + from .skeleton import Skeleton Vec3Float = Tuple[float, float, float] diff --git a/webknossos/webknossos/skeleton/skeleton.py b/webknossos/webknossos/skeleton/skeleton.py index 62a9af4b8..ddde84f78 100644 --- a/webknossos/webknossos/skeleton/skeleton.py +++ b/webknossos/webknossos/skeleton/skeleton.py @@ -5,8 +5,7 @@ import attr -from webknossos.utils import warn_deprecated - +from ..utils import warn_deprecated from .group import Group Vector3 = Tuple[float, float, float] @@ -86,7 +85,7 @@ def load(file_path: Union[PathLike, str]) -> "Skeleton": layers). Returns the `Skeleton` object. Also see `Annotation.load` if you want to have the annotation which wraps the skeleton.""" - from webknossos import Annotation + from ..annotation import Annotation return Annotation.load(file_path).skeleton @@ -95,7 +94,7 @@ def save(self, out_path: Union[str, PathLike]) -> None: Stores the skeleton as a zip or nml at the given path. """ - from webknossos import Annotation + from ..annotation import Annotation out_path = Path(out_path) assert out_path.suffix in [ diff --git a/webknossos/webknossos/skeleton/tree.py b/webknossos/webknossos/skeleton/tree.py index 52853c49f..e75118569 100644 --- a/webknossos/webknossos/skeleton/tree.py +++ b/webknossos/webknossos/skeleton/tree.py @@ -4,13 +4,13 @@ import networkx as nx import numpy as np -from webknossos.geometry import Vec3Int, Vec3IntLike -from webknossos.utils import warn_deprecated - +from ..geometry import Vec3Int, Vec3IntLike +from ..utils import warn_deprecated from .node import Node if TYPE_CHECKING: - from webknossos.skeleton import Group, Skeleton + from .group import Group + from .skeleton import Skeleton Vector3 = Tuple[float, float, float] Vector4 = Tuple[float, float, float, float]