Merge pull request #73 from brainglobe/imageio
Add image IO loading code from cellfinder & create image IO submodule within IO
alessandrofelder authored May 15, 2024
2 parents 66e88a4 + a1a53b5 commit 085a14c
Showing 9 changed files with 123 additions and 32 deletions.
3 changes: 3 additions & 0 deletions brainglobe_utils/IO/image/__init__.py
@@ -0,0 +1,3 @@
from brainglobe_utils.IO.image.load import *
from brainglobe_utils.IO.image.save import *
from brainglobe_utils.IO.image.utils import *
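
The three wildcard imports above re-export the loading, saving and utility helpers, so downstream code addresses them through brainglobe_utils.IO.image rather than the old brainglobe_utils.image_io path (as the updated heatmap.py and transform.py imports further down show). A minimal round-trip sketch, assuming load_any and to_tiff are among the re-exported names; the file name is illustrative and load_any is assumed to default to no scaling:

import numpy as np

from brainglobe_utils.IO.image import load_any, to_tiff

# Round-trip a small volume through the relocated submodule; the file name
# is purely illustrative.
volume = np.random.random((10, 50, 50)).astype(np.float32)
to_tiff(volume, "example_stack.tif")
reloaded = load_any("example_stack.tif")
assert reloaded.shape == volume.shape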
brainglobe_utils/IO/image/load.py (renamed from brainglobe_utils/image_io/load.py)
@@ -1,12 +1,17 @@
import glob
import logging
import math
import os
import warnings
from concurrent.futures import ProcessPoolExecutor
from pathlib import Path
from typing import Tuple

import nrrd
import numpy as np
import tifffile
from dask import array as da
from dask import delayed
from natsort import natsorted
from skimage import transform
from tqdm import tqdm
@@ -15,7 +20,7 @@
get_num_processes,
get_sorted_file_paths,
)
from brainglobe_utils.image_io.utils import ImageIOLoadException
from brainglobe_utils.IO.image.utils import ImageIOLoadException

from .utils import check_mem, scale_z

@@ -90,11 +95,11 @@ def load_any(
Raises
------
ImageIOLoadException
If there was an issue loading the image with image_io.
If there was an issue loading the image with image.
See Also
------
image_io.utils.ImageIOLoadException
image.utils.ImageIOLoadException
"""
src_path = Path(src_path)

@@ -732,3 +737,76 @@ def get_size_image_from_file_paths(file_path, file_extension="tif"):

image_shape = {"x": x_shape, "y": y_shape, "z": z_shape}
return image_shape


def get_tiff_meta(
path: str,
) -> Tuple[Tuple[int, int], np.dtype]:
with tifffile.TiffFile(path) as tfile:
nz = len(tfile.pages)
if not nz:
raise ValueError(f"tiff file {path} has no pages!")
first_page = tfile.pages[0]

return tfile.pages[0].shape, first_page.dtype


lazy_imread = delayed(tifffile.imread) # lazy reader


def read_z_stack(path):
"""
Reads z-stack, lazily, if possible.
If it's a text file or folder with 2D tiff files use dask to read lazily,
otherwise it's a single file tiff stack and is read into memory.
:param path: Filename of text file listing 2D tiffs, folder of 2D tiffs,
or single file tiff z-stack.
:return: The data as a dask/numpy array.
"""
if path.endswith(".tiff") or path.endswith(".tif"):
with tifffile.TiffFile(path) as tiff:
if not len(tiff.series):
raise ValueError(
f"Attempted to load {path} but couldn't read a z-stack"
)
if len(tiff.series) != 1:
raise ValueError(
f"Attempted to load {path} but found multiple stacks"
)

axes = tiff.series[0].axes.lower()
if set(axes) != {"x", "y", "z"} or axes[0] != "z":
raise ValueError(
f"Attempted to load {path} but didn't find a zyx or "
f"zxy stack. Found {axes} axes"
)

return tifffile.imread(path)

return read_with_dask(path)


def read_with_dask(path):
"""
Based on https://github.com/tlambert03/napari-ndtiffs
:param path:
:return:
"""
path = str(path)
if path.endswith(".txt"):
with open(path, "r") as f:
filenames = [line.rstrip() for line in f.readlines()]

else:
filenames = glob.glob(os.path.join(path, "*.tif"))

shape, dtype = get_tiff_meta(filenames[0])
lazy_arrays = [lazy_imread(fn) for fn in get_sorted_file_paths(filenames)]
dask_arrays = [
da.from_delayed(delayed_reader, shape=shape, dtype=dtype)
for delayed_reader in lazy_arrays
]
stack = da.stack(dask_arrays, axis=0)
return stack
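
The helpers added above split the work as follows: get_tiff_meta inspects only the first plane for its shape and dtype, read_with_dask wraps each listed tiff in a delayed tifffile.imread and stacks the results along a new leading axis, and read_z_stack keeps single-file stacks eager while routing folders and .txt listings to the lazy path. A hedged usage sketch (the directory path is hypothetical):

from brainglobe_utils.IO.image.load import read_z_stack

# A folder of 2D tiff planes comes back as a lazy dask array; only the
# metadata of one plane is read up front.
stack = read_z_stack("/data/sample_brain")  # hypothetical directory
print(stack.shape, stack.dtype)
first_plane = stack[0].compute()  # planes are only read when computed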
brainglobe_utils/IO/image/save.py (renamed from brainglobe_utils/image_io/save.py)
File renamed without changes.
brainglobe_utils/IO/image/utils.py (renamed from brainglobe_utils/image_io/utils.py)
@@ -5,7 +5,7 @@
class ImageIOLoadException(Exception):
"""
Custom exception class for errors found loading images with
image_io.load.
brainglobe_utils.IO.image.load
Alerts the user of: loading a directory containing only a single .tiff,
loading a single 2D .tiff, loading an image sequence where all 2D images
@@ -45,9 +45,7 @@ def __init__(self, error_type=None, total_size=None, free_mem=None):
)

else:
self.message = (
"File failed to load with brainglobe_utils.image_io."
)
self.message = "File failed to load with brainglobe_utils.image."

super().__init__(self.message)

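Only the module path of the exception changes here, so callers now import it from the new location when they want to handle load failures explicitly. A minimal sketch, assuming the caller falls back to a warning when loading fails (the input path is illustrative):

from brainglobe_utils.IO.image import load_any
from brainglobe_utils.IO.image.utils import ImageIOLoadException

try:
    data = load_any("possibly_unreadable_input.tif")  # illustrative path
except ImageIOLoadException as error:
    # The message distinguishes the known failure modes, e.g. a single 2D
    # tiff, mismatched image sizes in a sequence, or insufficient memory.
    print(f"Image could not be loaded: {error}")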
2 changes: 1 addition & 1 deletion brainglobe_utils/brainreg/transform.py
@@ -8,7 +8,7 @@
import tifffile
from brainglobe_atlasapi import BrainGlobeAtlas

from brainglobe_utils.image_io import get_size_image_from_file_paths
from brainglobe_utils.IO.image import get_size_image_from_file_paths


def transform_points_from_downsampled_to_atlas_space(
2 changes: 1 addition & 1 deletion brainglobe_utils/image/heatmap.py
@@ -9,7 +9,7 @@
from brainglobe_utils.image.binning import get_bins
from brainglobe_utils.image.masking import mask_image_threshold
from brainglobe_utils.image.scale import scale_and_convert_to_16_bits
from brainglobe_utils.image_io import to_tiff
from brainglobe_utils.IO.image import to_tiff


def rescale_array(source_array, target_array, order=1):
6 changes: 0 additions & 6 deletions brainglobe_utils/image_io/__init__.py

This file was deleted.

1 change: 1 addition & 0 deletions pyproject.toml
@@ -23,6 +23,7 @@ dependencies = [
"natsort",
"nibabel >= 2.1.0",
"numpy",
"dask",
"pandas",
"psutil",
"pyarrow",
@@ -5,7 +5,7 @@
import psutil
import pytest

from brainglobe_utils.image_io import load, save, utils
from brainglobe_utils.IO.image import load, save, utils


@pytest.fixture()
@@ -21,6 +21,13 @@ def array_3d(array_2d):
return volume


@pytest.fixture()
def array_3D_as_2d_tiffs_path(tmp_path, array_3d, prefix="image"):
dest_path = tmp_path / prefix
save.to_tiffs(array_3d, dest_path)
return tmp_path


@pytest.fixture()
def txt_path(tmp_path, array_3d):
"""
@@ -76,7 +83,7 @@ def test_tiff_io(tmp_path, array_3d, use_path):
save.to_tiff(array_3d, dest_path)
reloaded = load.load_img_stack(dest_path, 1, 1, 1)

assert (reloaded == array_3d).all()
np.testing.assert_array_equal(reloaded, array_3d)


@pytest.mark.parametrize(
@@ -103,34 +110,30 @@ def test_3d_tiff_scaling(
assert reloaded.shape[2] == array_3d.shape[2] * x_scaling_factor


@pytest.mark.parametrize("use_path", [True, False], ids=["Path", "String"])
@pytest.mark.parametrize("use_str", [True, False], ids=["String", "Path"])
@pytest.mark.parametrize(
"load_parallel",
[
pytest.param(True, id="parallel loading"),
pytest.param(False, id="no parallel loading"),
],
)
def test_tiff_sequence_io(tmp_path, array_3d, load_parallel, use_path):
def test_tiff_sequence_io(
array_3d, array_3D_as_2d_tiffs_path, load_parallel, use_str
):
"""
Test that a 3D image can be written and read correctly as a sequence
of 2D tiffs (with or without parallel loading). Tests using both
string and pathlib.Path input.
"""
prefix = "image"
dest_path = tmp_path / prefix
dir_path = tmp_path
if not use_path:
dest_path = str(dest_path)
dir_path = array_3D_as_2d_tiffs_path
if use_str:
dir_path = str(dir_path)

save.to_tiffs(array_3d, dest_path)
assert len(list(tmp_path.glob("*.tif"))) == array_3d.shape[0]

reloaded_array = load.load_from_folder(
dir_path, load_parallel=load_parallel
)
assert (reloaded_array == array_3d).all()
np.testing.assert_array_equal(reloaded_array, array_3d)


def test_2d_tiff(tmp_path, array_2d):
@@ -208,7 +211,7 @@ def test_load_img_sequence_from_txt(txt_path, array_3d, use_path):
txt_path = str(txt_path)

reloaded_array = load.load_img_sequence(txt_path)
assert (reloaded_array == array_3d).all()
np.testing.assert_array_equal(reloaded_array, array_3d)


@pytest.mark.parametrize(
@@ -224,9 +227,9 @@ def test_sort_img_sequence_from_txt(shuffled_txt_path, array_3d, sort):
shuffled_txt_path, 1, 1, 1, sort=sort
)
if sort:
assert (reloaded_array == array_3d).all()
np.testing.assert_array_equal(reloaded_array, array_3d)
else:
assert not (reloaded_array == array_3d).all()
assert not np.array_equal(reloaded_array, array_3d)


@pytest.mark.parametrize("use_path", [True, False], ids=["Path", "String"])
@@ -258,7 +261,7 @@ def test_nii_read_to_numpy(tmp_path, array_3d):
save.save_any(array_3d, nii_path)
reloaded_array = load.load_any(nii_path, as_numpy=True)

assert (reloaded_array == array_3d).all()
np.testing.assert_array_equal(reloaded_array, array_3d)


@pytest.mark.parametrize("use_path", [True, False], ids=["Path", "String"])
@@ -389,3 +392,17 @@ def mock_memory():

with pytest.raises(utils.ImageIOLoadException):
utils.check_mem(8, 1000)


def test_read_with_dask_txt(array_3D_as_2d_tiffs_path, array_3d):
"""
Test that a series of images can be read correctly as a dask array
"""
stack = load.read_with_dask(array_3D_as_2d_tiffs_path)
np.testing.assert_array_equal(stack, array_3d)


def test_read_with_dask_glob_txt_equal(array_3D_as_2d_tiffs_path, txt_path):
glob_stack = load.read_with_dask(array_3D_as_2d_tiffs_path)
txt_stack = load.read_with_dask(txt_path)
np.testing.assert_array_equal(glob_stack, txt_stack)
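
The txt_path fixture exercised above mirrors the plain-text input read_with_dask accepts: one 2D tiff path per line, which it then orders with get_sorted_file_paths before stacking. A hedged sketch of building such a listing outside the test suite (folder and file names are illustrative):

from pathlib import Path

from brainglobe_utils.IO.image.load import read_with_dask

# Hypothetical folder of 2D planes, e.g. plane_0000.tif, plane_0001.tif, ...
planes_dir = Path("/data/sample_brain")
listing = planes_dir / "planes.txt"
listing.write_text("\n".join(str(p) for p in sorted(planes_dir.glob("*.tif"))))

stack = read_with_dask(str(listing))  # lazily-stacked dask array, planes on axis 0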
