diff --git a/impy/arrays/__init__.py b/impy/arrays/__init__.py index 47ec30c..c5ea42c 100644 --- a/impy/arrays/__init__.py +++ b/impy/arrays/__init__.py @@ -6,17 +6,6 @@ from .lazy import LazyImgArray from .big import BigImgArray -# install deprecations - -try: - from ._deprecated import Funcs - - for k, v in Funcs.items(): - setattr(ImgArray, k, v) - -except Exception: - pass - __all__ = [ "ImgArray", "LabeledArray", diff --git a/impy/arrays/_deprecated.py b/impy/arrays/_deprecated.py deleted file mode 100644 index 066031e..0000000 --- a/impy/arrays/_deprecated.py +++ /dev/null @@ -1,366 +0,0 @@ -from __future__ import annotations -import warnings -import numpy as np -from typing import TYPE_CHECKING - -from ._utils._skimage import skfeat, skreg -from ._utils import _misc, _docs, _transform, _structures - -from ..utils.axesop import complement_axes, find_first_appeared -from ..utils.deco import check_input_and_output, dims_to_spatial_axes -from ..utils.gauss import GaussianParticle -from ..utils.misc import check_nd - -from ..collections import DataDict -from .._types import nDFloat, Dims -from ..array_api import xp -from ..axes import AxisLike - -if TYPE_CHECKING: - from ..frame import MarkerFrame - from .imgarray import ImgArray - -@dims_to_spatial_axes -def gauss_sm( - self: ImgArray, - coords = None, - radius = 4, - sigma = 1.5, - filt = None, - percentile: float = 95, - *, - return_all: bool = False, - dims = None -): - """ - Calculate positions of particles in subpixel precision using Gaussian fitting. - - Parameters - ---------- - coords : MarkerFrame or (N, 2) array, optional - Coordinates of peaks. If None, this will be determined by find_sm. - radius : int, default is 4. - Fitting range. Rectangular image with size 2r+1 x 2r+1 will be send to Gaussian - fitting function. - sigma : float, default is 1.5 - Expected standard deviation of particles. - filt : callable, optional - For every slice `sl`, label is added only when filt(`input`) == True is satisfied. - This discrimination is conducted before Gaussian fitting so that stringent filter - will save time. - percentile, dims : - Passed to peak_local_max() - return_all : bool, default is False - If True, fitting results are all returned as Frame Dict. - {dims} - - Returns - ------- - MarkerFrame, if return_all == False - Gaussian centers. - DataDict with keys {means, sigmas, errors}, if return_all == True - Dictionary that contains means, standard deviations and fitting errors. - """ - warnings.warn("'gauss_sm' is deprecated. 
Use 'centroid_sm' instead.", DeprecationWarning) - from scipy.linalg import pinv as pseudo_inverse - from ..frame import MarkerFrame - - if coords is None: - coords = self.find_sm(sigma=sigma, dims=dims, percentile=percentile) - else: - coords = _check_coordinates(coords, self) - ndim = len(dims) - filt = check_filter_func(filt) - - radius = np.asarray(check_nd(radius, ndim)) - - shape = self.sizesof(dims) - - means = [] # fitting results of means - sigmas = [] # fitting results of sigmas - errs = [] # fitting errors of means - ab = [] - for crd in coords.values: - center = tuple(crd[-ndim:]) - label_sl = tuple(crd[:-ndim]) - sl = _specify_one(center, radius, shape) # sl = (..., z,y,x) - input_img = self.value[label_sl][sl] - if input_img.size == 0 or not filt(input_img): - continue - - gaussian = GaussianParticle(initial_sg=sigma) - res = gaussian.fit(input_img, method="BFGS") - - if (gaussian.mu_inrange(0, radius*2) and - gaussian.sg_inrange(sigma/3, sigma*3) and - gaussian.a > 0): - gaussian.shift(center - radius) - # calculate fitting error with Jacobian - if return_all: - jac = res.jac[:2].reshape(1,-1) - cov = pseudo_inverse(jac.T @ jac) - err = np.sqrt(np.diag(cov)) - sigmas.append(label_sl + tuple(gaussian.sg)) - errs.append(label_sl + tuple(err)) - ab.append(label_sl + (gaussian.a, gaussian.b)) - - means.append(label_sl + tuple(gaussian.mu)) - - kw = dict(columns=coords.col_axes, dtype=np.float32) - - if return_all: - out = DataDict(means = MarkerFrame(means, **kw).as_standard_type(), - sigmas = MarkerFrame(sigmas, **kw).as_standard_type(), - errors = MarkerFrame(errs, **kw).as_standard_type(), - intensities = MarkerFrame(ab, - columns=str(coords.col_axes)[:-ndim]+"ab", - dtype=np.float32)) - - out.means.set_scale(coords.scale) - out.sigmas.set_scale(coords.scale) - out.errors.set_scale(coords.scale) - - else: - out = MarkerFrame(means, **kw) - out.set_scale(coords.scale) - - return out - - -@dims_to_spatial_axes -@check_input_and_output -def corner_peaks( - self: ImgArray, - *, - min_distance:int = 1, - percentile: float | None = None, - topn: int = np.inf, - topn_per_label: int = np.inf, - exclude_border: bool = True, - use_labels: bool = True, - dims: Dims = None -) -> MarkerFrame: - """ - Find local corner maxima. Slightly different from peak_local_max. - - Parameters - ---------- - min_distance : int, default is 1 - Minimum distance allowed for each two peaks. This parameter is slightly - different from that in ``skimage.feature.peak_local_max`` because here float - input is allowed and every time footprint is calculated. - percentile : float, optional - Percentile to compute absolute threshold. - topn : int, optional - Maximum number of peaks **for each iteration**. - topn_per_label : int, default is np.inf - Maximum number of peaks per label. - use_labels : bool, default is True - If use self.labels when it exists. - {dims} - - Returns - ------- - MarkerFrame - DataFrame with columns same as axes of self. For example, if self.axes is "tcyx" then - return value has "t", "c", "y" and "x" columns, and sub-frame at t=0, c=0 contains all - the coordinates of corners in the slice at t=0, c=0. 
- """ - warnings.warn("'corner_peaks' is deprecated and will be removed.", DeprecationWarning) - # separate spatial dimensions and others - ndim = len(dims) - dims_list = list(dims) - c_axes = complement_axes(dims, self.axes) - c_axes_list = list(c_axes) - - if isinstance(exclude_border, bool): - exclude_border = int(min_distance) if exclude_border else False - - thr = None if percentile is None else np.percentile(self.value, percentile) - - import pandas as pd - from ..frame import MarkerFrame - - df_all: list[pd.DataFrame] = [] - for sl, img in self.iter(c_axes, israw=True, exclude=dims): - # skfeat.corner_peaks overwrite something so we need to give copy of img. - if use_labels and img.labels is not None: - labels = xp.asnumpy(img.labels) - else: - labels = None - - indices = skfeat.corner_peaks( - xp.asnumpy(img), - footprint=xp.asnumpy(_structures.ball_like(min_distance, ndim)), - threshold_abs=thr, - num_peaks=topn, - num_peaks_per_label=topn_per_label, - labels=labels, - exclude_border=exclude_border - ) - indices = pd.DataFrame(indices, columns=dims_list) - indices[c_axes_list] = sl - df_all.append(indices) - - out = pd.concat(df_all, axis=0) - out = MarkerFrame(out, columns=self.axes, dtype="uint16") - out.set_scale(self) - return out - -@_docs.write_docs -@dims_to_spatial_axes -@check_input_and_output -def find_corners( - self, - sigma: nDFloat = 1, - k: float = 0.05, - *, - dims: Dims = None -) -> ImgArray: - """ - Corner detection using Harris response. - - Parameters - ---------- - {sigma} - k : float, optional - Sensitivity factor to separate corners from edges, typically in range [0, 0.2]. - Small values of k result in detection of sharp corners. - {dims} - - Returns - ------- - MarkerFrame - Coordinates of corners. For details see ``corner_peaks`` method. - """ - warnings.warn("'find_corners' is deprecated and will be removed.", DeprecationWarning) - res = self.gaussian_filter(sigma=1).corner_harris(sigma=sigma, k=k, dims=dims) - out = res.corner_peaks(min_distance=3, percentile=97, dims=dims) - return out - -@check_input_and_output -def track_template(self: ImgArray, template:np.ndarray, bg=None, along: AxisLike = "t") -> MarkerFrame: - """ - Tracking using template matching. For every time frame, matched region is interpreted as a - new template and is used for the next template. To avoid slight shifts accumulating to the - template image, new template image will be fitteg to the old one by phase cross correlation. - - Parameters - ---------- - template : np.ndarray - Template image. Must be 2 or 3 dimensional. - bg : float, optional - Background intensity. If not given, it will calculated as the minimum value of - the original image. - along : str, default is "t" - Which axis will be the time axis. - - Returns - ------- - MarkerFrame - Centers of matched templates. 
- """ - warnings.warn("'track_template' is deprecated and will be removed.", DeprecationWarning) - template = _check_template(template) - template_new = template - t_shape = np.array(template.shape) - bg = _check_bg(self, bg) - dims = "yx" if template.ndim == 2 else "zyx" - # check along - if along is None: - along = find_first_appeared("tpzc", include=self.axes, exclude=dims) - elif len(along) != 1: - raise ValueError("`along` must be single character.") - - if complement_axes(dims, self.axes) != along: - raise ValueError(f"Image axes do not match along ({along}) and template dimensions ({dims})") - - ndim = len(dims) - rem_edge_sl = tuple(slice(s//2, -s//2) for s in t_shape) - pos = [] - shift = np.zeros(ndim, dtype=np.float32) - for sl, img in self.as_float().iter(along): - template_old = _translate_image(template_new, shift, cval=bg) - _, resp = _misc.ncc(img, template_old, bg) - resp_crop = resp[rem_edge_sl] - peak = np.unravel_index(np.argmax(resp_crop), resp_crop.shape) + t_shape//2 - pos.append(peak) - sl = [] - for i in range(ndim): - d0 = peak[i] - t_shape[i]//2 - d1 = d0 + t_shape[i] - sl.append(slice(d0, d1, None)) - template_new = img[tuple(sl)] - shift = skreg.phase_cross_correlation(template_old, template_new, - return_error=False, upsample_factor=10) - - from ..frame import MarkerFrame - pos = np.array(pos) - pos = np.hstack([np.arange(self.sizeof(along), dtype=np.uint16).reshape(-1,1), pos]) - pos = MarkerFrame(pos, columns=along+dims) - - return pos - - -def _translate_image(img, shift, order=1, cval=0): - ndim = len(shift) - mx = _transform.compose_affine_matrix(translation=-np.asarray(shift), ndim=ndim) - return _transform.warp(img, mx, order=order, cval=cval) - - -def _specify_one(center, radius, shape:tuple) -> tuple[slice]: - sl = tuple(slice(max(0, xc-r), min(xc+r+1, sh), None) - for xc, r, sh in zip(center, radius, shape)) - return sl - -def check_filter_func(f): - if f is None: - f = lambda x: True - elif not callable(f): - raise TypeError("`filt` must be callable.") - return f - -def _check_bg(img: ImgArray, bg): - # determine bg - if bg is None: - bg = img.min() - elif isinstance(bg, str) and bg.endswith("%"): - bg = np.percentile(img.value, float(bg[:-1])) - elif not np.isscalar(bg): - raise TypeError("Wrong type of `bg`.") - return bg - -def _check_template(template): - if not isinstance(template, np.ndarray): - raise TypeError(f"`template` must be np.ndarray, but got {type(template)}") - elif template.ndim not in (2, 3): - raise ValueError("`template must be 2 or 3 dimensional.`") - template = np.asarray(template).astype(np.float32, copy=True) - return template - -def _check_coordinates(coords, img: ImgArray, dims: Dims = None): - from ..frame import MarkerFrame - if not isinstance(coords, MarkerFrame): - coords = np.asarray(coords) - if coords.ndim == 1: - coords = coords.reshape(1, -1) - elif coords.ndim != 2: - raise ValueError("Input cannot be interpreted as coordinate(s).") - if dims is None: - ndim = coords.shape[1] - if ndim == img.ndim: - dims = img.axes - else: - dims = complement_axes("c", img.axes)[-ndim:] - coords = MarkerFrame(coords, columns=dims, dtype=np.uint16) - coords.set_scale(img) - - return coords - - -Funcs = { - "gauss_sm": gauss_sm, - "corner_peaks": corner_peaks, - "find_corners": find_corners, - "track_template": track_template, -} diff --git a/impy/arrays/_utils/_filters.py b/impy/arrays/_utils/_filters.py index 5b6b1ba..f8656ae 100644 --- a/impy/arrays/_utils/_filters.py +++ b/impy/arrays/_utils/_filters.py @@ -1,6 +1,5 @@ from 
typing import Callable, TYPE_CHECKING import numpy as np -from ._skimage import * from ._linalg import hessian_eigval from impy.array_api import xp, cupy_dispatcher from scipy import ndimage as scipy_ndi @@ -165,8 +164,9 @@ def population(img, selem): # See skimage/feature/template.py def ncc_filter(img: np.ndarray, template: np.ndarray, bg=0, mode="constant"): from scipy.signal import fftconvolve + from skimage import feature ndim = template.ndim - _win_sum = skfeat.template._window_sum_2d if ndim == 2 else skfeat.template._window_sum_3d + _win_sum = feature.template._window_sum_2d if ndim == 2 else feature.template._window_sum_3d pad_width = [(w, w) for w in template.shape] padimg = np.pad(img, pad_width=pad_width, mode=mode, constant_values=bg) diff --git a/impy/arrays/_utils/_linalg.py b/impy/arrays/_utils/_linalg.py index e3c61f6..0e63b84 100644 --- a/impy/arrays/_utils/_linalg.py +++ b/impy/arrays/_utils/_linalg.py @@ -2,7 +2,6 @@ import numpy as np from typing import TYPE_CHECKING from skimage.feature.corner import _symmetric_image -from ._skimage import skfeat from impy.array_api import xp if TYPE_CHECKING: @@ -20,25 +19,33 @@ def eigvalsh(a: np.ndarray): return val def structure_tensor_eigval(img: np.ndarray, sigma: float, pxsize: float): - tensor_elements = skfeat.structure_tensor( + from skimage.feature import structure_tensor + + tensor_elements = structure_tensor( img, sigma, mode="reflect" ) return _solve_hermitian_eigval(tensor_elements, pxsize) def structure_tensor_eigh(img: np.ndarray, sigma: float, pxsize: float): - tensor_elements = skfeat.structure_tensor( + from skimage.feature import structure_tensor + + tensor_elements = structure_tensor( img, sigma, mode="reflect" ) return _solve_hermitian_eigs(tensor_elements, pxsize) def hessian_eigval(img: np.ndarray, sigma: float, pxsize: float): - hessian_elements = skfeat.hessian_matrix( + from skimage.feature import hessian_matrix + + hessian_elements = hessian_matrix( img, sigma=sigma, mode="reflect", use_gaussian_derivatives=False, ) return _solve_hermitian_eigval(hessian_elements, pxsize) def hessian_eigh(img: np.ndarray, sigma: float, pxsize: float): - hessian_elements = skfeat.hessian_matrix( + from skimage.feature import hessian_matrix + + hessian_elements = hessian_matrix( img, sigma=sigma, mode="reflect", use_gaussian_derivatives=False, ) return _solve_hermitian_eigs(hessian_elements, pxsize) diff --git a/impy/arrays/_utils/_plot.py b/impy/arrays/_utils/_plot.py index da028fd..34b8147 100644 --- a/impy/arrays/_utils/_plot.py +++ b/impy/arrays/_utils/_plot.py @@ -1,5 +1,5 @@ import numpy as np -from ._skimage import skimage, skexp +import skimage def plot_drift(result): import matplotlib.pyplot as plt @@ -66,6 +66,7 @@ def plot_3d(imglist, **kwargs): def plot_2d_label(img, label, alpha, ax=None, **kwargs): import matplotlib.pyplot as plt + from skimage.color import label2rgb vmax, vmin = _determine_range(img) imshow_kwargs = {"vmax": vmax, "vmin": vmin, "interpolation": "none"} imshow_kwargs.update(kwargs) @@ -75,7 +76,7 @@ def plot_2d_label(img, label, alpha, ax=None, **kwargs): image = (np.clip(img, vmin, vmax) - vmin)/(vmax - vmin) else: image = img - overlay = skimage.color.label2rgb(label, image=image, bg_label=0, alpha=alpha, image_alpha=1) + overlay = label2rgb(label, image=image, bg_label=0, alpha=alpha, image_alpha=1) if ax is None: plt.imshow(overlay, **imshow_kwargs) @@ -112,11 +113,12 @@ def plot_3d_label(imglist, labellist, alpha, **kwargs): def hist(img, contrast): import matplotlib.pyplot as plt + from 
skimage.exposure import histogram plt.figure(figsize=(4, 1.7)) nbin = min(int(np.sqrt(img.size / 3)), 256) d = img.astype(np.uint8).ravel() if img.dtype==bool else img.ravel() - y, x = skexp.histogram(d, nbins=nbin) + y, x = histogram(d, nbins=nbin) plt.plot(x, y, color="gray") plt.fill_between(x, y, np.zeros(len(y)), facecolor="gray", alpha=0.4) diff --git a/impy/arrays/_utils/_skimage.py b/impy/arrays/_utils/_skimage.py index 2bfaf3b..207f950 100644 --- a/impy/arrays/_utils/_skimage.py +++ b/impy/arrays/_utils/_skimage.py @@ -1,16 +1,5 @@ from __future__ import annotations -# skimage.morphology takes very long time to import if <0.19. -import skimage -from skimage import transform as sktrans -from skimage import filters as skfil -from skimage import exposure as skexp -from skimage import measure as skmes -from skimage import segmentation as skseg -from skimage import restoration as skres -from skimage import feature as skfeat -from skimage import registration as skreg -from skimage import graph as skgraph -from skimage import util as skutil + import numpy as np from functools import reduce, lru_cache diff --git a/impy/arrays/_utils/_transform.py b/impy/arrays/_utils/_transform.py index 1194153..4f9dcd6 100644 --- a/impy/arrays/_utils/_transform.py +++ b/impy/arrays/_utils/_transform.py @@ -3,7 +3,6 @@ from typing import Iterable, NamedTuple, Sequence, TYPE_CHECKING import numpy as np -from ._skimage import sktrans from impy.axes import Axis, Axes, AxisLike from impy.array_api import xp @@ -53,10 +52,12 @@ def compose_affine_matrix( shear=None, ndim: int = 2, ): + from skimage.transform import AffineTransform + # These two modules returns consistent matrix in the two dimensional case. # rotation must be in radian. if ndim == 2: - af = sktrans.AffineTransform(scale=scale, translation=translation, rotation=rotation, shear=shear) + af = AffineTransform(scale=scale, translation=translation, rotation=rotation, shear=shear) mx = af.params else: from napari.utils.transforms import Affine @@ -80,7 +81,8 @@ def compose_affine_matrix( def decompose_affine_matrix(matrix: np.ndarray): ndim = matrix.shape[0] - 1 if ndim == 2: - af = sktrans.AffineTransform(matrix=matrix) + from skimage.transform import AffineTransform + af = AffineTransform(matrix=matrix) out = AffineTransformationParameters(translation=af.translation, rotation=af.rotation, scale=af.scale, shear=af.shear) else: @@ -91,7 +93,7 @@ def decompose_affine_matrix(matrix: np.ndarray): return out -def calc_corr(img0, img1, matrix, corr_func): +def calc_corr(img0: np.ndarray, img1: np.ndarray, matrix, corr_func): """ Calculate value of corr_func(img0, matrix(img1)). 
""" @@ -99,9 +101,11 @@ def calc_corr(img0, img1, matrix, corr_func): return corr_func(img0, img1_transformed) def affinefit(img, imgref, bins=256, order=1): - as_3x3_matrix = lambda mtx: np.vstack((mtx.reshape(2,3), [0., 0., 1.])) from scipy.stats import entropy from scipy.optimize import minimize + from skimage.transform import warp + + as_3x3_matrix = lambda mtx: np.vstack((mtx.reshape(2,3), [0., 0., 1.])) def normalized_mutual_information(img, imgref): """ Y(A,B) = (H(A)+H(B))/H(A,B) @@ -116,7 +120,7 @@ def normalized_mutual_information(img, imgref): def cost_nmi(mtx, img, imgref): mtx = as_3x3_matrix(mtx) - img_transformed = sktrans.warp(img, mtx, order=order) + img_transformed = warp(img, mtx, order=order) return -normalized_mutual_information(img_transformed, imgref) mtx0 = np.array([[1., 0., 0.], diff --git a/impy/arrays/imgarray.py b/impy/arrays/imgarray.py index 9ca8a4b..d3e6091 100644 --- a/impy/arrays/imgarray.py +++ b/impy/arrays/imgarray.py @@ -4,6 +4,7 @@ from functools import partial from scipy import ndimage as ndi from typing import TYPE_CHECKING, Literal, Sequence, Iterable, Callable +import skimage from .labeledarray import LabeledArray from .label import Label @@ -11,7 +12,6 @@ from .specials import PropArray from .tiled import TiledAccessor -from ._utils._skimage import skexp, skfeat, skfil, skimage, skmes, skres, skseg, sktrans from ._utils import _filters, _linalg, _deconv, _misc, _glcm, _docs, _transform, _structures, _corr from impy.utils.axesop import add_axes, switch_slice, complement_axes, find_first_appeared @@ -244,6 +244,7 @@ def rescale( ImgArray Rescaled image. """ + from skimage.transform import rescale scale_is_seq = hasattr(scale, "__iter__") # Check if output is too large. @@ -265,7 +266,7 @@ def rescale( scale_ = [next(it) if a in dims else 1 for a in self.axes] else: scale_ = [scale if a in dims else 1 for a in self.axes] - out = sktrans.rescale( + out = rescale( self.value, scale_, order=order, mode=mode, cval=cval, anti_aliasing=False ) out = out.view(self.__class__)._set_info(self) @@ -696,11 +697,9 @@ def edge_filter( ImgArray Filtered image. """ + from skimage.filters import sobel, farid, scharr, prewitt # Get operator - method_dict = {"sobel": skfil.sobel, - "farid": skfil.farid, - "scharr": skfil.scharr, - "prewitt": skfil.prewitt} + method_dict = {"sobel": sobel, "farid": farid, "scharr": scharr, "prewitt": prewitt} try: f = method_dict[method] except KeyError: @@ -1347,14 +1346,16 @@ def entropy_filter(self, radius: nDFloat = 5, *, dims: Dims = None) -> ImgArray: ------- ImgArray Filtered image. - """ + """ + from skimage.filters.rank import entropy disk = _structures.ball_like(radius, len(dims)) self = self.as_uint8() - return self._apply_dask(skfil.rank.entropy, - c_axes=complement_axes(dims, self.axes), - kwargs=dict(footprint=disk) - ).as_float() + return self._apply_dask( + entropy, + c_axes=complement_axes(dims, self.axes), + kwargs=dict(footprint=disk), + ).as_float() @_docs.write_docs @dims_to_spatial_axes @@ -1371,18 +1372,21 @@ def enhance_contrast(self, radius:nDFloat=1, *, dims: Dims = None, update: bool ------- ImgArray Contrast enhanced image. 
- """ + """ + from skimage.filters.rank import enhance_contrast + disk = _structures.ball_like(radius, len(dims)) if self.dtype == np.float32: amp = max(np.abs(self.range)) self.value[:] /= amp with warnings.catch_warnings(): warnings.simplefilter("ignore") - out = self._apply_dask(skfil.rank.enhance_contrast, - c_axes=complement_axes(dims, self.axes), - dtype=self.dtype, - args=(disk,) - ) + out = self._apply_dask( + enhance_contrast, + c_axes=complement_axes(dims, self.axes), + dtype=self.dtype, + args=(disk,) + ) if self.dtype == np.float32: self.value[:] *= amp @@ -1406,8 +1410,9 @@ def laplacian_filter(self, radius: int = 1, *, dims: Dims = None, update: bool = ImgArray Filtered image. """ + from skimage.restoration.uft import laplacian ndim = len(dims) - _, laplace_op = skres.uft.laplacian(ndim, (2*radius+1,) * ndim) + _, laplace_op = laplacian(ndim, (2 * radius + 1,) * ndim) return self.as_float()._apply_dask( _filters.convolve, c_axes=complement_axes(dims, self.axes), @@ -1740,7 +1745,8 @@ def rolling_ball( ------- ImgArray Background subtracted image. - """ + """ + from skimage.restoration import rolling_ball c_axes = complement_axes(dims, self.axes) if prefilter == "mean": filt = self._apply_dask( @@ -1760,7 +1766,7 @@ def rolling_ball( raise ValueError("`prefilter` must be 'mean', 'median' or 'none'.") filt.axes = self.axes back = filt._apply_dask( - skres.rolling_ball, + rolling_ball, c_axes=c_axes, kwargs=dict(radius=radius) ) @@ -1800,9 +1806,10 @@ def rof_filter( ------- ImgArray Filtered image - """ + """ + from skimage.restoration._denoise import _denoise_tv_chambolle_nd return self._apply_dask( - skres._denoise._denoise_tv_chambolle_nd, + _denoise_tv_chambolle_nd, c_axes=complement_axes(dims, self.axes), kwargs=dict(weight=lmd, eps=tol, max_num_iter=max_iter) ) @@ -1849,7 +1856,8 @@ def wavelet_denoising( ------- ImgArray Denoised image. - """ + """ + from skimage.restoration import cycle_spin, denoise_wavelet func_kw = dict( sigma=noise_sigma, wavelet=wavelet, @@ -1858,9 +1866,9 @@ def wavelet_denoising( method=method ) return self._apply_dask( - skres.cycle_spin, + cycle_spin, c_axes=complement_axes(dims, self.axes), - args=(skres.denoise_wavelet,), + args=(denoise_wavelet,), kwargs=dict(func_kw=func_kw, max_shifts=max_shifts, shift_steps=shift_steps) ) @@ -2068,7 +2076,8 @@ def inpaint( Inpainted image of same data type. """ if method == "biharmonic": - func = skres.inpaint.inpaint_biharmonic + from skimage.restoration.inpaint import inpaint_biharmonic + func = inpaint_biharmonic elif method == "mean": func = _misc.inpaint_mean else: @@ -2133,6 +2142,7 @@ def peak_local_max( import pandas as pd from impy.frame import MarkerFrame + from skimage.feature import peak_local_max df_all: list[pd.DataFrame] = [] for sl, img in self.iter(c_axes, israw=True, exclude=dims): @@ -2142,7 +2152,7 @@ def peak_local_max( else: labels = None - indices = skfeat.peak_local_max( + indices = peak_local_max( xp.asnumpy(img), footprint=xp.asnumpy(_structures.ball_like(min_distance, ndim)), threshold_abs=thr, @@ -2185,9 +2195,10 @@ def corner_harris( ------- ImgArray Harris response - """ + """ + from skimage.feature import corner_harris return self._apply_dask( - skfeat.corner_harris, + corner_harris, c_axes=complement_axes(dims, self.axes), kwargs=dict(k=k, sigma=sigma) ) @@ -2223,6 +2234,7 @@ def voronoi( Segmentation labels of image. 
""" from scipy.spatial import Voronoi + from skimage.measure import grid_points_in_poly coords = _check_coordinates(coords, self, dims=self.axes) ny, nx = self.sizesof(dims) @@ -2251,7 +2263,7 @@ def voronoi( for r in vor.regions: if all(r0 > 0 for r0 in r): poly = vor.vertices[r] - grids = skmes.grid_points_in_poly(self.sizesof(dims), poly) + grids = grid_points_in_poly(self.sizesof(dims), poly) labels[sl][grids] = n_label n_label += 1 self.labels = Label( @@ -2503,10 +2515,12 @@ def edge_grad( >>> plt.hist(grad.ravel(), bins=100) """ # Get operator - method_dict = {"sobel": (skfil.sobel_h, skfil.sobel_v), - "farid": (skfil.farid_h, skfil.farid_v), - "scharr": (skfil.scharr_h, skfil.scharr_v), - "prewitt": (skfil.prewitt_h, skfil.prewitt_v)} + method_dict = { + "sobel": (skimage.filters.sobel_h, skimage.filters.sobel_v), + "farid": (skimage.filters.farid_h, skimage.filters.farid_v), + "scharr": (skimage.filters.scharr_h, skimage.filters.scharr_v), + "prewitt": (skimage.filters.prewitt_h, skimage.filters.prewitt_v) + } try: op_h, op_v = method_dict[method] except KeyError: @@ -2608,18 +2622,20 @@ def gabor_angle( See Also -------- hessian_angle - """ + """ + from skimage.filters import gabor_kernel thetas = np.linspace(0, np.pi, n_sample, False) max_ = np.empty(self.shape, dtype=np.float32) argmax_ = np.zeros(self.shape, dtype=np.float32) # This is float32 because finally this becomes angle array. c_axes = complement_axes(dims, self.axes) for i, theta in enumerate(thetas): - ker = skfil.gabor_kernel(1/lmd, theta, 0, sigma, sigma/gamma, 3, phi).astype(np.complex64) - out_ = self.as_float()._apply_dask(_filters.convolve, - c_axes=c_axes, - args=(ker.real,) - ) + ker = gabor_kernel(1/lmd, theta, 0, sigma, sigma/gamma, 3, phi, dtype=np.complex64) + out_ = self.as_float()._apply_dask( + _filters.convolve, + c_axes=c_axes, + args=(ker.real,) + ) if i > 0: where_update = out_ > max_ max_[where_update] = out_[where_update] @@ -2680,9 +2696,10 @@ def gabor_filter( >>> for i, theta in enumerate(thetas): >>> out[i] = img.gabor_filter(theta=theta) >>> out = np.max(out, axis=0) - """ + """ + from skimage.filters import gabor_kernel # TODO: 3D Gabor filter - ker = skfil.gabor_kernel(1/lmd, theta, 0, sigma, sigma/gamma, 3, phi).astype(np.complex64) + ker = gabor_kernel(1/lmd, theta, 0, sigma, sigma/gamma, 3, phi).astype(np.complex64) if return_imag: out = self.as_float()._apply_dask( _filters.gabor_filter, @@ -3144,18 +3161,7 @@ def threshold( if along is None: along = "c" if "c" in self.axes else "" - methods_ = {"isodata": skfil.threshold_isodata, - "li": skfil.threshold_li, - "local": skfil.threshold_local, - "mean": skfil.threshold_mean, - "min": skfil.threshold_minimum, - "minimum": skfil.threshold_minimum, - "niblack": skfil.threshold_niblack, - "otsu": skfil.threshold_otsu, - "sauvola": skfil.threshold_sauvola, - "triangle": skfil.threshold_triangle, - "yen": skfil.threshold_yen - } + methods_ = ["isodata", "li", "local", "mean", "min", "minimum", "niblack", "otsu", "sauvola", "triangle", "yen"] if isinstance(thr, str) and thr.endswith("%"): p = float(thr[:-1].strip()) @@ -3166,17 +3172,13 @@ def threshold( elif isinstance(thr, str): method = thr.lower() - try: - func = methods_[method] - except KeyError: - s = ", ".join(list(methods_.keys())) - raise KeyError(f"{method}\nmethod must be: {s}") - + if method not in methods_: + raise KeyError(f"{method}\nmethod must be in: {methods_!r}") + func = getattr(skimage.filters, "threshold_" + method) out = np.zeros(self.shape, dtype=bool) for sl, img in 
self.iter(along): thr = func(img, **kwargs) - out[sl] = img >= thr - + out[sl] = img >= thr elif np.isscalar(thr): out = self >= thr @@ -3482,7 +3484,7 @@ def watershed( Label Updated labels. """ - + from skimage.segmentation import watershed # Prepare the input image. if input == "self": input_img = self.copy() @@ -3508,9 +3510,12 @@ def watershed( # crd.values is (N, 2) array so tuple(crd.values.T.tolist()) is two (N,) list. crd = crd.values.T.tolist() markers[tuple(crd)] = np.arange(1, len(crd[0])+1, dtype=labels.dtype) - labels[sl] = skseg.watershed(-input_img.value[sl], markers, - mask=input_img.labels.value[sl], - connectivity=connectivity) + labels[sl] = watershed( + -input_img.value[sl], + markers, + mask=input_img.labels.value[sl], + connectivity=connectivity + ) labels[sl][labels[sl]>0] += n_labels n_labels = labels[sl].max() markers[:] = 0 # reset placeholder @@ -3548,10 +3553,11 @@ def random_walker( ImgArray Relabeled image. """ + from skimage.segmentation import random_walker c_axes = complement_axes(dims, self.axes) for sl, img in self.iter(c_axes, israw=True): - img.labels[:] = skseg.random_walker( + img.labels[:] = random_walker( img.value, img.labels.value, beta=beta, mode=mode, tol=tol ) @@ -3622,6 +3628,7 @@ def regionprops( >>> img.specify(coords, 3, labeltype="circle") >>> props = img.regionprops() """ + from skimage.measure import regionprops id_axis = "N" if isinstance(properties, str): properties = (properties,) @@ -3647,7 +3654,7 @@ def regionprops( # calculate property value for each slice for sl, img in self.iter(prop_axes, exclude=self.labels.axes): - props = skmes.regionprops(self.labels.value, img, cache=False, + props = regionprops(self.labels.value, img, cache=False, extra_properties=extra_properties) label_sl = (slice(None),) + sl for prop_name in properties: @@ -3688,9 +3695,9 @@ def lbp( ImgArray Local binary pattern image. """ - + from skimage.feature import local_binary_pattern return self._apply_dask( - skfeat.local_binary_pattern, + local_binary_pattern, c_axes=complement_axes(dims), args=(p, radius, method) ) @@ -3862,7 +3869,7 @@ def rescale_intensity( out = self.view(np.ndarray).astype(np.float32) lowerlim, upperlim = _check_clip_range(in_range, self.value) - out = skexp.rescale_intensity(out, in_range=(lowerlim, upperlim), out_range="dtype") + out = skimage.exposure.rescale_intensity(out, in_range=(lowerlim, upperlim), out_range="dtype") out = out.view(self.__class__) return out @@ -4032,10 +4039,11 @@ def estimate_sigma(self, *, squeeze: bool = True, dims: Dims = None) -> PropArra PropArray or float Estimated standard deviation. sigma["t=0;c=1"] means the estimated value of image slice at t=0 and c=1. 
- """ + """ + from skimage.restoration import estimate_sigma c_axes = complement_axes(dims, self.axes) out = self._apply_dask( - skres.estimate_sigma, + estimate_sigma, c_axes=c_axes, drop_axis=dims ) @@ -4461,7 +4469,8 @@ def _check_template(template): return template def _calc_centroid(img: np.ndarray, ndim: int) -> np.ndarray: - mom = skmes.moments(img, order=1) + from skimage.measure import moments + mom = moments(img, order=1) centroid = np.array([mom[(0,)*i + (1,) + (0,)*(ndim-i-1)] for i in range(ndim)]) / mom[(0,)*ndim] return centroid diff --git a/impy/arrays/label.py b/impy/arrays/label.py index d837bfc..81f9948 100644 --- a/impy/arrays/label.py +++ b/impy/arrays/label.py @@ -2,7 +2,6 @@ from typing import TYPE_CHECKING import numpy as np -from ._utils._skimage import skseg from ._utils import _filters, _structures, _docs from .bases import MetaArray @@ -83,7 +82,8 @@ def optimize(self) -> Label: return self def relabel(self) -> Label: - self.value[:] = skseg.relabel_sequential(self.value)[0] + from skimage.segmentation import relabel_sequential + self.value[:] = relabel_sequential(self.value)[0] return self @@ -103,12 +103,14 @@ def expand_labels(self, distance:int=1, *, dims=None) -> Label: ------- Label Same array but labels are updated. - """ - labels = self._apply_dask(skseg.expand_labels, - c_axes=complement_axes(dims, self.axes), - dtype=self.dtype, - kwargs=dict(distance=distance) - ) + """ + from skimage.segmentation import expand_labels + labels = self._apply_dask( + expand_labels, + c_axes=complement_axes(dims, self.axes), + dtype=self.dtype, + kwargs=dict(distance=distance) + ) self.value[:] = labels return self diff --git a/impy/arrays/labeledarray.py b/impy/arrays/labeledarray.py index 5896f65..7aaff54 100644 --- a/impy/arrays/labeledarray.py +++ b/impy/arrays/labeledarray.py @@ -8,7 +8,6 @@ from scipy import ndimage as ndi from .specials import PropArray -from ._utils._skimage import skmes, skseg from ._utils import _misc, _docs from .bases import MetaArray from .label import Label @@ -1064,7 +1063,10 @@ def filt(img, lbl, area, major_axis_length): >>> return lbl[yc, xc] > 0 >>> img.label(lbl, filt) - """ + """ + from skimage.segmentation import relabel_sequential + from skimage.measure import label as skmes_label + from skimage.measure import regionprops # check the shape of label_image if ref_image is None: ref_image = self @@ -1086,7 +1088,7 @@ def filt(img, lbl, area, major_axis_length): if filt is None: labels[:] = ref_image._apply_dask( - skmes.label, + skmes_label, c_axes=c_axes, kwargs=dict(background=0, connectivity=connectivity) ).view(np.ndarray) @@ -1100,18 +1102,18 @@ def filt(img, lbl, area, major_axis_length): offset = 1 for sl, lbl in ref_image.iter(c_axes): - lbl = skmes.label(lbl, background=0, connectivity=connectivity) + lbl = skmes_label(lbl, background=0, connectivity=connectivity) img = self.value[sl] # Following lines are essentially doing the same thing as # `skmes.regionprops_table`. However, `skmes.regionprops_table` # returns tuples in the separated columns in DataFrame and rename # property names like "centroid-0" and "centroid-1". 
- props_obj = skmes.regionprops(lbl, img, cache=False) + props_obj = regionprops(lbl, img, cache=False) d = {prop_name: [getattr(prop, prop_name) for prop in props_obj] for prop_name in properties} df = pd.DataFrame(d) del_list = [i + 1 for i, r in df.iterrows() if not filt(img, lbl, **r)] - labels[sl] = skseg.relabel_sequential( + labels[sl] = relabel_sequential( np.where(np.isin(lbl, del_list), 0, lbl), offset=offset )[0] @@ -1124,24 +1126,7 @@ def filt(img, lbl, area, major_axis_length): labels.set_scale(self) self.labels = labels return self.labels - - @_docs.write_docs - @dims_to_spatial_axes - def label_if( - self, - ref_image: np.ndarray | None = None, - filt: Callable[..., bool] | None = None, - *, - dims: Dims = None, - connectivity: int | None = None, - ) -> Label: - warn( - "`label_if` is deprecated and will be removed soon. `label` method does the " - "same function", - DeprecationWarning, - ) - return self.label(ref_image, filt, dims=dims, connectivity=connectivity) - + @check_input_and_output def append_label(self, label_image: np.ndarray, new: bool = False) -> Label: """ diff --git a/impy/arrays/phasearray.py b/impy/arrays/phasearray.py index d8d50b8..e5c0fad 100644 --- a/impy/arrays/phasearray.py +++ b/impy/arrays/phasearray.py @@ -3,7 +3,6 @@ import numpy as np from ._utils import _filters, _structures -from ._utils._skimage import skmes from .specials import PropArray from .labeledarray import LabeledArray from .bases import MetaArray @@ -251,7 +250,8 @@ def regionprops(self, properties: tuple[str,...] | str = ("phase_mean",), *, >>> coords = reference_img.centroid_sm() >>> img.specify(coords, 3, labeltype="circle") >>> props = img.regionprops() - """ + """ + from skimage.measure import regionprops def phase_mean(sl, img): return _calc_phase_mean(sl, img, self.periodicity) def phase_std(sl, img): @@ -292,8 +292,12 @@ def phase_std(sl, img): # calculate property value for each slice for sl, img in self.iter(prop_axes, exclude=self.labels.axes): - props = skmes.regionprops(self.labels, img, cache=False, - extra_properties=extra_properties) + props = regionprops( + self.labels, + img, + cache=False, + extra_properties=extra_properties, + ) label_sl = (slice(None),) + sl for prop_name in properties: # Both sides have length of p-axis (number of labels) so that values
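
Note on the import refactor: throughout this patch the `impy/arrays/_utils/_skimage.py` aggregator, which eagerly imported every scikit-image submodule (and carried the comment that `skimage.morphology` is slow to import on versions below 0.19), is replaced by function-local imports, so each scikit-image submodule is loaded only the first time the corresponding method runs. A minimal sketch of the pattern, with a hypothetical `detect_peaks` wrapper standing in for any of the refactored methods:

# Hypothetical illustration of the deferred-import pattern used in this patch.
def detect_peaks(img, min_distance=1):
    # Imported at call time so that `import impy` stays fast; after the first
    # call Python caches the submodule in sys.modules, so repeated calls pay
    # no extra import cost.
    from skimage.feature import peak_local_max
    return peak_local_max(img, min_distance=min_distance)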
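
The `threshold` hunk in `impy/arrays/imgarray.py` replaces the explicit name-to-function dict with a whitelist plus `getattr(skimage.filters, "threshold_" + method)`. A hedged sketch of that dispatch follows; the alias table is an assumption added for illustration, because the old dict mapped "min" to `skimage.filters.threshold_minimum` while scikit-image has no `threshold_min` attribute, so a bare `getattr` lookup would fail for that spelling.

import skimage.filters

# Hypothetical alias table: keeps the old "min" spelling working under the
# getattr-based dispatch.
_THRESHOLD_ALIASES = {"min": "minimum"}

def resolve_threshold(method: str):
    # Normalize the name, map known aliases, then look the function up on
    # skimage.filters (threshold_otsu, threshold_li, ...).
    name = _THRESHOLD_ALIASES.get(method.lower(), method.lower())
    try:
        return getattr(skimage.filters, f"threshold_{name}")
    except AttributeError:
        raise KeyError(f"unknown threshold method: {method!r}")

For a scalar method such as "otsu", `resolve_threshold("otsu")(img)` returns a single threshold value, which matches how the loop in the patch compares `img >= thr` slice by slice.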
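
Migration note for the removed deprecated API: `gauss_sm`, `corner_peaks`, `find_corners`, and `track_template` were monkey-patched onto `ImgArray` from `_deprecated.py` and previously only emitted a `DeprecationWarning`; after this patch they no longer exist, so callers get an `AttributeError` instead. The warnings themselves already named the replacements where one exists: `centroid_sm` for `gauss_sm`, and the plain `label` method for the removed `LabeledArray.label_if`.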