Commit 57f0837: merge

hanjinliu committed Feb 28, 2022
2 parents 165aa83 + 83d9419
Showing 24 changed files with 562 additions and 367 deletions.
13 changes: 2 additions & 11 deletions impy/__init__.py
@@ -1,19 +1,11 @@
__version__ = "1.25.3.dev0"
__version__ = "1.26.0"
__author__ = "Hanjin Liu"
__email__ = "[email protected]"

import logging
from functools import wraps

from ._const import Const, SetConst

from ._cupy import GPU_AVAILABLE
if GPU_AVAILABLE:
    Const._setitem_("RESOURCE", "cupy")
    Const["SCHEDULER"] = "single-threaded"
else:
    Const._setitem_("RESOURCE", "numpy")
del GPU_AVAILABLE
from ._const import Const, SetConst, silent, use

from .collections import *
from .core import *
@@ -22,7 +14,6 @@
from .correlation import *
from .arrays import ImgArray, LazyImgArray # for typing
from . import random
import numpy as np

r"""
Inheritance
105 changes: 70 additions & 35 deletions impy/_const.py
@@ -1,19 +1,25 @@
import dask
from __future__ import annotations
import psutil
from typing import Any, MutableMapping

memory = psutil.virtual_memory()

MAX_GB_LIMIT = memory.total / 2 * 1e-9

class GlobalConstant(dict):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
class GlobalConstant(MutableMapping[str, Any]):
    _const: dict[str, Any]

    def __init__(self, **kwargs):
        object.__setattr__(self, "_const", dict(**kwargs))

    def __len__(self) -> int:
        return len(self._const)

    def __iter__(self):
        raise StopIteration

    def __getitem__(self, k):
        try:
            return super().__getitem__(k)
        except KeyError:
            raise KeyError(f"Global constants: {', '.join(self.keys())}")
        return self._const[k]

    def __setitem__(self, k, v):
        if k == "MAX_GB":
@@ -33,18 +39,22 @@ def __setitem__(self, k, v):
                raise ValueError("ID_AXIS must be single character.")
        elif k == "FONT_SIZE_FACTOR":
            if not isinstance(v, (int, float)):
                raise TypeError("MAX_GB must be float.")
                raise TypeError("FONT_SIZE_FACTOR must be float.")
        elif k == "RESOURCE":
            raise RuntimeError("Cannot set RESOURCE.")
            from .array_api import xp
            if v == "numpy":
                xp.setNumpy()
            elif v == "cupy":
                xp.setCupy()
            else:
                raise ValueError("RESOURCES must be either 'numpy' or 'cupy'.")
        elif k == "SCHEDULER":
            import dask
            dask.config.set(scheduler=v)
        else:
            raise RuntimeError("Cannot set new keys.")

        super().__setitem__(k, v)

    def _setitem_(self, k, v):
        super().__setitem__(k, v)
        self._const[k] = v

    __getattr__ = __getitem__
    __setattr__ = __setitem__
@@ -53,15 +63,16 @@ def __delitem__(self, v):
        raise RuntimeError("Cannot delete any items.")

    def __repr__(self):
        return \
f"""
MAX_GB          : {self.MAX_GB:.2f} GB
SHOW_PROGRESS   : {self.SHOW_PROGRESS}
ID_AXIS         : {self.ID_AXIS}
FONT_SIZE_FACTOR: {self.FONT_SIZE_FACTOR}
RESOURCE        : {self.RESOURCE}
SCHEDULER       : {self.SCHEDULER}
"""
        return (
            f"""
MAX_GB          : {self['MAX_GB']:.2f} GB
SHOW_PROGRESS   : {self['SHOW_PROGRESS']}
ID_AXIS         : {self['ID_AXIS']}
FONT_SIZE_FACTOR: {self['FONT_SIZE_FACTOR']}
RESOURCE        : {self['RESOURCE']}
SCHEDULER       : {self['SCHEDULER']}
"""
        )

Const = GlobalConstant(
    MAX_GB = MAX_GB_LIMIT/2,
@@ -74,19 +85,43 @@ def __repr__(self):

class SetConst:
    n_ongoing = 0
    def __init__(self, name=None, value=None, **kwargs):
        if name is None and value is None and len(kwargs) == 1:
            name, value = list(kwargs.items())[0]
        elif name in Const.keys() and value is not None:
            pass
        else:
            raise TypeError("Invalid input for SetConst")
        self.name = name
        self.value = value
    def __init__(self, dict_: dict[str, Any] | None =None, **kwargs):
        dict_ = dict_ or {}
        dict_.update(kwargs)
        self._kwargs = dict_

    def __enter__(self):
        self.old_value = Const[self.name]
        Const[self.name] = self.value
        self._old_value = [(k, Const[k]) for k in self._kwargs.keys()]
        for k, v in self._kwargs.items():
            Const[k] = v

    def __exit__(self, exc_type, exc_value, traceback):
        Const[self.name] = self.old_value
        for k, v in self._old_value:
            Const[k] = v

def silent():
    """
    Do not show progress in this context.
    An alias of ``ip.SetConst(SHOW_PROGRESS=False)``
    """
    return SetConst(SHOW_PROGRESS=False)

def use(resource, import_error: bool = False):
    """
    Use a resource (numpy or cupy) in this context.

    Parameters
    ----------
    resource : str
        Resource to use.
    import_error : bool, default is False
        If False, fall back to "numpy" when cupy is not available.
        If True, raise ImportError in that case.
    """
    if not import_error:
        try:
            import cupy
        except ImportError:
            resource = "numpy"
    return SetConst(RESOURCE=resource)
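
For orientation, a minimal usage sketch of the reworked SetConst and the new silent/use helpers, assuming impy is imported as ip (which re-exports these names per the __init__.py change above); this is illustrative only, not part of the commit:

import impy as ip

# Temporarily override any number of global constants; the previous values
# are restored by SetConst.__exit__ when the block ends.
with ip.SetConst(SHOW_PROGRESS=False, MAX_GB=2.0):
    ...  # run quiet, memory-capped operations here

with ip.silent():        # alias of ip.SetConst(SHOW_PROGRESS=False)
    ...

with ip.use("cupy"):     # falls back to "numpy" if cupy cannot be imported
    ...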
40 changes: 0 additions & 40 deletions impy/_cupy.py

This file was deleted.

148 changes: 148 additions & 0 deletions impy/array_api.py
@@ -0,0 +1,148 @@

from functools import wraps
import numpy as np
from numpy.typing import ArrayLike, DTypeLike

def cupy_dispatcher(function):
    @wraps(function)
    def func(*args, **kwargs):
        if xp.state == "cupy":
            args = (xp.asarray(a) if isinstance(a, np.ndarray) else a for a in args)
        out = function(*args, **kwargs)
        return xp.asnumpy(out)
    return func

from types import ModuleType
from scipy import ndimage as scipy_ndi
from typing import Callable

# CUDA <= ver.8 does not have gradient
def _gradient(a, axis=None):
    out = np.gradient(a.get(), axis=axis)
    return xp.asarray(out)

class XP:
    fft: ModuleType
    linalg: ModuleType
    random: ModuleType
    ndi: ModuleType
    asnumpy: Callable[[ArrayLike, DTypeLike], np.ndarray]
    asarray: Callable[[ArrayLike], ArrayLike]
    ndarray: type
    state: str

    def __init__(self):
        self.setNumpy()

    def __getattr__(self, key: str):
        return getattr(self._module, key)

    def setNumpy(self) -> None:
        self._module = np
        self.fft = np.fft
        self.linalg = np.linalg
        self.random = np.random
        self.ndi = scipy_ndi
        self.asnumpy = np.asarray
        self.asarray = np.asarray
        self.ndarray = np.ndarray
        self.empty = np.empty
        self.zeros = np.zeros
        self.ones = np.ones
        self.array = np.array
        self.exp = np.exp
        self.sin = np.sin
        self.cos = np.cos
        self.tan = np.tan
        self.sqrt = np.sqrt
        self.mean = np.mean
        self.max = np.max
        self.min = np.min
        self.median = np.median
        self.sum = np.sum
        self.prod = np.prod
        self.std = np.std
        self.meshgrid = np.meshgrid
        self.indices = np.indices
        self.arange = np.arange
        self.linspace = np.linspace
        self.real = np.real
        self.imag = np.imag
        self.conjugate = np.conjugate
        self.angle = np.angle
        self.abs = np.abs
        self.mod = np.mod
        self.fix = np.fix
        self.gradient = np.gradient
        self.tensordot = np.tensordot
        self.stack = np.stack
        self.unravel_index = np.unravel_index
        self.argmax = np.argmax
        self.argmin = np.argmin

        self.state = "numpy"
        from ._const import Const
        Const["SCHEDULER"] = "threads"

    def setCupy(self) -> None:
        import cupy as cp
        def cp_asnumpy(arr, dtype=None):
            out = cp.asnumpy(arr)
            if dtype is None:
                return out
            return out.astype(dtype)
        from cupyx.scipy import fft as cp_fft
        from cupyx.scipy import ndimage as cp_ndi
        from cupy import linalg as cp_linalg

        self._module = cp
        self.fft = cp_fft
        self.linalg = cp_linalg
        self.random = cp.random
        self.ndi = cp_ndi
        self.asnumpy = cp_asnumpy
        self.asarray = cp.asarray
        self.ndarray = cp.ndarray
        self.empty = cp.empty
        self.zeros = cp.zeros
        self.ones = cp.ones
        self.array = cp.array
        self.exp = cp.exp
        self.sin = cp.sin
        self.cos = cp.cos
        self.tan = cp.tan
        self.sqrt = cp.sqrt
        self.mean = cp.mean
        self.max = cp.max
        self.min = cp.min
        self.median = cp.median
        self.sum = cp.sum
        self.prod = cp.prod
        self.std = cp.std
        self.meshgrid = cp.meshgrid
        self.indices = cp.indices
        self.arange = cp.arange
        self.linspace = cp.linspace
        self.real = cp.real
        self.imag = cp.imag
        self.conjugate = cp.conjugate
        self.angle = cp.angle
        self.abs = cp.abs
        self.mod = cp.mod
        self.fix = cp.fix
        try:
            self.gradient = cp.gradient
        except AttributeError:
            self.gradient = _gradient
        self.tensordot = cp.tensordot
        self.stack = cp.stack
        self.unravel_index = cp.unravel_index
        self.argmax = cp.argmax
        self.argmin = cp.argmin
        self.state = "cupy"

        from ._const import Const
        Const["SCHEDULER"] = "single-threaded"

xp = XP()
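
To make the intent of the new module concrete, here is a small hypothetical sketch (not part of the commit) of how impy code is expected to call the xp proxy; the normalize and shifted functions below are invented for illustration:

import numpy as np
from impy.array_api import xp, cupy_dispatcher

def normalize(img: np.ndarray) -> np.ndarray:
    arr = xp.asarray(img)                    # numpy or cupy array, depending on xp.state
    out = (arr - xp.mean(arr)) / xp.std(arr)
    return xp.asnumpy(out)                   # always hand back a numpy.ndarray

@cupy_dispatcher
def shifted(img, shift):
    # xp.ndi is scipy.ndimage on the numpy backend and cupyx.scipy.ndimage on cupy
    return xp.ndi.shift(img, shift)

Backend switching itself goes through Const["RESOURCE"] (see the _const.py hunk above), which calls xp.setNumpy() or xp.setCupy() and adjusts the dask scheduler accordingly.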

