Merge branch 'unifyai:master' into dist
BigBalloon8 authored Aug 4, 2023
2 parents 394c734 + 951f116 commit f196ec7
Showing 32 changed files with 818 additions and 66 deletions.
9 changes: 7 additions & 2 deletions ivy/data_classes/array/activations.py
@@ -1,6 +1,6 @@
# global
import abc
from typing import Optional, Union
from typing import Optional, Union, Literal

# local
import ivy
@@ -44,6 +44,7 @@ def leaky_relu(
*,
alpha: float = 0.2,
out: Optional[ivy.Array] = None,
complex_mode: Literal["split", "magnitude", "jax"] = "jax",
) -> ivy.Array:
"""
ivy.Array instance method variant of ivy.leaky_relu. This method simply wraps
@@ -59,6 +60,8 @@ def leaky_relu(
out
optional output array, for writing the result to. It must have a shape
that the inputs broadcast to.
complex_mode
optional specifier for how to handle complex data types.
Returns
-------
@@ -72,7 +75,9 @@
>>> print(y)
ivy.array([ 0.39, -0.17])
"""
return ivy.leaky_relu(self._data, alpha=alpha, out=out)
return ivy.leaky_relu(
self._data, alpha=alpha, out=out, complex_mode=complex_mode
)

def gelu(
self: ivy.Array,
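
For context, a minimal usage sketch of the new complex_mode argument on the Array method (the input values are illustrative, and outputs are omitted since they depend on the chosen mode and backend):

>>> import ivy
>>> x = ivy.array([0.39+0.5j, -0.85-0.2j])
>>> # "split" applies leaky_relu separately to the real and imaginary parts
>>> y_split = x.leaky_relu(alpha=0.2, complex_mode="split")
>>> # "magnitude" applies leaky_relu to |x| while keeping the phase unchanged
>>> y_mag = x.leaky_relu(alpha=0.2, complex_mode="magnitude")
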
27 changes: 27 additions & 0 deletions ivy/data_classes/array/experimental/layers.py
@@ -832,6 +832,33 @@ def adaptive_avg_pool2d(
output_size,
)

def adaptive_max_pool2d(
self: ivy.Array,
output_size: Union[Sequence[int], int],
) -> ivy.Array:
"""
Apply a 2D adaptive maximum pooling over an input signal composed of several
input planes.
Parameters
----------
self
Input array. Must have shape (N, C, H_in, W_in) or (C, H_in, W_in) where N
is the batch dimension, C is the feature dimension, and H_in and W_in are
the 2 spatial dimensions.
output_size
Spatial output size.
Returns
-------
The result of the pooling operation. Will have shape (N, C, S_0, S_1) or
(C, S_0, S_1), where S = `output_size`
"""
return ivy.adaptive_max_pool2d(
self._data,
output_size,
)

def reduce_window(
self: ivy.Array,
init_value: Union[int, float],
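
For context, a minimal shape-level sketch of the new adaptive_max_pool2d Array method (the input is arbitrary; only the shapes matter):

>>> import ivy
>>> x = ivy.zeros((1, 3, 32, 32))        # (N, C, H_in, W_in)
>>> y = x.adaptive_max_pool2d((7, 7))    # result has shape (N, C, 7, 7)
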
10 changes: 9 additions & 1 deletion ivy/data_classes/container/activations.py
@@ -1,7 +1,7 @@
# local
import ivy
from ivy.data_classes.container.base import ContainerBase
from typing import Optional, Union, List, Dict
from typing import Optional, Union, List, Dict, Literal


# ToDo: implement all methods here as public instance methods
@@ -140,6 +140,7 @@ def _static_leaky_relu(
prune_unapplied: Union[bool, ivy.Container] = False,
map_sequences: Union[bool, ivy.Container] = False,
out: Optional[ivy.Container] = None,
complex_mode: Literal["split", "magnitude", "jax"] = "jax",
) -> ivy.Container:
"""
ivy.Container static method variant of ivy.leaky_relu. This method simply wraps
@@ -166,6 +167,8 @@ def _static_leaky_relu(
out
optional output container, for writing the result to. It must have a shape
that the inputs broadcast to.
complex_mode
optional specifier for how to handle complex data types.
Returns
-------
@@ -191,6 +194,7 @@
prune_unapplied=prune_unapplied,
map_sequences=map_sequences,
out=out,
complex_mode=complex_mode,
)

def leaky_relu(
@@ -203,6 +207,7 @@ def leaky_relu(
prune_unapplied: Union[bool, ivy.Container] = False,
map_sequences: Union[bool, ivy.Container] = False,
out: Optional[ivy.Container] = None,
complex_mode: Literal["split", "magnitude", "jax"] = "jax",
) -> ivy.Container:
"""
ivy.Container instance method variant of ivy.leaky_relu. This method simply
@@ -229,6 +234,8 @@ def leaky_relu(
out
optional output container, for writing the result to. It must have a shape
that the inputs broadcast to.
complex_mode
optional specifier for how to handle complex data types.
Returns
-------
@@ -253,6 +260,7 @@
prune_unapplied=prune_unapplied,
map_sequences=map_sequences,
out=out,
complex_mode=complex_mode,
)

@staticmethod
72 changes: 72 additions & 0 deletions ivy/data_classes/container/experimental/layers.py
@@ -1910,6 +1910,78 @@ def adaptive_avg_pool2d(
map_sequences=map_sequences,
)

@staticmethod
def static_adaptive_max_pool2d(
input: Union[ivy.Array, ivy.NativeArray, ivy.Container],
output_size: Union[Sequence[int], int, ivy.Container],
*,
key_chains: Optional[Union[List[str], Dict[str, str], ivy.Container]] = None,
to_apply: Union[bool, ivy.Container] = True,
prune_unapplied: Union[bool, ivy.Container] = False,
map_sequences: Union[bool, ivy.Container] = False,
) -> ivy.Container:
"""
ivy.Container static method variant of ivy.adaptive_max_pool2d. This method
simply wraps the function, and so the docstring for ivy.adaptive_max_pool2d also
applies to this method with minimal changes.
Parameters
----------
input
Input array. Must have shape (N, C, H_in, W_in) or (C, H_in, W_in) where N
is the batch dimension, C is the feature dimension, and H_in and W_in are
the 2 spatial dimensions.
output_size
Spatial output size.
Returns
-------
The result of the pooling operation. Will have shape (N, C, S_0, S_1) or
(C, S_0, S_1), where S = `output_size`
"""
return ContainerBase.cont_multi_map_in_function(
"adaptive_max_pool2d",
input,
output_size,
key_chains=key_chains,
to_apply=to_apply,
prune_unapplied=prune_unapplied,
map_sequences=map_sequences,
)

def adaptive_max_pool2d(
self: ivy.Container,
output_size: Union[int, ivy.Container],
*,
key_chains: Optional[Union[List[str], Dict[str, str], ivy.Container]] = None,
to_apply: Union[bool, ivy.Container] = True,
prune_unapplied: Union[bool, ivy.Container] = False,
map_sequences: Union[bool, ivy.Container] = False,
) -> ivy.Container:
"""
Apply a 2D adaptive maximum pooling over an input signal composed of several
input planes.
Parameters
----------
self
Input container.
output_size
Spatial output size.
Returns
-------
The result of the pooling operation.
"""
return self.static_adaptive_max_pool2d(
self,
output_size,
key_chains=key_chains,
to_apply=to_apply,
prune_unapplied=prune_unapplied,
map_sequences=map_sequences,
)

@staticmethod
def static_ifftn(
x: ivy.Container,
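
Similarly, a minimal sketch of the Container variant, which maps the pooling over each leaf array (shapes are illustrative):

>>> import ivy
>>> c = ivy.Container(a=ivy.zeros((1, 3, 32, 32)), b=ivy.zeros((1, 3, 64, 64)))
>>> out = c.adaptive_max_pool2d((7, 7))   # each leaf is pooled to (1, 3, 7, 7)
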
126 changes: 125 additions & 1 deletion ivy/func_wrapper.py
Expand Up @@ -6,13 +6,16 @@
import warnings
import copy as python_copy
from types import FunctionType
from typing import Callable
from typing import Callable, Literal
import inspect
import numpy as np

from ivy.utils.exceptions import IvyValueError


# for wrapping (sequence matters)
FN_DECORATORS = [
"handle_complex_input",
"infer_device",
"handle_device_shifting",
"infer_dtype",
@@ -1385,6 +1388,127 @@ def _handle_nans(*args, **kwargs):
return _handle_nans


# Complex number handling #
# ----------------------- #
def handle_complex_input(fn: Callable) -> Callable:
@functools.wraps(fn)
def _handle_complex_input(
inp,
*args,
complex_mode: Literal["split", "magnitude", "jax"] = "jax",
**kwargs,
):
"""
Check whether the first positional argument is an array of complex type, and if
so handle it according to the provided `complex_mode`.
The options are:
`"jax"` (default): emulate the behaviour of the JAX framework. If the function
has a `jax_like` attribute then this will be used to decide on the
behaviour (see below) and if not, then the entire array will be passed to
the function.
`"split"`: execute the function separately on the real and imaginary parts of
the input.
`"magnitude"`: execute the function on the magnitude of the input, and keep the
angle constant.
The `jax_like` attribute (which should be added to the function itself, and not
passed as a parameter) has the following options:
`"entire"` (default): pass the entire input to the function. This is best used
for purely mathematical operators which are already well defined on complex
inputs, as many backends will throw exceptions otherwise.
`"split"`: as the `"split"` option for `complex_mode`
`"magnitude"`: as the `"magnitude"` option for `complex_mode`
A callable function: the function will be called instead of the originally
decorated function. It will be passed `inp` and `*args` as positional
arguments, and the original `**kwargs` plus `fn_original` as keyword
arguments. The latter is the original function, in case the `jax_like`
function wishes to call it.
Parameters
----------
inp
The first positional argument to the function, which is expected to be an
:class:`ivy.Array`.
args
The remaining positional arguments to be passed to the function.
complex_mode
Optional argument which specifies the method that will be used to handle
the input, if it is complex.
kwargs
The keyword arguments to be passed to the function.
Returns
-------
The return of the function, with handling of inputs based
on the selected `complex_mode`.
Examples
--------
Using the default `jax_like` behaviour
>>> @handle_complex_input
>>> def my_func(inp):
>>> return ivy.ones_like(inp)
>>> x = ivy.array([1+1j, 3+4j, 5+12j])
>>> my_func(x) # equivalent to setting complex_mode="jax"
ivy.array([1.+0.j, 1.+0.j, 1.+0.j])
>>> my_func(x, complex_mode="split")
ivy.array([1.+1.j, 1.+1.j, 1.+1.j])
>>> my_func(x, complex_mode="magnitude")
ivy.array([0.70710681+0.70710675j, 0.60000001+0.79999999j,
0.38461535+0.92307694j])
Using non-default `jax_like` behaviour
>>> @handle_complex_input
>>> def my_func(inp):
>>> return ivy.ones_like(inp)
>>> my_func.jax_like = "split"
>>> my_func(x, complex_mode="jax")
ivy.array([1.+1.j, 1.+1.j, 1.+1.j])
Using callable `jax_like` behaviour
>>> def _my_func_jax_like(inp, fn_original=None):
>>> return fn_original(inp) * 3j
>>> @handle_complex_input
>>> def my_func(inp):
>>> return ivy.ones_like(inp)
>>> my_func.jax_like = _my_func_jax_like
>>> my_func(x, complex_mode="jax")
ivy.array([0.+3.j, 0.+3.j, 0.+3.j])
"""
if not ivy.is_complex_dtype(inp):
return fn(inp, *args, **kwargs)

jax_like = fn.jax_like if hasattr(fn, "jax_like") else "entire"

if complex_mode == "split" or (complex_mode == "jax" and jax_like == "split"):
real_inp = ivy.real(inp)
imag_inp = ivy.imag(inp)
return fn(real_inp, *args, **kwargs) + 1j * fn(imag_inp, *args, **kwargs)

elif complex_mode == "magnitude" or (
complex_mode == "jax" and jax_like == "magnitude"
):
mag_inp = ivy.abs(inp)
angle_inp = ivy.angle(inp)
return fn(mag_inp, *args, **kwargs) * ivy.exp(1j * angle_inp)

elif complex_mode == "jax" and jax_like == "entire":
return fn(inp, *args, **kwargs)

elif complex_mode == "jax":
return jax_like(inp, *args, **kwargs, fn_original=fn)

else:
raise IvyValueError(f"complex_mode '{complex_mode}' is not recognised.")

_handle_complex_input.handle_complex_input = True
return _handle_complex_input


attribute_dict = {
"unsupported_dtypes",
"supported_dtypes",
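
The Array and Container methods above forward complex_mode to the functional API, where this wrapper consumes it. A hypothetical sketch of how a functional-API definition would opt in (the actual functional-API edits are elsewhere among the 32 changed files and are not shown in this excerpt):

# hypothetical illustration -- not the literal ivy source
@handle_complex_input
def leaky_relu(x, /, *, alpha=0.2, out=None):
    ...  # backend dispatch happens via ivy's other wrappers

# optional attribute telling the wrapper what complex_mode="jax" should do here
leaky_relu.jax_like = "split"
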
3 changes: 3 additions & 0 deletions ivy/functional/backends/paddle/activations.py
@@ -38,6 +38,9 @@ def relu(x: paddle.Tensor, /, *, out: Optional[paddle.Tensor] = None) -> paddle.Tensor:
return F.relu(x)


@with_unsupported_device_and_dtypes(
{"2.5.1 and below": {"cpu": ("bfloat16",)}}, backend_version
)
def leaky_relu(
x: paddle.Tensor,
/,
9 changes: 9 additions & 0 deletions ivy/functional/backends/paddle/experimental/layers.py
@@ -381,6 +381,15 @@ def interpolate(
raise IvyNotImplementedException()


def adaptive_max_pool2d(
input: paddle.Tensor, output_size: Union[Sequence[int], int]
) -> paddle.Tensor:
squeeze = input.ndim == 3
x = paddle.unsqueeze(input, axis=0) if squeeze else input
ret = paddle.nn.functional.adaptive_max_pool2d(x, output_size)
return paddle.squeeze(ret, axis=0) if squeeze else ret


def ifftn(
x: paddle.Tensor,
s: Optional[Union[int, Tuple[int]]] = None,
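
The unsqueeze/squeeze pair above lets the backend also accept an unbatched (C, H_in, W_in) input; a minimal sketch using the function defined above (shapes are illustrative):

>>> import paddle
>>> x = paddle.zeros([3, 32, 32])       # (C, H_in, W_in), no batch dimension
>>> y = adaptive_max_pool2d(x, (7, 7))  # unsqueezed to 4-D internally; result is (C, 7, 7)
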
1 change: 0 additions & 1 deletion ivy/functional/backends/tensorflow/activations.py
@@ -24,7 +24,6 @@ def gelu(
return tf.nn.gelu(x, approximate)


@with_unsupported_dtypes({"2.13.0 and below": ("complex",)}, backend_version)
def leaky_relu(
x: Tensor, /, *, alpha: float = 0.2, out: Optional[Tensor] = None
) -> Tensor:
2 changes: 1 addition & 1 deletion ivy/functional/backends/torch/activations.py
@@ -22,7 +22,7 @@ def relu(x: torch.Tensor, /, *, out: Optional[torch.Tensor] = None) -> torch.Tensor:
return torch.relu(x)


@with_unsupported_dtypes({"2.0.1 and below": ("complex", "float16")}, backend_version)
@with_unsupported_dtypes({"2.0.1 and below": ("float16",)}, backend_version)
def leaky_relu(
x: torch.Tensor,
/,