fix: Fixed failing docstring tests (ivy-llc#28172)
Sai-Suraj-27 authored Feb 7, 2024
1 parent 2c465d6 commit 1f5876b
Showing 28 changed files with 221 additions and 294 deletions.
2 changes: 1 addition & 1 deletion ivy/data_classes/array/experimental/activations.py
@@ -325,7 +325,7 @@ def hardtanh(
>>> x = ivy.array([-1., .2, 1.])
>>> y = x.hardtanh()
>>> print(y)
-ivy.array([-1., 1., 1.])
+ivy.array([-1. , 0.2, 1. ])
"""
return ivy.hardtanh(self._data, min_val=min_val, max_val=max_val, out=out)
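A quick sanity check of the corrected expected output (an editorial sketch, not part of the commit): with the default min_val=-1 and max_val=1, hardtanh behaves like a clip to that range, so 0.2 passes through unchanged, which is why the old expected value of 1. was wrong.

    import numpy as np
    # clipping to [-1, 1] mirrors hardtanh with its default bounds (assumption for illustration)
    print(np.clip([-1.0, 0.2, 1.0], -1.0, 1.0))  # prints [-1.   0.2  1. ]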

4 changes: 2 additions & 2 deletions ivy/data_classes/array/experimental/linear_algebra.py
@@ -832,7 +832,7 @@ def general_inner_product(
>>> a = ivy.array([1, 2, 3])
>>> b = ivy.array([4, 5, 6])
->>> result = a.general_inner_product(b, n_modes=1)
+>>> result = a.general_inner_product(b, 1)
>>> print(result)
ivy.array(32)
@@ -844,7 +844,7 @@ def general_inner_product(
>>> a = ivy.array([[1, 1], [1, 1]])
>>> b = ivy.array([[1, 2, 3, 4],[1, 1, 1, 1]])
->>> result = a.general_inner_product(b, n_modes=1)
+>>> result = a.general_inner_product(b, 1)
>>> print(result)
ivy.array([[2, 3, 4, 5],
[2, 3, 4, 5]])
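For readers checking the docstring values: assuming the n_modes=1 contraction pairs the last mode of the first argument with the first mode of the second (equivalent to np.tensordot with axes=1 — an assumption for illustration, not taken from the commit), both printed results follow directly.

    import numpy as np
    a = np.array([1, 2, 3]); b = np.array([4, 5, 6])
    print(np.tensordot(a, b, axes=1))    # 32 = 1*4 + 2*5 + 3*6
    a2 = np.array([[1, 1], [1, 1]])
    b2 = np.array([[1, 2, 3, 4], [1, 1, 1, 1]])
    print(np.tensordot(a2, b2, axes=1))  # [[2 3 4 5]
                                         #  [2 3 4 5]]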
6 changes: 3 additions & 3 deletions ivy/data_classes/array/experimental/losses.py
@@ -98,7 +98,7 @@ def log_poisson_loss(
ivy.array([1.28402555, 1.28402555, 1.03402555, 1.28402555])
>>> z = ivy.array([0.1, 0.1, 0.7, 0.1])
->>> loss = x.x.log_poisson_loss(z, reduction='mean')
+>>> loss = x.log_poisson_loss(z, reduction='mean')
>>> print(loss)
ivy.array(1.1573164)
"""
@@ -353,9 +353,9 @@ def poisson_nll_loss(
--------
>>> input_tensor = ivy.array([1, 2, 3, 4], dtype=ivy.float64)
>>> target_tensor = ivy.array([2, 2, 2, 2], dtype=ivy.float64)
->>> loss = poisson_nll_loss(input_tensor, target_tensor, log_input=True)
+>>> loss = input_tensor.poisson_nll_loss(target_tensor, log_input=True)
>>> print(loss)
-ivy.array(16.1978)
+ivy.array(16.1977562)
"""
return ivy.poisson_nll_loss(
self._data,
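The corrected value can be reproduced by hand, assuming the usual Poisson-NLL definition exp(input) - target * input with a mean reduction (the convention documented for torch.nn.functional.poisson_nll_loss; treated here as an assumption about ivy's defaults rather than something stated in the commit).

    import math
    inp = [1.0, 2.0, 3.0, 4.0]
    tgt = [2.0, 2.0, 2.0, 2.0]
    terms = [math.exp(i) - t * i for i, t in zip(inp, tgt)]
    print(sum(terms) / len(terms))  # 16.197756..., matching ivy.array(16.1977562)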
6 changes: 4 additions & 2 deletions ivy/data_classes/array/experimental/manipulation.py
@@ -1177,9 +1177,11 @@ def unflatten(
With 'ivy.Array' input:
>>> x = ivy.array([[1.2, 2.3, 3.4, 4.5],
-[5.6, 6.7, 7.8, 8.9]])
+... [5.6, 6.7, 7.8, 8.9]])
>>> dim = 1
>>> shape = (2, 2)
->>> y = x.unflatten(shape=shape, dim=dim, out=y)
+>>> y = ivy.zeros([2, 2, 2])
+>>> x.unflatten(shape=shape, dim=dim, out=y)
>>> print(y)
ivy.array([[[1.2, 2.3], [3.4, 4.5]], [[5.6, 6.7], [7.8, 8.9]]])
"""
6 changes: 3 additions & 3 deletions ivy/data_classes/array/losses.py
@@ -50,7 +50,7 @@ def cross_entropy(
>>> y = ivy.array([0.25, 0.25, 0.25, 0.25])
>>> z = x.cross_entropy(y)
>>> print(z)
-ivy.array(1.3862944)
+ivy.array(0.34657359)
"""
return ivy.cross_entropy(
self._data, pred, axis=axis, epsilon=epsilon, reduction=reduction, out=out
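The relationship between the old and new expected values is worth spelling out: -ln(0.25) ≈ 1.3862944 is the unreduced cross-entropy for the one-hot class, and the corrected value is that number divided by 4, the length of the input — consistent with a mean-style reduction (an inference from the numbers; the commit itself does not state the reason).

    import math
    old = -math.log(0.25)  # 1.3862943611..., the previous docstring value
    print(old / 4)         # 0.3465735902..., matching ivy.array(0.34657359)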
@@ -110,7 +110,7 @@ def binary_cross_entropy(
>>> y = ivy.array([0.7, 0.8, 0.2])
>>> z = x.binary_cross_entropy(y)
>>> print(z)
-ivy.array([0.357, 0.223, 0.223])
+ivy.array(0.26765382)
"""
return ivy.binary_cross_entropy(
self._data,
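Here the old docstring printed the per-element losses while the corrected one prints a single scalar; the new number is simply the mean of the old three, which again points to a mean reduction being applied by default (an inference from the numbers, not stated in the commit).

    import math
    per_elem = [-math.log(0.7), -math.log(0.8), -math.log(0.8)]  # ≈ the old [0.357, 0.223, 0.223]
    print(sum(per_elem) / 3)  # ≈ 0.26765, matching ivy.array(0.26765382) up to float32 precision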
@@ -170,7 +170,7 @@ def sparse_cross_entropy(
>>> y = ivy.array([0.7, 0.8, 0.2])
>>> z = x.sparse_cross_entropy(y)
>>> print(z)
-ivy.array([0.223, 0.223, 0.357])
+ivy.array([0.07438118, 0.07438118, 0.11889165])
"""
return ivy.sparse_cross_entropy(
self._data, pred, axis=axis, epsilon=epsilon, reduction=reduction, out=out
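The same pattern holds for the sparse variant: each corrected element is the old one divided by 3, the length of the prediction vector (again an inference from the numbers alone).

    old = [0.22314355, 0.22314355, 0.35667494]  # ≈ the previous [0.223, 0.223, 0.357]
    print([v / 3 for v in old])  # ≈ [0.07438118, 0.07438118, 0.11889165]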
13 changes: 6 additions & 7 deletions ivy/data_classes/container/experimental/activations.py
@@ -968,10 +968,10 @@ def _static_hardtanh(
Examples
--------
>>> x = x = ivy.Container(a=ivy.array([0.39, -2.0]), b=ivy.array([2., -0.2]))
->>> y = ivy.Container.static_hardtanh(x)
+>>> y = ivy.Container._static_hardtanh(x)
>>> print(y)
{
-a: ivy.array([0.39, -1.]),
+a: ivy.array([0.3899, -1.]),
b: ivy.array([1., -0.2])
}
"""
@@ -1033,11 +1033,11 @@ def hardtanh(
Examples
--------
->>> x = x = ivy.Container(a=ivy.array([0.39, -2.0]), b=ivy.array([2., -0.2]))
->>> y = ivy.Container.static_hardtanh(x)
+>>> x = ivy.Container(a=ivy.array([0.39, -2.0]), b=ivy.array([2., -0.2]))
+>>> y = ivy.Container.hardtanh(x)
>>> print(y)
{
-a: ivy.array([0.39, -1.]),
+a: ivy.array([0.389999, -1.]),
b: ivy.array([1., -0.2])
}
"""
@@ -1820,8 +1820,7 @@ def hardshrink(
Examples
--------
->>> import ivy.numpy as np
->>> x = ivy.Container(a=np.array([1., -2.]), b=np.array([0.4, -0.2]))
+>>> x = ivy.Container(a=ivy.array([1., -2.]), b=ivy.array([0.4, -0.2]))
>>> y = ivy.Container.hardshrink(x)
>>> print(y)
{
12 changes: 6 additions & 6 deletions ivy/data_classes/container/experimental/losses.py
@@ -143,7 +143,7 @@ def l1_loss(
>>> z = x.l1_loss(y)
>>> print(z)
{
-a: ivy.array(1.),
+a: ivy.array(0.),
b: ivy.array(0.)
}
"""
@@ -314,8 +314,8 @@ def log_poisson_loss(
>>> z = x.log_poisson_loss(y)
>>> print(z)
{
-a: ivy.array(1.),
-b: ivy.array(0.)
+a: ivy.array(3.3890561),
+b: ivy.array(123.413159)
}
"""
return self._static_log_poisson_loss(
@@ -478,12 +478,12 @@ def smooth_l1_loss(
--------
>>> x = ivy.Container(a=ivy.array([1, 0, 2]), b=ivy.array([3, 2, 1]))
>>> y = ivy.Container(a=ivy.array([0.6, 0.2, 0.3]),
-b=ivy.array([0.8, 0.2, 0.2]))
+... b=ivy.array([0.8, 0.2, 0.2]))
>>> z = x.smooth_l1_loss(y)
>>> print(z)
{
-a: ivy.array(0.9),
-b: ivy.array(0.25)
+a: ivy.array(0.43333333),
+b: ivy.array(1.10666666)
}
"""
return self._static_smooth_l1_loss(
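The corrected container values check out under the standard smooth-L1 definition with beta=1 and a mean reduction (assumed defaults, following the common PyTorch convention rather than anything stated in the commit).

    def smooth_l1(pred, target, beta=1.0):
        # elementwise smooth-L1 followed by a mean reduction (assumed defaults)
        total = 0.0
        for p, t in zip(pred, target):
            d = abs(p - t)
            total += 0.5 * d * d / beta if d < beta else d - 0.5 * beta
        return total / len(pred)

    print(smooth_l1([1, 0, 2], [0.6, 0.2, 0.3]))  # ≈ 0.43333, matching a
    print(smooth_l1([3, 2, 1], [0.8, 0.2, 0.2]))  # ≈ 1.10667, matching b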
10 changes: 5 additions & 5 deletions ivy/data_classes/container/experimental/manipulation.py
@@ -4212,19 +4212,19 @@ def unflatten(
With 'ivy.Container' input:
>>> x = ivy.Container(a = ivy.array([[True, False, False, True],
-[False, True, False, True]])),
+... [False, True, False, True]]),
... b = ivy.array([[1.2, 2.3, 3.4, 4.5],
-[5.6, 6.7, 7.8, 8.9]]),
+... [5.6, 6.7, 7.8, 8.9]]),
... c = ivy.array([[1, 2, 3, 4],
-[5, 6, 7, 8]]))
+... [5, 6, 7, 8]]))
>>> dim = 1
>>> shape = (2, 2)
>>> y = x.unflatten(shape=shape, dim=dim)
>>> print(y)
{
a: ivy.array([[[True, False], [False, True]],
-[[False, True], [False, True]]])
-b: ivy.array([[[1.2, 2.3], [3.4, 4.5]], [[5.6, 6.7], [7.8, 8.9]]])
+[[False, True], [False, True]]]),
+b: ivy.array([[[1.2, 2.3], [3.4, 4.5]], [[5.6, 6.7], [7.8, 8.9]]]),
c: ivy.array([[[1, 2], [3, 4]], [[5, 6], [7, 8]]])
}
"""
12 changes: 6 additions & 6 deletions ivy/data_classes/container/losses.py
@@ -157,8 +157,8 @@ def cross_entropy(
>>> z = x.cross_entropy(y)
>>> print(z)
{
-a:ivy.array(0.5108256),
-b:ivy.array(1.609438)
+a: ivy.array(0.17027519),
+b: ivy.array(0.53647931)
}
"""
return self._static_cross_entropy(
@@ -348,8 +348,8 @@ def binary_cross_entropy(
>>> z = x.binary_cross_entropy(y)
>>> print(z)
{
-a: ivy.array([0.511, 0.223, 0.357]),
-b: ivy.array([1.61, 0.223, 1.61])
+a: ivy.array(0.36354783),
+b: ivy.array(1.14733934)
}
"""
return self._static_binary_cross_entropy(
@@ -517,8 +517,8 @@ def sparse_cross_entropy(
>>> z = x.sparse_cross_entropy(y)
>>> print(z)
{
-a: ivy.array([1.61, 0.511, 0.511]),
-b: ivy.array([0.223, 0.223, 1.61])
+a: ivy.array([0.53647929, 0.1702752, 0.1702752]),
+b: ivy.array([0.07438118, 0.07438118, 0.53647929])
}
"""
return self._static_sparse_cross_entropy(
4 changes: 2 additions & 2 deletions ivy/functional/backends/tensorflow/experimental/layers.py
@@ -1403,8 +1403,8 @@ def rfft_operations(x, rank, norm_factor):
},
)
norm_factor = tf.cast(norm_factor, tf.complex128)
-x = x / norm_factor
x = tf.cast(x, tf.complex128)
+x = x / norm_factor
return x
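The reorder matters because TensorFlow does not implicitly promote operand dtypes in binary ops: dividing a real-valued tensor by the complex128 norm_factor raises a dtype-mismatch error, so the cast has to happen first. A minimal sketch of the failure mode (assuming x is still real-valued at this point):

    import tensorflow as tf
    x = tf.ones([4], dtype=tf.float64)
    norm_factor = tf.cast(tf.constant(2.0), tf.complex128)
    # x / norm_factor          # would fail: float64 and complex128 operands do not match
    x = tf.cast(x, tf.complex128)
    print(x / norm_factor)     # fine once both operands share a dtype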


@@ -1542,7 +1542,7 @@ def rfftn(
s: Optional[Union[int, Tuple[int]]] = None,
axes: Optional[Union[int, Tuple[int]]] = None,
*,
-norm: Optional[str] = [("forward", "ortho", "backward")],
+norm: str = "backward",
out: Optional[Union[tf.Tensor, tf.Variable]] = None,
) -> Union[tf.Tensor, tf.Variable]:
result = _rfftn_helper(x, s, axes, norm)
2 changes: 1 addition & 1 deletion ivy/functional/ivy/creation.py
@@ -2021,7 +2021,7 @@ def one_hot(
}
>>> x = ivy.Container(a=ivy.array([2]), \
-b=ivy.array([]), c=ivy.native_array([4]))
+b=ivy.array([], dtype=ivy.int32), c=ivy.native_array([4]))
>>> y = 7
>>> z = x.one_hot(y)
>>> print(z)
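The only change here is pinning the empty branch to an integer dtype. An empty array literal otherwise defaults to a float dtype, and one-hot indices need to be integers — presumably why this docstring example failed (an inference; shown with NumPy purely for illustration).

    import numpy as np
    print(np.array([]).dtype)                  # float64 — the default for an empty literal
    print(np.array([], dtype=np.int32).dtype)  # int32 — a valid index dtype for one-hot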
4 changes: 2 additions & 2 deletions ivy/functional/ivy/data_type.py
@@ -680,7 +680,7 @@ def finfo(
>>> x = ivy.array([1.3,2.1,3.4], dtype=ivy.float64)
>>> print(ivy.finfo(x))
-finfo(resolution=1e-15, min=-1.7976931348623157e+308, /
+finfo(resolution=1e-15, min=-1.7976931348623157e+308, \
max=1.7976931348623157e+308, dtype=float64)
>>> x = ivy.array([0.7,8.4,3.14], dtype=ivy.float16)
@@ -694,7 +694,7 @@
>>> print(ivy.finfo(c))
{
x: finfo(resolution=0.001, min=-6.55040e+04, max=6.55040e+04, dtype=float16),
-y: finfo(resolution=1e-15, min=-1.7976931348623157e+308, /
+y: finfo(resolution=1e-15, min=-1.7976931348623157e+308, \
max=1.7976931348623157e+308, dtype=float64)
}
"""
8 changes: 7 additions & 1 deletion ivy/functional/ivy/device.py
@@ -743,7 +743,7 @@ def tpu_is_available() -> bool:
--------
>>> ivy.set_backend("torch")
>>> print(ivy.tpu_is_available())
-True
+False
"""
return ivy.current_backend().tpu_is_available()

@@ -1221,7 +1221,13 @@ def function_supported_devices(
Examples
--------
>>> import ivy
>>> ivy.set_backend('numpy')
>>> print(ivy.function_supported_devices(ivy.ones))
('cpu',)
>>> ivy.set_backend('torch')
>>> x = ivy.function_supported_devices(ivy.ones)
>>> x = sorted(x)
('cpu', 'gpu')
"""
ivy.utils.assertions.check_true(
(remaining changed files not shown)
