feat(losses): add hinge_embedding_loss to ivy experimental API. (#27048)
Co-authored-by: ivy-branch <ivy.branch@lets-unify.ai>
akshatvishu and ivy-branch authored Jan 1, 2024
1 parent 40b95ae commit a5aea35
Showing 9 changed files with 581 additions and 14 deletions.
81 changes: 81 additions & 0 deletions ivy/data_classes/array/experimental/losses.py
@@ -365,3 +365,84 @@ def poisson_nll_loss(
eps=eps,
reduction=reduction,
)

def hinge_embedding_loss(
self: Union[ivy.Array, ivy.NativeArray],
target: Union[ivy.Array, ivy.NativeArray],
*,
margin: float = 1.0,
reduction: str = "mean",
) -> ivy.Array:
r"""Measures loss from input `x` and label `y` with values 1 or -1. It
evaluates if two inputs are similar or not, often used for embedding or
semi-supervised learning.
Loss for the `n`-th sample:
.. math::
l_n = \begin{cases}
x_n, & \text{if}\; y_n = 1,\\
\max \{0, margin - x_n\}, & \text{if}\; y_n = -1,
\end{cases}
Total loss:
.. math::
\ell(x, y) = \begin{cases}
\operatorname{mean}(L), & \text{if reduction} = \text{`mean';}\\
\operatorname{sum}(L), & \text{if reduction} = \text{`sum'.}
\end{cases}
where :math:`L = \{l_1,\dots,l_N\}^\top`
Parameters
----------
self
    Input tensor with dtype float.
    The shape is [N, \*], where N is the batch size and `\*` represents
    any number of additional dimensions.
target
    Label tensor containing 1 or -1 with dtype float32 or float64.
    Its shape matches that of the input.
margin
    The margin hyperparameter, used only where the label is -1: inputs
    smaller than the margin contribute ``max(0, margin - x)`` to the
    loss, while inputs at or above the margin contribute zero.
    Default is 1.0.
reduction
Specifies how to aggregate the loss across the batch. Options are:
- ``'none'``: Returns the unreduced loss.
- ``'mean'``: Returns the mean loss.
- ``'sum'``: Returns the summed loss.
Default is ``'mean'``.
Shape
-----
- Input: :math:`(*)` where :math:`*` means any number of dimensions. \
The sum operation operates over all the elements.
- Target: :math:`(*)`, same shape as the input
- Output: scalar. If :attr:`reduction` is ``'none'``,
then same shape as the input
Returns
-------
ret
Hinge embedding loss calculated from the input and label,
shaped based on the reduction method.
Examples
--------
>>> input_tensor = ivy.array([1, 2, 3, 4], dtype=ivy.float64)
>>> target_tensor = ivy.array([1, 1, 1, 1], dtype=ivy.float64)
>>> input_tensor.hinge_embedding_loss(target_tensor, reduction="sum")
ivy.array(10.)
>>> input_tensor = ivy.array([1, 2, 3], dtype=ivy.float64)
>>> target_tensor = ivy.array([1, -1, -1], dtype=ivy.float64)
>>> input_tensor.hinge_embedding_loss(target_tensor, margin=2.0)
ivy.array(0.33333333)
"""
return ivy.hinge_embedding_loss(
self._data,
target,
margin=margin,
reduction=reduction,
)
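
As a quick illustration of the new instance method, here is a minimal usage sketch; it assumes the NumPy backend is active and reproduces the mean-reduction example from the docstring above:

import ivy

ivy.set_backend("numpy")  # assumes a backend implementing hinge_embedding_loss

x = ivy.array([1.0, 2.0, 3.0], dtype=ivy.float64)
y = ivy.array([1.0, -1.0, -1.0], dtype=ivy.float64)

# Per-sample terms: l_1 = x_1 = 1 (y_1 = 1), l_2 = max(0, 2 - 2) = 0,
# l_3 = max(0, 2 - 3) = 0; the default "mean" reduction gives 1/3.
print(x.hinge_embedding_loss(y, margin=2.0))  # ivy.array(0.33333333)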
183 changes: 183 additions & 0 deletions ivy/data_classes/container/experimental/losses.py
@@ -1089,3 +1089,186 @@ def poisson_nll_loss(
prune_unapplied=prune_unapplied,
map_sequences=map_sequences,
)

@staticmethod
def _static_hinge_embedding_loss(
input: Union[ivy.Container, ivy.Array, ivy.NativeArray],
target: Union[ivy.Container, ivy.Array, ivy.NativeArray],
*,
margin: Union[float, ivy.Container] = 1.0,
reduction: Union[str, ivy.Container] = "mean",
key_chains: Optional[Union[List[str], Dict[str, str], ivy.Container]] = None,
to_apply: Union[bool, ivy.Container] = True,
prune_unapplied: Union[bool, ivy.Container] = False,
map_sequences: Union[bool, ivy.Container] = False,
) -> ivy.Container:
r"""ivy.Container static method variant of ivy.hinge_embedding_loss.
This method simplywraps the function, and so the docstring for
ivy.hinge_embedding_loss also applies to this method with minimal
changes.
Parameters
----------
input
    input array or container containing the input values (predictions).
target
    input array or container containing the target labels (1 or -1).
margin
    The margin hyperparameter, used only where the label is -1: inputs
    smaller than the margin contribute ``max(0, margin - x)`` to the
    loss, while inputs at or above the margin contribute zero.
    Default is 1.0.
reduction
Specifies how to aggregate the loss across the batch. Options are:
- ``'none'``: Returns the unreduced loss.
- ``'mean'``: Returns the mean loss.
- ``'sum'``: Returns the summed loss.
Default is ``'mean'``.
key_chains
The key-chains to apply or not apply the method to. Default is ``None``.
to_apply
If True, the method will be applied to key_chains, otherwise key_chains
will be skipped. Default is ``True``.
prune_unapplied
Whether to prune key_chains for which the function was not applied.
Default is ``False``.
map_sequences
Whether to also map method to sequences (lists, tuples).
Default is ``False``.
Shape
-----
- Input: :math:`(*)` where :math:`*` means any number of dimensions. \
The sum operation operates over all the elements.
- Target: :math:`(*)`, same shape as the input
- Output: scalar. If :attr:`reduction` is ``'none'``,
then same shape as the input
Returns
-------
ret
Hinge embedding loss calculated from the input and label,
shaped based on the reduction method.
Examples
--------
With :class:`ivy.Container` inputs:
>>> x = ivy.Container(a=ivy.array([[1, 0, 2]], dtype=ivy.float32),
... b=ivy.array([[-1, 1, 1]], dtype=ivy.float32))
>>> y = ivy.Container(a=ivy.array([[0.6, 0.2, 0.3]], dtype=ivy.float32),
... b=ivy.array([[1, 1, 1]], dtype=ivy.float32))
>>> z = ivy.Container._static_hinge_embedding_loss(x, y, reduction="none")
>>> z
{
a: ivy.array([[0., 0., 0.]]),
b: ivy.array([[-1., 1., 1.]])
}
With a mix of :class:`ivy.Array` and :class:`ivy.Container` inputs:
>>> x = ivy.array([[10, 20, 32]], dtype=ivy.float32)
>>> y = ivy.Container(a=ivy.array([[-1, -1, -1]], dtype=ivy.float32),
... b=ivy.array([[1, 1, 1]], dtype=ivy.float32))
>>> z = ivy.Container._static_hinge_embedding_loss(x, y,
... reduction="sum", margin=2.0)
>>> z
{
a: ivy.array(0.),
b: ivy.array(62.)
}
"""
return ContainerBase.cont_multi_map_in_function(
"hinge_embedding_loss",
input,
target,
margin=margin,
reduction=reduction,
key_chains=key_chains,
to_apply=to_apply,
prune_unapplied=prune_unapplied,
map_sequences=map_sequences,
)

def hinge_embedding_loss(
self: Union[ivy.Container, ivy.Array, ivy.NativeArray],
target: Union[ivy.Container, ivy.Array, ivy.NativeArray],
*,
margin: Union[float, ivy.Container] = 1.0,
reduction: Union[str, ivy.Container] = "mean",
key_chains: Optional[Union[List[str], Dict[str, str], ivy.Container]] = None,
to_apply: Union[bool, ivy.Container] = True,
prune_unapplied: Union[bool, ivy.Container] = False,
map_sequences: Union[bool, ivy.Container] = False,
) -> ivy.Container:
r"""ivy.Container instance method variant of ivy.hinge_embedding_loss.
This method simply wraps the function, and so the docstring for
ivy.hinge_embedding_loss also applies to this method with minimal
changes.
Parameters
----------
input
    input array or container containing the input values (predictions).
target
    input array or container containing the target labels (1 or -1).
margin
    The margin hyperparameter, used only where the label is -1: inputs
    smaller than the margin contribute ``max(0, margin - x)`` to the
    loss, while inputs at or above the margin contribute zero.
    Default is 1.0.
reduction
Specifies how to aggregate the loss across the batch. Options are:
- ``'none'``: Returns the unreduced loss.
- ``'mean'``: Returns the mean loss.
- ``'sum'``: Returns the summed loss.
Default is ``'mean'``.
key_chains
The key-chains to apply or not apply the method to. Default is ``None``.
to_apply
If True, the method will be applied to key_chains, otherwise key_chains
will be skipped. Default is ``True``.
prune_unapplied
Whether to prune key_chains for which the function was not applied.
Default is ``False``.
map_sequences
Whether to also map method to sequences (lists, tuples).
Default is ``False``.
Shape
-----
- Input: :math:`(*)` where :math:`*` means any number of dimensions. \
The sum operation operates over all the elements.
- Target: :math:`(*)`, same shape as the input
- Output: scalar. If :attr:`reduction` is ``'none'``,
then same shape as the input
Returns
-------
ret
Hinge embedding loss calculated from the input and label,
shaped based on the reduction method.
Examples
--------
>>> x = ivy.Container(a=ivy.array([[1, 0, 2]], dtype=ivy.float32),
... b=ivy.array([[3, 2, 1]], dtype=ivy.float32))
>>> y = ivy.Container(a=ivy.array([[-1, -1, -1]], dtype=ivy.float32),
... b=ivy.array([[1, 1, 1]], dtype=ivy.float32))
>>> x.hinge_embedding_loss(y, reduction="none", margin=0.5)
{
a: ivy.array([[0., 0.5, 0.]]),
b: ivy.array([[3., 2., 1.]])
}
"""
return self._static_hinge_embedding_loss(
self,
target,
margin=margin,
reduction=reduction,
key_chains=key_chains,
to_apply=to_apply,
prune_unapplied=prune_unapplied,
map_sequences=map_sequences,
)
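
For intuition, `cont_multi_map_in_function` effectively applies the functional `ivy.hinge_embedding_loss` to each pair of matching leaves in the containers. A simplified, hypothetical sketch of that recursion follows; the real helper additionally handles key chains, pruning, and sequence mapping, and `leafwise_loss` is an illustrative name, not part of the API:

import ivy

def leafwise_loss(input, target, *, margin=1.0, reduction="mean"):
    # Recurse through matching keys, applying the functional API at the leaves.
    if isinstance(input, ivy.Container):
        return ivy.Container({
            k: leafwise_loss(input[k], target[k], margin=margin, reduction=reduction)
            for k in input.keys()
        })
    return ivy.hinge_embedding_loss(input, target, margin=margin, reduction=reduction)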
33 changes: 29 additions & 4 deletions ivy/functional/backends/jax/experimental/losses.py
@@ -64,11 +64,11 @@ def soft_margin_loss(
return loss


-def _apply_loss_reduction(loss: JaxArray, reduction: str, axis=None) -> JaxArray:
+def _apply_loss_reduction(loss: JaxArray, reduction: str) -> JaxArray:
     if reduction == "sum":
-        return jnp.sum(loss, axis=axis)
+        return jnp.sum(loss)
     elif reduction == "mean":
-        return jnp.mean(loss, axis=axis)
+        return jnp.mean(loss)
     else:  # reduction == "none"
         return loss

@@ -114,7 +114,7 @@ def _validate_poisson_nll_params(

@with_supported_device_and_dtypes(
{
"0.4.14 and below": {
"0.4.18 and below": {
"cpu": ("float16", "float32", "float64"),
}
},
@@ -153,3 +153,28 @@ def poisson_nll_loss(
cond = jnp.logical_and(target_arr >= zeroes, target_arr <= ones)
loss = loss + jnp.where(cond, zeroes, striling_approx_term)
return _apply_loss_reduction(loss, reduction)


@with_supported_device_and_dtypes(
{
"0.4.18 and below": {
"cpu": ("float32", "float64"),
}
},
backend_version,
)
def hinge_embedding_loss(
input: JaxArray,
target: JaxArray,
*,
margin: float = 1.0,
reduction: str = "mean",
) -> JaxArray:
zero_ = jnp.zeros([1], dtype=input.dtype)

relu_part = jnp.maximum(margin - input, 0)

loss = jnp.where(target == 1.0, input, zero_) + jnp.where(
target == -1.0, relu_part, zero_
)
return _apply_loss_reduction(loss, reduction)
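
As a sanity check on the JAX backend, the `a` branch of the mixed-input example above (all-negative labels, margin=2.0, sum reduction) can be reproduced directly with jax.numpy; this is a standalone sketch, not part of the committed code:

import jax.numpy as jnp

x = jnp.array([10.0, 20.0, 32.0])
y = jnp.array([-1.0, -1.0, -1.0])

# With margin=2.0, max(2 - x, 0) is zero for every entry, so the summed loss is 0.
relu_part = jnp.maximum(2.0 - x, 0.0)
loss = jnp.where(y == 1.0, x, 0.0) + jnp.where(y == -1.0, relu_part, 0.0)
print(loss.sum())  # 0.0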
37 changes: 30 additions & 7 deletions ivy/functional/backends/numpy/experimental/losses.py
@@ -75,15 +75,12 @@ def soft_margin_loss(
return loss


-def _apply_loss_reduction(loss: np.ndarray, reduction: str, axis, out) -> np.ndarray:
+def _apply_loss_reduction(loss: np.ndarray, reduction: str) -> np.ndarray:
     if reduction == "sum":
-        return np.sum(loss, axis=axis, out=out)
+        return np.sum(loss)
     elif reduction == "mean":
-        return np.mean(loss, axis=axis, out=out)
+        return np.mean(loss)
     else:  # reduction == "none"
-        if out is not None:
-            out[...] = loss
-            return out
         return loss


@@ -128,7 +128,7 @@ def _validate_poisson_nll_params(

@with_supported_device_and_dtypes(
{
"1.25.2 and below": {
"1.26.0 and below": {
"cpu": ("float16", "float32", "float64"),
}
},
@@ -167,3 +167,29 @@ def poisson_nll_loss(
cond = np.logical_and(target_arr >= zeroes, target_arr <= ones)
loss = loss + np.where(cond, zeroes, striling_approx_term)
return _apply_loss_reduction(loss, reduction)


@with_supported_device_and_dtypes(
{
"1.26.0 and below": {
"cpu": ("float32", "float64"),
}
},
backend_version,
)
def hinge_embedding_loss(
input: np.ndarray,
target: np.ndarray,
*,
margin: float = 1.0,
reduction: str = "mean",
) -> np.ndarray:
zero_ = np.zeros([1], dtype=input.dtype)

relu_part = np.maximum(margin - input, 0)

loss = np.where(target == 1.0, input, zero_) + np.where(
target == -1.0, relu_part, zero_
)

return _apply_loss_reduction(loss, reduction)
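
The NumPy backend can be checked the same way against the array-method example earlier in the diff (all-positive labels, sum reduction); again, a standalone sketch rather than committed code:

import numpy as np

x = np.array([1.0, 2.0, 3.0, 4.0])
y = np.ones_like(x)

# With y == 1 everywhere the relu branch never fires, so the loss is just x.
loss = np.where(y == 1.0, x, 0.0) + np.where(y == -1.0, np.maximum(1.0 - x, 0.0), 0.0)
print(loss.sum())   # 10.0, matching ivy.array(10.) above
print(loss.mean())  # 2.5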