# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

import tensorflow as tf

from tensorflow_addons.utils.types import Number, TensorLike
from typing import Optional


@tf.keras.utils.register_keras_serializable(package="Addons")
def rrelu(
    x: TensorLike,
    lower: Number = 0.125,
    upper: Number = 0.3333333333333333,
    training: Optional[bool] = None,
    seed: Optional[int] = None,
    rng: Optional[tf.random.Generator] = None,
) -> tf.Tensor:
30 r"""Randomized leaky rectified liner unit function.
32 Computes rrelu function:
34 $$
35 \mathrm{rrelu}(x) =
36 \begin{cases}
37 x & \text{if } x > 0 \\
38 a x
39 \end{cases},
40 $$
42 where
44 $$
45 a \sim \mathcal{U}(\mathrm{lower}, \mathrm{upper})
46 $$
48 when `training` is `True`; or
50 $$
51 a = \frac{\mathrm{lower} + \mathrm{upper}}{2}
52 $$
54 when `training` is `False`.
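
    For example, with the default bounds, the inference-mode slope is
    `a = (0.125 + 0.3333...) / 2 ≈ 0.22917`, which is why
    `rrelu(-1.0, training=False)` evaluates to about `-0.22917` in the
    usage below.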

    See [Empirical Evaluation of Rectified Activations in Convolutional Network](https://arxiv.org/abs/1505.00853).

    Usage:

    >>> x = tf.constant([-1.0, 0.0, 1.0])
    >>> tfa.activations.rrelu(x, training=False)
    <tf.Tensor: shape=(3,), dtype=float32, numpy=array([-0.22916667,  0.        ,  1.        ], dtype=float32)>
    >>> tfa.activations.rrelu(x, training=True, seed=2020)
    <tf.Tensor: shape=(3,), dtype=float32, numpy=array([-0.22631127,  0.        ,  1.        ], dtype=float32)>
    >>> generator = tf.random.Generator.from_seed(2021)
    >>> tfa.activations.rrelu(x, training=True, rng=generator)
    <tf.Tensor: shape=(3,), dtype=float32, numpy=array([-0.16031083,  0.        ,  1.        ], dtype=float32)>

    Args:
        x: A `Tensor`. Must be one of the following types:
            `bfloat16`, `float16`, `float32`, `float64`.
        lower: `float`, lower bound for random alpha.
        upper: `float`, upper bound for random alpha.
        training: `bool`, indicating whether the `call`
            is meant for training or inference.
        seed: `int`, this sets the operation-level seed.
        rng: A `tf.random.Generator`.
    Returns:
        result: A `Tensor`. Has the same type as `x`.
    """
    x = tf.convert_to_tensor(x)
    lower = tf.cast(lower, x.dtype)
    upper = tf.cast(upper, x.dtype)
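
    # Sample the negative-half slope `a` uniformly from [lower, upper).
    # `seed` and `rng` are mutually exclusive ways to control the randomness.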
    def random_a():
        if rng is not None and seed is not None:
            raise ValueError(
                "Specify either `seed` or `rng`, not both at the same time."
            )
        if rng is not None:
            return rng.uniform(tf.shape(x), minval=lower, maxval=upper, dtype=x.dtype)
        return tf.random.uniform(
            tf.shape(x), minval=lower, maxval=upper, dtype=x.dtype, seed=seed
        )
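
    # In training mode use a freshly sampled slope; in inference mode use the
    # midpoint of the bounds. When `training` is None, the Keras learning
    # phase decides which branch applies.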
    a = tf.keras.backend.in_train_phase(random_a, (lower + upper) / 2, training)

    return tf.where(x >= 0, x, a * x)
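
# A minimal, illustrative sketch (not part of the original module) showing one
# way to use `rrelu` inside a Keras model via a `Lambda` layer. The layer
# sizes and input values are arbitrary examples, not library defaults.
if __name__ == "__main__":
    model = tf.keras.Sequential(
        [
            tf.keras.layers.Dense(4, input_shape=(3,)),
            # Fix `training=False` so the deterministic midpoint slope is used.
            tf.keras.layers.Lambda(lambda t: rrelu(t, training=False)),
        ]
    )
    print(model(tf.constant([[-1.0, 0.0, 1.0]])))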