# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

import tensorflow as tf
import warnings

from tensorflow_addons.utils.types import TensorLike

@tf.keras.utils.register_keras_serializable(package="Addons")
def gelu(x: TensorLike, approximate: bool = True) -> tf.Tensor:
    r"""Gaussian Error Linear Unit.

    Computes the Gaussian error linear unit (GELU) activation:

    $$
    \mathrm{gelu}(x) = x \Phi(x),
    $$

    where

    $$
    \Phi(x) = \frac{1}{2} \left[ 1 + \mathrm{erf}\left(\frac{x}{\sqrt{2}}\right) \right]
    $$

    when `approximate` is `False`; or

    $$
    \Phi(x) = \frac{1}{2} \left[ 1 + \tanh\left(\sqrt{\frac{2}{\pi}} \cdot (x + 0.044715 \cdot x^3)\right) \right]
    $$

    when `approximate` is `True`.

    See [Gaussian Error Linear Units (GELUs)](https://arxiv.org/abs/1606.08415)
    and [BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding](https://arxiv.org/abs/1810.04805).

    Consider using `tf.nn.gelu` instead.
    Note that the default of `approximate` changed to `False` in `tf.nn.gelu`.

    Usage:

    >>> x = tf.constant([0.0, 0.0, 1.0])
    >>> tfa.activations.gelu(x, approximate=False)
    <tf.Tensor: shape=(3,), dtype=float32, numpy=array([0.       , 0.       , 0.8413447], dtype=float32)>
    >>> tfa.activations.gelu(x, approximate=True)
    <tf.Tensor: shape=(3,), dtype=float32, numpy=array([0.      , 0.      , 0.841192], dtype=float32)>

    Args:
        x: A `Tensor`. Must be one of the following types:
            `float16`, `float32`, `float64`.
        approximate: bool, whether to enable approximation.
    Returns:
        A `Tensor`. Has the same type as `x`.
    """
    warnings.warn(
        "gelu activation has been migrated to core TensorFlow, "
        "and will be deprecated in Addons 0.13. "
        "Note that the default of `approximate` changed to `False` in `tf.nn.gelu`.",
        DeprecationWarning,
    )
    return tf.nn.gelu(x, approximate)
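

# A minimal sanity-check sketch, not part of the original module: it re-derives
# both docstring formulas with plain `math` calls and compares them against
# `tf.nn.gelu`, which this wrapper delegates to. The helper names and the 1e-5
# tolerance are illustrative assumptions, not library API.
if __name__ == "__main__":
    import math

    def _gelu_exact(v: float) -> float:
        # gelu(v) = v * Phi(v), with Phi the standard normal CDF via erf.
        return v * 0.5 * (1.0 + math.erf(v / math.sqrt(2.0)))

    def _gelu_tanh(v: float) -> float:
        # tanh approximation: v * 1/2 * [1 + tanh(sqrt(2/pi) * (v + 0.044715 * v^3))].
        return v * 0.5 * (1.0 + math.tanh(math.sqrt(2.0 / math.pi) * (v + 0.044715 * v**3)))

    for v in [-2.0, -1.0, 0.0, 1.0, 2.0]:
        exact = float(tf.nn.gelu(tf.constant(v), approximate=False))
        approx = float(tf.nn.gelu(tf.constant(v), approximate=True))
        assert abs(exact - _gelu_exact(v)) < 1e-5
        assert abs(approx - _gelu_tanh(v)) < 1e-5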