Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/keras/src/layers/activation/leaky_relu.py: 55%
22 statements
coverage.py v7.4.0, created at 2024-01-03 07:57 +0000
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Leaky version of a Rectified Linear Unit activation layer."""


from keras.src import backend
from keras.src.engine.base_layer import Layer
from keras.src.utils import tf_utils

# isort: off
from tensorflow.python.util.tf_export import keras_export


@keras_export("keras.layers.LeakyReLU")
class LeakyReLU(Layer):
    """Leaky version of a Rectified Linear Unit.

    It allows a small gradient when the unit is not active:

    ```
        f(x) = alpha * x if x < 0
        f(x) = x if x >= 0
    ```

    Usage:

    >>> layer = tf.keras.layers.LeakyReLU()
    >>> output = layer([-3.0, -1.0, 0.0, 2.0])
    >>> list(output.numpy())
    [-0.9, -0.3, 0.0, 2.0]
    >>> layer = tf.keras.layers.LeakyReLU(alpha=0.1)
    >>> output = layer([-3.0, -1.0, 0.0, 2.0])
    >>> list(output.numpy())
    [-0.3, -0.1, 0.0, 2.0]

    Input shape:
        Arbitrary. Use the keyword argument `input_shape`
        (tuple of integers, does not include the batch axis)
        when using this layer as the first layer in a model.

    Output shape:
        Same shape as the input.

    Args:
        alpha: Float >= `0.`. Negative slope coefficient. Defaults to `0.3`.

    """

    def __init__(self, alpha=0.3, **kwargs):
        super().__init__(**kwargs)
        if alpha is None:
            raise ValueError(
                "The alpha value of a Leaky ReLU layer cannot be None. "
                f"Expected a float. Received: {alpha}"
            )
        self.supports_masking = True
        self.alpha = backend.cast_to_floatx(alpha)

    def call(self, inputs):
        return backend.relu(inputs, alpha=self.alpha)

    def get_config(self):
        config = {"alpha": float(self.alpha)}
        base_config = super().get_config()
        return dict(list(base_config.items()) + list(config.items()))

    @tf_utils.shape_type_conversion
    def compute_output_shape(self, input_shape):
        return input_shape
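

# ---------------------------------------------------------------------------
# A minimal usage sketch, not part of the original keras source file: it
# exercises the constructor, the call path, and the get_config round-trip
# defined above. It assumes a standard TensorFlow installation where this
# layer is exported as tf.keras.layers.LeakyReLU.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import tensorflow as tf

    # Negative inputs are scaled by alpha; non-negative inputs pass through.
    layer = tf.keras.layers.LeakyReLU(alpha=0.2)
    print(layer(tf.constant([-1.0, 0.0, 3.0])).numpy())  # approx. [-0.2, 0.0, 3.0]

    # The layer can be reconstructed from its serialized config.
    restored = tf.keras.layers.LeakyReLU.from_config(layer.get_config())
    print(restored.get_config()["alpha"])  # same alpha as the original layer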