Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/keras/src/layers/regularization/alpha_dropout.py: 37%
38 statements
« prev ^ index » next coverage.py v7.4.0, created at 2024-01-03 07:57 +0000
1# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14# ==============================================================================
15"""Contains the AlphaDropout layer."""
18import tensorflow.compat.v2 as tf
20from keras.src import backend
21from keras.src.engine import base_layer
22from keras.src.utils import tf_utils
24# isort: off
25from tensorflow.python.util.tf_export import keras_export
@keras_export("keras.layers.AlphaDropout")
class AlphaDropout(base_layer.BaseRandomLayer):
    """Applies Alpha Dropout to the input.

    Alpha Dropout is a `Dropout` that keeps mean and variance of inputs
    to their original values, in order to ensure the self-normalizing property
    even after this dropout.
    Alpha Dropout fits well to Scaled Exponential Linear Units
    by randomly setting activations to the negative saturation value.

    Args:
        rate: float, drop probability (as with `Dropout`).
            The multiplicative noise will have
            standard deviation `sqrt(rate / (1 - rate))`.
        noise_shape: Optional shape of the binary dropout mask that will be
            multiplied with the input. Defaults to the (dynamic) shape of the
            input, i.e. every element is dropped independently.
        seed: Integer, optional random seed to enable deterministic behavior.

    Call arguments:
        inputs: Input tensor (of any rank).
        training: Python boolean indicating whether the layer should behave in
            training mode (adding dropout) or in inference mode (doing
            nothing).

    Input shape:
        Arbitrary. Use the keyword argument `input_shape`
        (tuple of integers, does not include the samples axis)
        when using this layer as the first layer in a model.

    Output shape:
        Same shape as input.
    """

    def __init__(self, rate, noise_shape=None, seed=None, **kwargs):
        super().__init__(seed=seed, **kwargs)
        self.rate = rate
        self.noise_shape = noise_shape
        self.seed = seed
        self.supports_masking = True

    def _get_noise_shape(self, inputs):
        # Fall back to the dynamic input shape when no explicit mask shape
        # was requested.
        return self.noise_shape if self.noise_shape else tf.shape(inputs)

    def call(self, inputs, training=None):
        # Rates outside (0, 1) mean "drop nothing" here, so the input is
        # returned unchanged (NOTE(review): rate >= 1 is silently treated the
        # same as rate == 0 rather than raising — confirm this matches the
        # validation done elsewhere, e.g. in `Dropout`).
        if 0.0 < self.rate < 1.0:
            noise_shape = self._get_noise_shape(inputs)

            def dropped_inputs(inputs=inputs, rate=self.rate):
                # SELU activation constants; alpha_p is the negative
                # saturation value that dropped units are set to.
                alpha = 1.6732632423543772848170429916717
                scale = 1.0507009873554804934193349852946
                alpha_p = -alpha * scale

                # Bernoulli keep-mask: 1 where a unit is kept, 0 otherwise.
                kept_idx = tf.greater_equal(
                    self._random_generator.random_uniform(noise_shape), rate
                )
                kept_idx = tf.cast(kept_idx, inputs.dtype)

                # Affine transformation params chosen so the output keeps the
                # input's mean and variance.
                a = ((1 - rate) * (1 + rate * alpha_p**2)) ** -0.5
                b = -a * alpha_p * rate

                # Apply mask: kept units pass through, dropped units become
                # alpha_p.
                x = inputs * kept_idx + alpha_p * (1 - kept_idx)

                # Do affine transformation.
                return a * x + b

            # Only apply dropout in training mode; identity at inference.
            return backend.in_train_phase(
                dropped_inputs, inputs, training=training
            )
        return inputs

    def get_config(self):
        # Fix: also serialize `noise_shape` (previously omitted), so layers
        # constructed with a custom mask shape survive a
        # get_config/from_config round trip. This matches `Dropout`, which
        # includes `noise_shape` in its config, and is backward-compatible
        # because __init__ already accepts the keyword.
        config = {
            "rate": self.rate,
            "noise_shape": self.noise_shape,
            "seed": self.seed,
        }
        base_config = super().get_config()
        return dict(list(base_config.items()) + list(config.items()))

    @tf_utils.shape_type_conversion
    def compute_output_shape(self, input_shape):
        # Dropout never changes the shape of its input.
        return input_shape