Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/keras/src/layers/regularization/dropout.py: 35%

40 statements  

« prev     ^ index     » next       coverage.py v7.4.0, created at 2024-01-03 07:57 +0000

1# Copyright 2015 The TensorFlow Authors. All Rights Reserved. 

2# 

3# Licensed under the Apache License, Version 2.0 (the "License"); 

4# you may not use this file except in compliance with the License. 

5# You may obtain a copy of the License at 

6# 

7# http://www.apache.org/licenses/LICENSE-2.0 

8# 

9# Unless required by applicable law or agreed to in writing, software 

10# distributed under the License is distributed on an "AS IS" BASIS, 

11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 

12# See the License for the specific language governing permissions and 

13# limitations under the License. 

14# ============================================================================== 

15"""Contains the Dropout layer.""" 

16 

17import numbers 

18 

19import tensorflow.compat.v2 as tf 

20 

21from keras.src import backend 

22from keras.src.engine import base_layer 

23from keras.src.utils import control_flow_util 

24 

25# isort: off 

26from tensorflow.python.util.tf_export import keras_export 

27 

28 

@keras_export("keras.layers.Dropout")
class Dropout(base_layer.BaseRandomLayer):
    """Applies Dropout to the input.

    During training, the layer zeroes out each input unit independently
    with probability `rate` at every step, which helps prevent
    overfitting. The surviving units are scaled by `1 / (1 - rate)` so
    that the expected sum over all inputs stays the same.

    Dropout is only active when `training` is True; at inference time
    the layer is a no-op. `model.fit` sets `training` to True
    automatically; elsewhere, pass `training=True` explicitly when
    calling the layer.

    (Setting `trainable=False` is unrelated: Dropout holds no
    variables/weights to freeze, so `trainable` does not change its
    behavior.)

    >>> tf.random.set_seed(0)
    >>> layer = tf.keras.layers.Dropout(.2, input_shape=(2,))
    >>> data = np.arange(10).reshape(5, 2).astype(np.float32)
    >>> print(data)
    [[0. 1.]
     [2. 3.]
     [4. 5.]
     [6. 7.]
     [8. 9.]]
    >>> outputs = layer(data, training=True)
    >>> print(outputs)
    tf.Tensor(
    [[ 0.   1.25]
     [ 2.5  3.75]
     [ 5.   6.25]
     [ 7.5  8.75]
     [10.   0.  ]], shape=(5, 2), dtype=float32)

    Args:
      rate: Float between 0 and 1. Fraction of the input units to drop.
      noise_shape: 1D integer tensor representing the shape of the
        binary dropout mask that will be multiplied with the input.
        For instance, if your inputs have shape
        `(batch_size, timesteps, features)` and
        you want the dropout mask to be the same for all timesteps,
        you can use `noise_shape=(batch_size, 1, features)`.
      seed: A Python integer to use as random seed.

    Call arguments:
      inputs: Input tensor (of any rank).
      training: Python boolean indicating whether the layer should behave in
        training mode (adding dropout) or in inference mode (doing nothing).
    """

    def __init__(self, rate, noise_shape=None, seed=None, **kwargs):
        super().__init__(seed=seed, **kwargs)
        # Only numeric rates can be range-checked eagerly; tensor-valued
        # rates are validated downstream. The chained comparison (rather
        # than `rate < 0 or rate > 1`) also rejects NaN.
        if isinstance(rate, (int, float)) and not (0 <= rate <= 1):
            raise ValueError(
                f"Invalid value {rate} received for "
                "`rate`, expected a value between 0 and 1."
            )
        self.rate = rate
        self.noise_shape = noise_shape
        self.seed = seed
        self.supports_masking = True

    def _get_noise_shape(self, inputs):
        # Subclasses of `Dropout` may override this method to supply
        # custom noise shapes for dynamically sized inputs; it takes
        # precedence over `self.noise_shape`.
        if self.noise_shape is None:
            return None

        # Fill in `None` entries of the requested noise shape with the
        # corresponding dynamic dimension of the actual input.
        dynamic_shape = tf.shape(inputs)
        resolved = [
            dynamic_shape[axis] if dim is None else dim
            for axis, dim in enumerate(self.noise_shape)
        ]
        return tf.convert_to_tensor(resolved)

    def call(self, inputs, training=None):
        # A statically-known rate of 0 means dropout is a no-op; skip
        # the conditional machinery entirely.
        if isinstance(self.rate, numbers.Real) and self.rate == 0:
            return tf.identity(inputs)

        if training is None:
            training = backend.learning_phase()

        def apply_dropout():
            return self._random_generator.dropout(
                inputs, self.rate, noise_shape=self._get_noise_shape(inputs)
            )

        # `smart_cond` handles both Python booleans and symbolic
        # `training` tensors (graph mode).
        return control_flow_util.smart_cond(
            training, apply_dropout, lambda: tf.identity(inputs)
        )

    def compute_output_shape(self, input_shape):
        # Dropout never alters the shape of its input.
        return input_shape

    def get_config(self):
        config = {
            "rate": self.rate,
            "noise_shape": self.noise_shape,
            "seed": self.seed,
        }
        # Layer-specific keys override any same-named base keys, matching
        # the base-first merge order of the original implementation.
        return {**super().get_config(), **config}

136