# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Rectified Linear Unit activation layer."""


from keras.src import backend
from keras.src.engine.base_layer import Layer
from keras.src.utils import tf_utils

# isort: off
from tensorflow.python.util.tf_export import keras_export


@keras_export("keras.layers.ReLU")
class ReLU(Layer):
    """Rectified Linear Unit activation function.

    With default values, it returns element-wise `max(x, 0)`.

    Otherwise, it follows:

    ```
        f(x) = max_value if x >= max_value
        f(x) = x if threshold <= x < max_value
        f(x) = negative_slope * (x - threshold) otherwise
    ```

    Usage:

    >>> layer = tf.keras.layers.ReLU()
    >>> output = layer([-3.0, -1.0, 0.0, 2.0])
    >>> list(output.numpy())
    [0.0, 0.0, 0.0, 2.0]
    >>> layer = tf.keras.layers.ReLU(max_value=1.0)
    >>> output = layer([-3.0, -1.0, 0.0, 2.0])
    >>> list(output.numpy())
    [0.0, 0.0, 0.0, 1.0]
    >>> layer = tf.keras.layers.ReLU(negative_slope=1.0)
    >>> output = layer([-3.0, -1.0, 0.0, 2.0])
    >>> list(output.numpy())
    [-3.0, -1.0, 0.0, 2.0]
    >>> layer = tf.keras.layers.ReLU(threshold=1.5)
    >>> output = layer([-3.0, -1.0, 1.0, 2.0])
    >>> list(output.numpy())
    [0.0, 0.0, 0.0, 2.0]

    Input shape:
        Arbitrary. Use the keyword argument `input_shape`
        (tuple of integers, does not include the batch axis)
        when using this layer as the first layer in a model.

    Output shape:
        Same shape as the input.

    Args:
        max_value: Float >= 0. Maximum activation value. None means
            unlimited. Defaults to `None`.
        negative_slope: Float >= 0. Negative slope coefficient.
            Defaults to `0.`.
        threshold: Float >= 0. Threshold value for thresholded activation.
            Defaults to `0.`.
    """

    def __init__(
        self, max_value=None, negative_slope=0.0, threshold=0.0, **kwargs
    ):
        super().__init__(**kwargs)
        if max_value is not None and max_value < 0.0:
            raise ValueError(
                "max_value of a ReLU layer cannot be a negative "
                f"value. Received: {max_value}"
            )
        if negative_slope is None or negative_slope < 0.0:
            raise ValueError(
                "negative_slope of a ReLU layer cannot be a negative "
                f"value. Received: {negative_slope}"
            )
        if threshold is None or threshold < 0.0:
            raise ValueError(
                "threshold of a ReLU layer cannot be a negative "
                f"value. Received: {threshold}"
            )

        self.supports_masking = True
        # Store the hyperparameters cast to the backend's default float
        # dtype (`floatx`) so they are kept in a consistent numeric type.
        if max_value is not None:
            max_value = backend.cast_to_floatx(max_value)
        self.max_value = max_value
        self.negative_slope = backend.cast_to_floatx(negative_slope)
        self.threshold = backend.cast_to_floatx(threshold)
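        # For instance, `ReLU(max_value=-1.0)` or `ReLU(threshold=-0.5)`
        # would be rejected with a `ValueError` by the checks above.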

    def call(self, inputs):
        # alpha is used for leaky relu slope in activations instead of
        # negative_slope.
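        # For example, with threshold=1.0 and negative_slope=0.2, an input
        # of 0.5 falls below the threshold and maps to
        # 0.2 * (0.5 - 1.0) = -0.1, while an input of 2.0 passes through
        # unchanged (no max_value is set by default).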

        return backend.relu(
            inputs,
            alpha=self.negative_slope,
            max_value=self.max_value,
            threshold=self.threshold,
        )

    def get_config(self):
        config = {
            "max_value": self.max_value,
            "negative_slope": self.negative_slope,
            "threshold": self.threshold,
        }
        base_config = super().get_config()
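        # These three entries are all that is needed to rebuild the layer;
        # e.g. `ReLU.from_config(layer.get_config())` (with `from_config`
        # inherited from the base `Layer`) reconstructs an equivalent layer.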

        return dict(list(base_config.items()) + list(config.items()))

    @tf_utils.shape_type_conversion
    def compute_output_shape(self, input_shape):
        # ReLU is applied element-wise, so the output shape is identical to
        # the input shape.
        return input_shape
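
# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the original file: a plain-NumPy reference
# implementation of the piecewise formula from the class docstring, checked
# against the docstring's expected outputs. The `_relu_reference` helper is
# hypothetical and assumes only that NumPy is installed.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import numpy as np

    def _relu_reference(x, max_value=None, negative_slope=0.0, threshold=0.0):
        # f(x) = max_value                         if x >= max_value
        # f(x) = x                                 if threshold <= x < max_value
        # f(x) = negative_slope * (x - threshold)  otherwise
        x = np.asarray(x, dtype="float32")
        out = np.where(x >= threshold, x, negative_slope * (x - threshold))
        if max_value is not None:
            out = np.minimum(out, max_value)
        return out

    x = [-3.0, -1.0, 0.0, 2.0]
    print(_relu_reference(x))                      # -> [0. 0. 0. 2.]
    print(_relu_reference(x, max_value=1.0))       # -> [0. 0. 0. 1.]
    print(_relu_reference(x, negative_slope=1.0))  # -> [-3. -1. 0. 2.]
    print(_relu_reference([-3.0, -1.0, 1.0, 2.0], threshold=1.5))  # -> [0. 0. 0. 2.]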