# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Cyclical Learning Rate Schedule policies for TensorFlow."""

import tensorflow as tf
from tensorflow_addons.utils.types import FloatTensorLike

from typeguard import typechecked
from typing import Union, Callable


@tf.keras.utils.register_keras_serializable(package="Addons")
class CyclicalLearningRate(tf.keras.optimizers.schedules.LearningRateSchedule):
    """A LearningRateSchedule that uses a cyclical schedule."""

    @typechecked
    def __init__(
        self,
        initial_learning_rate: Union[FloatTensorLike, Callable],
        maximal_learning_rate: Union[FloatTensorLike, Callable],
        step_size: FloatTensorLike,
        scale_fn: Callable,
        scale_mode: str = "cycle",
        name: str = "CyclicalLearningRate",
    ):
38 """Applies cyclical schedule to the learning rate.
40 See Cyclical Learning Rates for Training Neural Networks. https://arxiv.org/abs/1506.01186
43 ```python
44 lr_schedule = tf.keras.optimizers.schedules.CyclicalLearningRate(
45 initial_learning_rate=1e-4,
46 maximal_learning_rate=1e-2,
47 step_size=2000,
48 scale_fn=lambda x: 1.,
49 scale_mode="cycle",
50 name="MyCyclicScheduler")
52 model.compile(optimizer=tf.keras.optimizers.SGD(
53 learning_rate=lr_schedule),
54 loss='sparse_categorical_crossentropy',
55 metrics=['accuracy'])
57 model.fit(data, labels, epochs=5)
58 ```
60 You can pass this schedule directly into a
61 `tf.keras.optimizers.legacy.Optimizer` as the learning rate.
63 Args:
64 initial_learning_rate: A scalar `float32` or `float64` `Tensor` or
65 a Python number. The initial learning rate.
66 maximal_learning_rate: A scalar `float32` or `float64` `Tensor` or
67 a Python number. The maximum learning rate.
68 step_size: A scalar `float32` or `float64` `Tensor` or a
69 Python number. Step size denotes the number of training iterations it takes to get to maximal_learning_rate.
70 scale_fn: A function. Scheduling function applied in cycle
71 scale_mode: ['cycle', 'iterations']. Mode to apply during cyclic
72 schedule
73 name: (Optional) Name for the operation.
75 Returns:
76 Updated learning rate value.
77 """
        super().__init__()
        self.initial_learning_rate = initial_learning_rate
        self.maximal_learning_rate = maximal_learning_rate
        self.step_size = step_size
        self.scale_fn = scale_fn
        self.scale_mode = scale_mode
        self.name = name

    def __call__(self, step):
        with tf.name_scope(self.name or "CyclicalLearningRate"):
            initial_learning_rate = tf.convert_to_tensor(
                self.initial_learning_rate, name="initial_learning_rate"
            )
            dtype = initial_learning_rate.dtype
            maximal_learning_rate = tf.cast(self.maximal_learning_rate, dtype)
            step_size = tf.cast(self.step_size, dtype)
            step_as_dtype = tf.cast(step, dtype)
            cycle = tf.floor(1 + step_as_dtype / (2 * step_size))
            x = tf.abs(step_as_dtype / step_size - 2 * cycle + 1)

            mode_step = cycle if self.scale_mode == "cycle" else step

            return initial_learning_rate + (
                maximal_learning_rate - initial_learning_rate
            ) * tf.maximum(tf.cast(0, dtype), (1 - x)) * self.scale_fn(mode_step)
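
    # A worked example of the triangular wave above (comment added for
    # illustration; not in the original source). With step_size=2000 and a
    # unit scale_fn:
    #   step=0:    cycle=1, x=|0 - 2 + 1|   = 1   -> lr = initial_learning_rate
    #   step=1000: cycle=1, x=|0.5 - 2 + 1| = 0.5 -> lr = midpoint of the range
    #   step=2000: cycle=1, x=|1 - 2 + 1|   = 0   -> lr = maximal_learning_rate
    #   step=4000: cycle=2, x=|2 - 4 + 1|   = 1   -> lr = initial_learning_rate
    # The rate ramps linearly up over step_size steps, back down over the next
    # step_size steps, and completes one full cycle every 2 * step_size steps.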

    def get_config(self):
        return {
            "initial_learning_rate": self.initial_learning_rate,
            "maximal_learning_rate": self.maximal_learning_rate,
            "scale_fn": self.scale_fn,
            "step_size": self.step_size,
            "scale_mode": self.scale_mode,
        }


@tf.keras.utils.register_keras_serializable(package="Addons")
class TriangularCyclicalLearningRate(CyclicalLearningRate):
    @typechecked
    def __init__(
        self,
        initial_learning_rate: Union[FloatTensorLike, Callable],
        maximal_learning_rate: Union[FloatTensorLike, Callable],
        step_size: FloatTensorLike,
        scale_mode: str = "cycle",
        name: str = "TriangularCyclicalLearningRate",
    ):
125 """Applies triangular cyclical schedule to the learning rate.
127 See Cyclical Learning Rates for Training Neural Networks. https://arxiv.org/abs/1506.01186
130 ```python
131 from tf.keras.optimizers import schedules
133 lr_schedule = schedules.TriangularCyclicalLearningRate(
134 initial_learning_rate=1e-4,
135 maximal_learning_rate=1e-2,
136 step_size=2000,
137 scale_mode="cycle",
138 name="MyCyclicScheduler")
140 model.compile(optimizer=tf.keras.optimizers.SGD(
141 learning_rate=lr_schedule),
142 loss='sparse_categorical_crossentropy',
143 metrics=['accuracy'])
145 model.fit(data, labels, epochs=5)
146 ```
148 You can pass this schedule directly into a
149 `tf.keras.optimizers.legacy.Optimizer` as the learning rate.
151 Args:
152 initial_learning_rate: A scalar `float32` or `float64` `Tensor` or
153 a Python number. The initial learning rate.
154 maximal_learning_rate: A scalar `float32` or `float64` `Tensor` or
155 a Python number. The maximum learning rate.
156 step_size: A scalar `float32` or `float64` `Tensor` or a
157 Python number. Step size denotes the number of training iterations it takes to get to maximal_learning_rate
158 scale_mode: ['cycle', 'iterations']. Mode to apply during cyclic
159 schedule
160 name: (Optional) Name for the operation.
162 Returns:
163 Updated learning rate value.
164 """
        super().__init__(
            initial_learning_rate=initial_learning_rate,
            maximal_learning_rate=maximal_learning_rate,
            step_size=step_size,
            scale_fn=lambda x: 1.0,
            scale_mode=scale_mode,
            name=name,
        )
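
    # Note (added for illustration): scale_fn is pinned to 1.0, so this is the
    # plain "triangular" policy from the paper, where every cycle has the same
    # amplitude.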

    def get_config(self):
        return {
            "initial_learning_rate": self.initial_learning_rate,
            "maximal_learning_rate": self.maximal_learning_rate,
            "step_size": self.step_size,
            "scale_mode": self.scale_mode,
        }


@tf.keras.utils.register_keras_serializable(package="Addons")
class Triangular2CyclicalLearningRate(CyclicalLearningRate):
    @typechecked
    def __init__(
        self,
        initial_learning_rate: Union[FloatTensorLike, Callable],
        maximal_learning_rate: Union[FloatTensorLike, Callable],
        step_size: FloatTensorLike,
        scale_mode: str = "cycle",
        name: str = "Triangular2CyclicalLearningRate",
    ):
194 """Applies triangular2 cyclical schedule to the learning rate.
196 See Cyclical Learning Rates for Training Neural Networks. https://arxiv.org/abs/1506.01186
199 ```python
200 from tf.keras.optimizers import schedules
202 lr_schedule = schedules.Triangular2CyclicalLearningRate(
203 initial_learning_rate=1e-4,
204 maximal_learning_rate=1e-2,
205 step_size=2000,
206 scale_mode="cycle",
207 name="MyCyclicScheduler")
209 model.compile(optimizer=tf.keras.optimizers.SGD(
210 learning_rate=lr_schedule),
211 loss='sparse_categorical_crossentropy',
212 metrics=['accuracy'])
214 model.fit(data, labels, epochs=5)
215 ```
217 You can pass this schedule directly into a
218 `tf.keras.optimizers.legacy.Optimizer` as the learning rate.
220 Args:
221 initial_learning_rate: A scalar `float32` or `float64` `Tensor` or
222 a Python number. The initial learning rate.
223 maximal_learning_rate: A scalar `float32` or `float64` `Tensor` or
224 a Python number. The maximum learning rate.
225 step_size: A scalar `float32` or `float64` `Tensor` or a
226 Python number. Step size denotes the number of training iterations it takes to get to maximal_learning_rate
227 scale_mode: ['cycle', 'iterations']. Mode to apply during cyclic
228 schedule
229 name: (Optional) Name for the operation.
231 Returns:
232 Updated learning rate value.
233 """
        super().__init__(
            initial_learning_rate=initial_learning_rate,
            maximal_learning_rate=maximal_learning_rate,
            step_size=step_size,
            scale_fn=lambda x: 1 / (2.0 ** (x - 1)),
            scale_mode=scale_mode,
            name=name,
        )
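
    # Note (added for illustration): with scale_mode="cycle", the scaling
    # function is evaluated on the cycle index, so scale_fn(x) = 1 / 2**(x - 1)
    # halves the peak amplitude each cycle (cycle 1 -> 1.0, cycle 2 -> 0.5,
    # cycle 3 -> 0.25), which is the "triangular2" policy.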

    def get_config(self):
        return {
            "initial_learning_rate": self.initial_learning_rate,
            "maximal_learning_rate": self.maximal_learning_rate,
            "step_size": self.step_size,
            "scale_mode": self.scale_mode,
        }


@tf.keras.utils.register_keras_serializable(package="Addons")
class ExponentialCyclicalLearningRate(CyclicalLearningRate):
    @typechecked
    def __init__(
        self,
        initial_learning_rate: Union[FloatTensorLike, Callable],
        maximal_learning_rate: Union[FloatTensorLike, Callable],
        step_size: FloatTensorLike,
        scale_mode: str = "iterations",
        gamma: FloatTensorLike = 1.0,
        name: str = "ExponentialCyclicalLearningRate",
    ):
264 """Applies exponential cyclical schedule to the learning rate.
266 See Cyclical Learning Rates for Training Neural Networks. https://arxiv.org/abs/1506.01186
269 ```python
270 from tf.keras.optimizers import schedules
272 lr_schedule = ExponentialCyclicalLearningRate(
273 initial_learning_rate=1e-4,
274 maximal_learning_rate=1e-2,
275 step_size=2000,
276 scale_mode="cycle",
277 gamma=0.96,
278 name="MyCyclicScheduler")
280 model.compile(optimizer=tf.keras.optimizers.SGD(
281 learning_rate=lr_schedule),
282 loss='sparse_categorical_crossentropy',
283 metrics=['accuracy'])
285 model.fit(data, labels, epochs=5)
286 ```
288 You can pass this schedule directly into a
289 `tf.keras.optimizers.legacy.Optimizer` as the learning rate.
291 Args:
292 initial_learning_rate: A scalar `float32` or `float64` `Tensor` or
293 a Python number. The initial learning rate.
294 maximal_learning_rate: A scalar `float32` or `float64` `Tensor` or
295 a Python number. The maximum learning rate.
296 step_size: A scalar `float32` or `float64` `Tensor` or a
297 Python number. Step size denotes the number of training iterations it takes to get to maximal_learning_rate
298 scale_mode: ['cycle', 'iterations']. Mode to apply during cyclic
299 schedule
300 gamma: A scalar `float32` or `float64` `Tensor` or a
301 Python number. Gamma value.
302 name: (Optional) Name for the operation.
304 Returns:
305 Updated learning rate value.
306 """
        self.gamma = gamma
        super().__init__(
            initial_learning_rate=initial_learning_rate,
            maximal_learning_rate=maximal_learning_rate,
            step_size=step_size,
            scale_fn=lambda x: gamma**x,
            scale_mode=scale_mode,
            name=name,
        )
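
    # Note (added for illustration): scale_fn(x) = gamma**x shrinks the
    # amplitude exponentially. Under the default scale_mode="iterations" the
    # exponent is the raw step count, so the envelope decays a little on every
    # training step rather than once per cycle.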

    def get_config(self):
        return {
            "initial_learning_rate": self.initial_learning_rate,
            "maximal_learning_rate": self.maximal_learning_rate,
            "step_size": self.step_size,
            "scale_mode": self.scale_mode,
            "gamma": self.gamma,
        }
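

# A minimal usage sketch, not part of the original module: evaluate one of the
# schedules at a few steps to trace a full triangular cycle. Only the classes
# defined above are assumed.
if __name__ == "__main__":
    schedule = TriangularCyclicalLearningRate(
        initial_learning_rate=1e-4,
        maximal_learning_rate=1e-2,
        step_size=2000,
    )
    for step in [0, 1000, 2000, 3000, 4000]:
        # Expected: 1e-4, 5.05e-3, 1e-2, 5.05e-3, 1e-4
        print(step, float(schedule(step)))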