Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/keras/src/initializers/initializers_v1.py: 80%
55 statements
coverage.py v7.4.0, created at 2024-01-03 07:57 +0000
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras initializers for TF 1."""

import tensorflow.compat.v2 as tf

# isort: off
from tensorflow.python.util.tf_export import keras_export

_v1_zeros_initializer = tf.compat.v1.zeros_initializer
_v1_ones_initializer = tf.compat.v1.ones_initializer
_v1_constant_initializer = tf.compat.v1.constant_initializer
_v1_variance_scaling_initializer = tf.compat.v1.variance_scaling_initializer
_v1_orthogonal_initializer = tf.compat.v1.orthogonal_initializer
_v1_identity = tf.compat.v1.initializers.identity
_v1_glorot_uniform_initializer = tf.compat.v1.glorot_uniform_initializer
_v1_glorot_normal_initializer = tf.compat.v1.glorot_normal_initializer

keras_export(
    v1=["keras.initializers.Zeros", "keras.initializers.zeros"],
    allow_multiple_exports=True,
)(_v1_zeros_initializer)
keras_export(
    v1=["keras.initializers.Ones", "keras.initializers.ones"],
    allow_multiple_exports=True,
)(_v1_ones_initializer)
keras_export(
    v1=["keras.initializers.Constant", "keras.initializers.constant"],
    allow_multiple_exports=True,
)(_v1_constant_initializer)
keras_export(
    v1=["keras.initializers.VarianceScaling"], allow_multiple_exports=True
)(_v1_variance_scaling_initializer)
keras_export(
    v1=["keras.initializers.Orthogonal", "keras.initializers.orthogonal"],
    allow_multiple_exports=True,
)(_v1_orthogonal_initializer)
keras_export(
    v1=["keras.initializers.Identity", "keras.initializers.identity"],
    allow_multiple_exports=True,
)(_v1_identity)
keras_export(
    v1=["keras.initializers.glorot_uniform"], allow_multiple_exports=True
)(_v1_glorot_uniform_initializer)
keras_export(
    v1=["keras.initializers.glorot_normal"], allow_multiple_exports=True
)(_v1_glorot_normal_initializer)
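
# The aliases above are plain re-exports: after `keras_export` runs, the v1
# Keras names resolve to the same initializer classes as their `tf.compat.v1`
# counterparts. A minimal usage sketch (the `_example_*` helper below is
# illustrative only, not part of this module's API):
def _example_reexported_initializers():
    zeros = tf.compat.v1.keras.initializers.Zeros()
    ones = tf.compat.v1.keras.initializers.Ones()
    # Each initializer is callable with a shape (and optional dtype) and
    # evaluates eagerly under TF2 as well as inside TF1 graphs.
    print(zeros(shape=(2, 3)))                   # all zeros, float32
    print(ones(shape=(2, 3), dtype=tf.float16))  # all ones, float16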
@keras_export(
    v1=[
        "keras.initializers.RandomNormal",
        "keras.initializers.random_normal",
        "keras.initializers.normal",
    ]
)
class RandomNormal(tf.compat.v1.random_normal_initializer):
    """Initializer that generates a normal distribution.

    Args:
      mean: a python scalar or a scalar tensor. Mean of the random values to
        generate.
      stddev: a python scalar or a scalar tensor. Standard deviation of the
        random values to generate.
      seed: A Python integer. Used to create random seeds. See
        `tf.compat.v1.set_random_seed` for behavior.
      dtype: Default data type, used if no `dtype` argument is provided when
        calling the initializer. Only floating point types are supported.

    @compatibility(TF2)
    Although it is a legacy `compat.v1` API,
    `tf.compat.v1.keras.initializers.RandomNormal` is compatible with eager
    execution and `tf.function`.

    To switch to native TF2, switch to using
    `tf.keras.initializers.RandomNormal` (not from `compat.v1`). If you need
    to change the default dtype, use `tf.keras.backend.set_floatx(float_dtype)`
    or pass the dtype when calling the initializer, rather than passing it
    when constructing the initializer.

    Random seed behavior:

    Also be aware that if you pass a seed to the TF2 initializer API, it will
    reuse that same seed for every single initialization (unlike the TF1
    initializer).

    #### Structural Mapping to Native TF2

    Before:

    ```python
    initializer = tf.compat.v1.keras.initializers.RandomNormal(
      mean=mean,
      stddev=stddev,
      seed=seed,
      dtype=dtype)

    weight_one = tf.Variable(initializer(shape_one))
    weight_two = tf.Variable(initializer(shape_two))
    ```

    After:

    ```python
    initializer = tf.keras.initializers.RandomNormal(
      mean=mean,
      # seed=seed,  # Setting a seed in the native TF2 API
                    # causes it to produce the same initializations
                    # across multiple calls of the same initializer.
      stddev=stddev)

    weight_one = tf.Variable(initializer(shape_one, dtype=dtype))
    weight_two = tf.Variable(initializer(shape_two, dtype=dtype))
    ```

    #### How to Map Arguments

    | TF1 Arg Name      | TF2 Arg Name    | Note                       |
    | :---------------- | :-------------- | :------------------------- |
    | `mean`            | `mean`          | No change to defaults |
    | `stddev`          | `stddev`        | No change to defaults |
    | `seed`            | `seed`          | Different random number generation |
    :                   :                 : semantics (to change in a :
    :                   :                 : future version). If set, the TF2 version :
    :                   :                 : will use stateless random number :
    :                   :                 : generation which will produce the exact :
    :                   :                 : same initialization even across multiple :
    :                   :                 : calls of the initializer instance. The :
    :                   :                 : `compat.v1` version will generate new :
    :                   :                 : initializations each time. Do not set :
    :                   :                 : a seed if you need different :
    :                   :                 : initializations each time. Instead :
    :                   :                 : either set a global tf seed with :
    :                   :                 : `tf.random.set_seed` if you need :
    :                   :                 : determinism, or initialize each weight :
    :                   :                 : with a separate initializer instance :
    :                   :                 : and a different seed. :
    | `dtype`           | `dtype`         | The TF2 native API only takes it |
    :                   :                 : as a `__call__` arg, not a constructor arg. :
    | `partition_info`  | -               | (`__call__` arg in TF1) Not supported |

    #### Example of fixed-seed behavior differences

    `compat.v1` Fixed seed behavior:

    >>> initializer = tf.compat.v1.keras.initializers.RandomNormal(seed=10)
    >>> a = initializer(shape=(2, 2))
    >>> b = initializer(shape=(2, 2))
    >>> tf.reduce_sum(a - b) == 0
    <tf.Tensor: shape=(), dtype=bool, numpy=False>

    After:

    >>> initializer = tf.keras.initializers.RandomNormal(seed=10)
    >>> a = initializer(shape=(2, 2))
    >>> b = initializer(shape=(2, 2))
    >>> tf.reduce_sum(a - b) == 0
    <tf.Tensor: shape=(), dtype=bool, numpy=True>

    @end_compatibility
    """

    def __init__(self, mean=0.0, stddev=0.05, seed=None, dtype=tf.float32):
        super().__init__(mean=mean, stddev=stddev, seed=seed, dtype=dtype)
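
# A short sketch of typical use as a layer initializer. This assumes a Keras
# layer accepts any callable of `(shape, dtype)` as `kernel_initializer`,
# which the `compat.v1` initializer instances satisfy (the `_example_*`
# helper and the layer sizes are illustrative only):
def _example_random_normal_as_kernel_initializer():
    init = RandomNormal(mean=0.0, stddev=0.02)
    dense = tf.keras.layers.Dense(4, kernel_initializer=init)
    dense.build(input_shape=(None, 8))
    # The kernel is drawn from a normal with mean 0 and stddev 0.02; a fresh
    # draw happens on each build() call because no seed was set.
    print(dense.kernel.shape)  # (8, 4)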
@keras_export(
    v1=[
        "keras.initializers.RandomUniform",
        "keras.initializers.random_uniform",
        "keras.initializers.uniform",
    ]
)
class RandomUniform(tf.compat.v1.random_uniform_initializer):
    """Initializer that generates tensors with a uniform distribution.

    Args:
      minval: A python scalar or a scalar tensor. Lower bound of the range of
        random values to generate. Defaults to `-0.05`.
      maxval: A python scalar or a scalar tensor. Upper bound of the range of
        random values to generate. Defaults to `0.05`.
      seed: A Python integer. Used to create random seeds. See
        `tf.compat.v1.set_random_seed` for behavior.
      dtype: Default data type, used if no `dtype` argument is provided when
        calling the initializer.

    @compatibility(TF2)
    Although it is a legacy `compat.v1` API,
    `tf.compat.v1.keras.initializers.RandomUniform` is compatible with eager
    execution and `tf.function`.

    To switch to native TF2, switch to using
    `tf.keras.initializers.RandomUniform` (not from `compat.v1`). If you need
    to change the default dtype, use `tf.keras.backend.set_floatx(float_dtype)`
    or pass the dtype when calling the initializer, rather than passing it
    when constructing the initializer.

    Random seed behavior:

    Also be aware that if you pass a seed to the TF2 initializer API, it will
    reuse that same seed for every single initialization (unlike the TF1
    initializer).

    #### Structural Mapping to Native TF2

    Before:

    ```python
    initializer = tf.compat.v1.keras.initializers.RandomUniform(
      minval=minval,
      maxval=maxval,
      seed=seed,
      dtype=dtype)

    weight_one = tf.Variable(initializer(shape_one))
    weight_two = tf.Variable(initializer(shape_two))
    ```

    After:

    ```python
    initializer = tf.keras.initializers.RandomUniform(
      minval=minval,
      maxval=maxval,
      # seed=seed,  # Setting a seed in the native TF2 API
                    # causes it to produce the same initializations
                    # across multiple calls of the same initializer.
      )

    weight_one = tf.Variable(initializer(shape_one, dtype=dtype))
    weight_two = tf.Variable(initializer(shape_two, dtype=dtype))
    ```

    #### How to Map Arguments

    | TF1 Arg Name      | TF2 Arg Name    | Note                       |
    | :---------------- | :-------------- | :------------------------- |
    | `minval`          | `minval`        | No change to defaults |
    | `maxval`          | `maxval`        | No change to defaults |
    | `seed`            | `seed`          | Different random number generation |
    :                   :                 : semantics (to change in a :
    :                   :                 : future version). If set, the TF2 version :
    :                   :                 : will use stateless random number :
    :                   :                 : generation which will produce the exact :
    :                   :                 : same initialization even across multiple :
    :                   :                 : calls of the initializer instance. The :
    :                   :                 : `compat.v1` version will generate new :
    :                   :                 : initializations each time. Do not set :
    :                   :                 : a seed if you need different :
    :                   :                 : initializations each time. Instead :
    :                   :                 : either set a global tf seed with :
    :                   :                 : `tf.random.set_seed` if you need :
    :                   :                 : determinism, or initialize each weight :
    :                   :                 : with a separate initializer instance :
    :                   :                 : and a different seed. :
    | `dtype`           | `dtype`         | The TF2 native API only takes it |
    :                   :                 : as a `__call__` arg, not a constructor arg. :
    | `partition_info`  | -               | (`__call__` arg in TF1) Not supported |

    #### Example of fixed-seed behavior differences

    `compat.v1` Fixed seed behavior:

    >>> initializer = tf.compat.v1.keras.initializers.RandomUniform(seed=10)
    >>> a = initializer(shape=(2, 2))
    >>> b = initializer(shape=(2, 2))
    >>> tf.reduce_sum(a - b) == 0
    <tf.Tensor: shape=(), dtype=bool, numpy=False>

    After:

    >>> initializer = tf.keras.initializers.RandomUniform(seed=10)
    >>> a = initializer(shape=(2, 2))
    >>> b = initializer(shape=(2, 2))
    >>> tf.reduce_sum(a - b) == 0
    <tf.Tensor: shape=(), dtype=bool, numpy=True>

    @end_compatibility
    """

    def __init__(self, minval=-0.05, maxval=0.05, seed=None, dtype=tf.float32):
        super().__init__(minval=minval, maxval=maxval, seed=seed, dtype=dtype)
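
# The mapping table above recommends a global `tf.random.set_seed` (rather
# than a per-initializer seed) when you want reproducible runs but still
# want a different draw on every call. A minimal sketch of that
# recommendation (the `_example_*` helper is illustrative only):
def _example_global_seed_for_determinism():
    tf.random.set_seed(1234)
    init = RandomUniform(minval=-0.05, maxval=0.05)
    a = init(shape=(2, 2))
    b = init(shape=(2, 2))
    # With only the global seed set, successive calls still produce
    # different initializations, while re-running the program with the same
    # global seed reproduces the same sequence of draws.
    print(bool(tf.reduce_all(tf.equal(a, b))))  # typically False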
@keras_export(
    v1=[
        "keras.initializers.TruncatedNormal",
        "keras.initializers.truncated_normal",
    ]
)
class TruncatedNormal(tf.compat.v1.truncated_normal_initializer):
    """Initializer that generates a truncated normal distribution.

    These values are similar to values from a `random_normal_initializer`
    except that values more than two standard deviations from the mean
    are discarded and re-drawn. This is the recommended initializer for
    neural network weights and filters.

    Args:
      mean: a python scalar or a scalar tensor. Mean of the random values to
        generate.
      stddev: a python scalar or a scalar tensor. Standard deviation of the
        random values to generate.
      seed: A Python integer. Used to create random seeds. See
        `tf.compat.v1.set_random_seed` for behavior.
      dtype: Default data type, used if no `dtype` argument is provided when
        calling the initializer. Only floating point types are supported.

    @compatibility(TF2)
    Although it is a legacy `compat.v1` API,
    `tf.compat.v1.keras.initializers.TruncatedNormal` is compatible with eager
    execution and `tf.function`.

    To switch to native TF2, switch to using
    `tf.keras.initializers.TruncatedNormal` (not from `compat.v1`). If you
    need to change the default dtype, use
    `tf.keras.backend.set_floatx(float_dtype)`
    or pass the dtype when calling the initializer, rather than passing it
    when constructing the initializer.

    Random seed behavior:

    Also be aware that if you pass a seed to the TF2 initializer API, it will
    reuse that same seed for every single initialization (unlike the TF1
    initializer).

    #### Structural Mapping to Native TF2

    Before:

    ```python
    initializer = tf.compat.v1.keras.initializers.TruncatedNormal(
      mean=mean,
      stddev=stddev,
      seed=seed,
      dtype=dtype)

    weight_one = tf.Variable(initializer(shape_one))
    weight_two = tf.Variable(initializer(shape_two))
    ```

    After:

    ```python
    initializer = tf.keras.initializers.TruncatedNormal(
      mean=mean,
      # seed=seed,  # Setting a seed in the native TF2 API
                    # causes it to produce the same initializations
                    # across multiple calls of the same initializer.
      stddev=stddev)

    weight_one = tf.Variable(initializer(shape_one, dtype=dtype))
    weight_two = tf.Variable(initializer(shape_two, dtype=dtype))
    ```

    #### How to Map Arguments

    | TF1 Arg Name      | TF2 Arg Name    | Note                       |
    | :---------------- | :-------------- | :------------------------- |
    | `mean`            | `mean`          | No change to defaults |
    | `stddev`          | `stddev`        | No change to defaults |
    | `seed`            | `seed`          | Different random number generation |
    :                   :                 : semantics (to change in a :
    :                   :                 : future version). If set, the TF2 version :
    :                   :                 : will use stateless random number :
    :                   :                 : generation which will produce the exact :
    :                   :                 : same initialization even across multiple :
    :                   :                 : calls of the initializer instance. The :
    :                   :                 : `compat.v1` version will generate new :
    :                   :                 : initializations each time. Do not set :
    :                   :                 : a seed if you need different :
    :                   :                 : initializations each time. Instead :
    :                   :                 : either set a global tf seed with :
    :                   :                 : `tf.random.set_seed` if you need :
    :                   :                 : determinism, or initialize each weight :
    :                   :                 : with a separate initializer instance :
    :                   :                 : and a different seed. :
    | `dtype`           | `dtype`         | The TF2 native API only takes it |
    :                   :                 : as a `__call__` arg, not a constructor arg. :
    | `partition_info`  | -               | (`__call__` arg in TF1) Not supported |

    #### Example of fixed-seed behavior differences

    `compat.v1` Fixed seed behavior:

    >>> initializer = tf.compat.v1.keras.initializers.TruncatedNormal(seed=10)
    >>> a = initializer(shape=(2, 2))
    >>> b = initializer(shape=(2, 2))
    >>> tf.reduce_sum(a - b) == 0
    <tf.Tensor: shape=(), dtype=bool, numpy=False>

    After:

    >>> initializer = tf.keras.initializers.TruncatedNormal(seed=10)
    >>> a = initializer(shape=(2, 2))
    >>> b = initializer(shape=(2, 2))
    >>> tf.reduce_sum(a - b) == 0
    <tf.Tensor: shape=(), dtype=bool, numpy=True>

    @end_compatibility
    """

    def __init__(self, mean=0.0, stddev=0.05, seed=None, dtype=tf.float32):
        """Initializer that generates a truncated normal distribution.

        Args:
          mean: a python scalar or a scalar tensor. Mean of the random values
            to generate.
          stddev: a python scalar or a scalar tensor. Standard deviation of
            the random values to generate.
          seed: A Python integer. Used to create random seeds. See
            `tf.compat.v1.set_random_seed` for behavior.
          dtype: Default data type, used if no `dtype` argument is provided
            when calling the initializer. Only floating point types are
            supported.
        """
        super().__init__(mean=mean, stddev=stddev, seed=seed, dtype=dtype)
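
# A quick numerical sketch of the truncation property described above: values
# farther than two standard deviations from the mean are re-drawn, so every
# sample lands within mean +/- 2 * stddev (the `_example_*` helper is
# illustrative only, not part of this module's API):
def _example_truncated_normal_is_bounded():
    init = TruncatedNormal(mean=0.0, stddev=1.0)
    samples = init(shape=(10000,))
    # The maximum absolute value stays at or below 2 * stddev = 2.0.
    print(float(tf.reduce_max(tf.abs(samples))))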
@keras_export(v1=["keras.initializers.lecun_normal"])
class LecunNormal(tf.compat.v1.variance_scaling_initializer):
    """LeCun normal initializer: variance scaling with `scale=1.0`,
    `mode="fan_in"`, and a truncated normal distribution."""

    def __init__(self, seed=None):
        super().__init__(
            scale=1.0, mode="fan_in", distribution="truncated_normal", seed=seed
        )

    def get_config(self):
        return {"seed": self.seed}


@keras_export(v1=["keras.initializers.lecun_uniform"])
class LecunUniform(tf.compat.v1.variance_scaling_initializer):
    """LeCun uniform initializer: variance scaling with `scale=1.0`,
    `mode="fan_in"`, and a uniform distribution."""

    def __init__(self, seed=None):
        super().__init__(
            scale=1.0, mode="fan_in", distribution="uniform", seed=seed
        )

    def get_config(self):
        return {"seed": self.seed}


@keras_export(v1=["keras.initializers.he_normal"])
class HeNormal(tf.compat.v1.variance_scaling_initializer):
    """He normal initializer: variance scaling with `scale=2.0`,
    `mode="fan_in"`, and a truncated normal distribution."""

    def __init__(self, seed=None):
        super().__init__(
            scale=2.0, mode="fan_in", distribution="truncated_normal", seed=seed
        )

    def get_config(self):
        return {"seed": self.seed}


@keras_export(v1=["keras.initializers.he_uniform"])
class HeUniform(tf.compat.v1.variance_scaling_initializer):
    """He uniform initializer: variance scaling with `scale=2.0`,
    `mode="fan_in"`, and a uniform distribution."""

    def __init__(self, seed=None):
        super().__init__(
            scale=2.0, mode="fan_in", distribution="uniform", seed=seed
        )

    def get_config(self):
        return {"seed": self.seed}
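
# The four classes above differ only in the fixed `scale`, `mode`, and
# `distribution` arguments, so their serialized config carries just the seed.
# A small round-trip sketch (the `_example_*` helper is illustrative only,
# not part of this module's API):
def _example_he_normal_config_roundtrip():
    init = HeNormal(seed=42)
    config = init.get_config()
    print(config)  # {'seed': 42}
    # Rebuild an equivalent initializer from the stored config.
    restored = HeNormal(**config)
    print(restored.seed)  # 42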