Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/keras/src/applications/regnet.py: 35%
282 statements
« prev ^ index » next coverage.py v7.4.0, created at 2024-01-03 07:57 +0000
« prev ^ index » next coverage.py v7.4.0, created at 2024-01-03 07:57 +0000
1# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14# ==============================================================================
17"""RegNet models for Keras.
19References:
21- [Designing Network Design Spaces](https://arxiv.org/abs/2003.13678)
22 (CVPR 2020)
23- [Fast and Accurate Model Scaling](https://arxiv.org/abs/2103.06877)
24 (CVPR 2021)
25"""
27import tensorflow.compat.v2 as tf
29from keras.src import backend
30from keras.src import layers
31from keras.src.applications import imagenet_utils
32from keras.src.engine import training
33from keras.src.utils import data_utils
34from keras.src.utils import layer_utils
36# isort: off
37from tensorflow.python.util.tf_export import keras_export
# Root URL from which pretrained weight files are downloaded; the per-variant
# file name is appended in RegNet() as `model_name + file_suffix`.
BASE_WEIGHTS_PATH = (
    "https://storage.googleapis.com/tensorflow/keras-applications/regnet/"
)

# Maps the 4-character variant key (e.g. "x002", "y320") to a pair of SHA-256
# file hashes: (hash of the full with-top weights, hash of the "_notop"
# weights). RegNet() looks these up via `WEIGHTS_HASHES[model_name[-4:]]`,
# so custom `model_name` values must end with one of these keys when
# `weights="imagenet"`.
WEIGHTS_HASHES = {
    "x002": (
        "49fb46e56cde07fdaf57bffd851461a86548f6a3a4baef234dd37290b826c0b8",
        "5445b66cd50445eb7ecab094c1e78d4d3d29375439d1a7798861c4af15ffff21",
    ),
    "x004": (
        "3523c7f5ac0dbbcc2fd6d83b3570e7540f7449d3301cc22c29547302114e4088",
        "de139bf07a66c9256f2277bf5c1b6dd2d5a3a891a5f8a925a10c8a0a113fd6f3",
    ),
    "x006": (
        "340216ef334a7bae30daac9f414e693c136fac9ab868704bbfcc9ce6a5ec74bb",
        "a43ec97ad62f86b2a96a783bfdc63a5a54de02eef54f26379ea05e1bf90a9505",
    ),
    "x008": (
        "8f145d6a5fae6da62677bb8d26eb92d0b9dfe143ec1ebf68b24a57ae50a2763d",
        "3c7e4b0917359304dc18e644475c5c1f5e88d795542b676439c4a3acd63b7207",
    ),
    "x016": (
        "31c386f4c7bfef4c021a583099aa79c1b3928057ba1b7d182f174674c5ef3510",
        "1b8e3d545d190271204a7b2165936a227d26b79bb7922bac5ee4d303091bf17a",
    ),
    "x032": (
        "6c025df1409e5ea846375bc9dfa240956cca87ef57384d93fef7d6fa90ca8c7f",
        "9cd4522806c0fcca01b37874188b2bd394d7c419956d77472a4e072b01d99041",
    ),
    "x040": (
        "ba128046c588a26dbd3b3a011b26cb7fa3cf8f269c184c132372cb20b6eb54c1",
        "b4ed0ca0b9a98e789e05000e830403a7ade4d8afa01c73491c44610195198afe",
    ),
    "x064": (
        "0f4489c3cd3ad979bd6b0324213998bcb36dc861d178f977997ebfe53c3ba564",
        "3e706fa416a18dfda14c713423eba8041ae2509db3e0a611d5f599b5268a46c4",
    ),
    "x080": (
        "76320e43272719df648db37271a247c22eb6e810fe469c37a5db7e2cb696d162",
        "7b1ce8e29ceefec10a6569640ee329dba7fbc98b5d0f6346aabade058b66cf29",
    ),
    "x120": (
        "5cafc461b78897d5e4f24e68cb406d18e75f31105ef620e7682b611bb355eb3a",
        "36174ddd0299db04a42631d028abcb1cc7afec2b705e42bd28fcd325e5d596bf",
    ),
    "x160": (
        "8093f57a5824b181fb734ea21ae34b1f7ee42c5298e63cf6d587c290973195d2",
        "9d1485050bdf19531ffa1ed7827c75850e0f2972118a996b91aa9264b088fd43",
    ),
    "x320": (
        "91fb3e6f4e9e44b3687e80977f7f4412ee9937c0c704232664fc83e4322ea01e",
        "9db7eacc37b85c98184070e1a172e6104c00846f44bcd4e727da9e50d9692398",
    ),
    "y002": (
        "1e8091c674532b1a61c04f6393a9c570113e0197f22bd1b98cc4c4fe800c6465",
        "f63221f63d625b8e201221499682587bfe29d33f50a4c4f4d53be00f66c0f12c",
    ),
    "y004": (
        "752fdbad21c78911bf1dcb8c513e5a0e14697b068e5d9e73525dbaa416d18d8e",
        "45e6ba8309a17a77e67afc05228454b2e0ee6be0dae65edc0f31f1da10cc066b",
    ),
    "y006": (
        "98942e07b273da500ff9699a1f88aca78dfad4375faabb0bab784bb0dace80a9",
        "b70261cba4e60013c99d130cc098d2fce629ff978a445663b6fa4f8fc099a2be",
    ),
    "y008": (
        "1b099377cc9a4fb183159a6f9b24bc998e5659d25a449f40c90cbffcbcfdcae4",
        "b11f5432a216ee640fe9be6e32939defa8d08b8d136349bf3690715a98752ca1",
    ),
    "y016": (
        "b7ce1f5e223f0941c960602de922bcf846288ce7a4c33b2a4f2e4ac4b480045b",
        "d7404f50205e82d793e219afb9eb2bfeb781b6b2d316a6128c6d7d7dacab7f57",
    ),
    "y032": (
        "6a6a545cf3549973554c9b94f0cd40e25f229fffb1e7f7ac779a59dcbee612bd",
        "eb3ac1c45ec60f4f031c3f5180573422b1cf7bebc26c004637517372f68f8937",
    ),
    "y040": (
        "98d00118b335162bbffe8f1329e54e5c8e75ee09b2a5414f97b0ddfc56e796f6",
        "b5be2a5e5f072ecdd9c0b8a437cd896df0efa1f6a1f77e41caa8719b7dfcb05d",
    ),
    "y064": (
        "65c948c7a18aaecaad2d1bd4fd978987425604ba6669ef55a1faa0069a2804b7",
        "885c4b7ed7ea339daca7dafa1a62cb7d41b1068897ef90a5a3d71b4a2e2db31a",
    ),
    "y080": (
        "7a2c62da2982e369a4984d3c7c3b32d6f8d3748a71cb37a31156c436c37f3e95",
        "3d119577e1e3bf8d153b895e8ea9e4ec150ff2d92abdca711b6e949c3fd7115d",
    ),
    "y120": (
        "a96ab0d27d3ae35a422ee7df0d789069b3e3217a99334e0ce861a96595bc5986",
        "4a6fa387108380b730b71feea2ad80b5224b5ea9dc21dc156c93fe3c6186485c",
    ),
    "y160": (
        "45067240ffbc7ca2591313fee2f80dbdda6d66ec1a7451446f9a6d00d8f7ac6e",
        "ead1e6b568be8f34447ec8941299a9df4368736ba9a8205de5427fa20a1fb316",
    ),
    "y320": (
        "b05e173e4ae635cfa22d06392ee3741284d17dadfee68f2aa6fd8cb2b7561112",
        "cad78f74a586e24c61d38be17f3ae53bb9674380174d2585da1a526b8c20e1fd",
    ),
}
142# The widths and depths are deduced from a quantized linear function. For
143# more information, please refer to "Designing Network Design Spaces" by
144# Radosavovic et al.
146# BatchNorm momentum and epsilon values taken from original implementation.
# Per-variant architecture hyperparameters, keyed by the same 4-character
# variant key used in WEIGHTS_HASHES. Each entry holds:
#   depths: number of blocks in each of the 4 stages,
#   widths: output channel count of each stage,
#   group_width: channels per group in the grouped 3x3 convolutions,
#   default_size: default square input resolution,
#   block_type: "X" (plain) or "Y" (with Squeeze-and-Excite).
MODEL_CONFIGS = {
    "x002": {
        "depths": [1, 1, 4, 7],
        "widths": [24, 56, 152, 368],
        "group_width": 8,
        "default_size": 224,
        "block_type": "X",
    },
    "x004": {
        "depths": [1, 2, 7, 12],
        "widths": [32, 64, 160, 384],
        "group_width": 16,
        "default_size": 224,
        "block_type": "X",
    },
    "x006": {
        "depths": [1, 3, 5, 7],
        "widths": [48, 96, 240, 528],
        "group_width": 24,
        "default_size": 224,
        "block_type": "X",
    },
    "x008": {
        "depths": [1, 3, 7, 5],
        "widths": [64, 128, 288, 672],
        "group_width": 16,
        "default_size": 224,
        "block_type": "X",
    },
    "x016": {
        "depths": [2, 4, 10, 2],
        "widths": [72, 168, 408, 912],
        "group_width": 24,
        "default_size": 224,
        "block_type": "X",
    },
    "x032": {
        "depths": [2, 6, 15, 2],
        "widths": [96, 192, 432, 1008],
        "group_width": 48,
        "default_size": 224,
        "block_type": "X",
    },
    "x040": {
        "depths": [2, 5, 14, 2],
        "widths": [80, 240, 560, 1360],
        "group_width": 40,
        "default_size": 224,
        "block_type": "X",
    },
    "x064": {
        "depths": [2, 4, 10, 1],
        "widths": [168, 392, 784, 1624],
        "group_width": 56,
        "default_size": 224,
        "block_type": "X",
    },
    "x080": {
        "depths": [2, 5, 15, 1],
        "widths": [80, 240, 720, 1920],
        "group_width": 120,
        "default_size": 224,
        "block_type": "X",
    },
    "x120": {
        "depths": [2, 5, 11, 1],
        "widths": [224, 448, 896, 2240],
        "group_width": 112,
        "default_size": 224,
        "block_type": "X",
    },
    "x160": {
        "depths": [2, 6, 13, 1],
        "widths": [256, 512, 896, 2048],
        "group_width": 128,
        "default_size": 224,
        "block_type": "X",
    },
    "x320": {
        "depths": [2, 7, 13, 1],
        "widths": [336, 672, 1344, 2520],
        "group_width": 168,
        "default_size": 224,
        "block_type": "X",
    },
    "y002": {
        "depths": [1, 1, 4, 7],
        "widths": [24, 56, 152, 368],
        "group_width": 8,
        "default_size": 224,
        "block_type": "Y",
    },
    "y004": {
        "depths": [1, 3, 6, 6],
        "widths": [48, 104, 208, 440],
        "group_width": 8,
        "default_size": 224,
        "block_type": "Y",
    },
    "y006": {
        "depths": [1, 3, 7, 4],
        "widths": [48, 112, 256, 608],
        "group_width": 16,
        "default_size": 224,
        "block_type": "Y",
    },
    "y008": {
        "depths": [1, 3, 8, 2],
        "widths": [64, 128, 320, 768],
        "group_width": 16,
        "default_size": 224,
        "block_type": "Y",
    },
    "y016": {
        "depths": [2, 6, 17, 2],
        "widths": [48, 120, 336, 888],
        "group_width": 24,
        "default_size": 224,
        "block_type": "Y",
    },
    "y032": {
        "depths": [2, 5, 13, 1],
        "widths": [72, 216, 576, 1512],
        "group_width": 24,
        "default_size": 224,
        "block_type": "Y",
    },
    "y040": {
        "depths": [2, 6, 12, 2],
        "widths": [128, 192, 512, 1088],
        "group_width": 64,
        "default_size": 224,
        "block_type": "Y",
    },
    "y064": {
        "depths": [2, 7, 14, 2],
        "widths": [144, 288, 576, 1296],
        "group_width": 72,
        "default_size": 224,
        "block_type": "Y",
    },
    "y080": {
        "depths": [2, 4, 10, 1],
        "widths": [168, 448, 896, 2016],
        "group_width": 56,
        "default_size": 224,
        "block_type": "Y",
    },
    "y120": {
        "depths": [2, 5, 11, 1],
        "widths": [224, 448, 896, 2240],
        "group_width": 112,
        "default_size": 224,
        "block_type": "Y",
    },
    "y160": {
        "depths": [2, 4, 11, 1],
        "widths": [224, 448, 1232, 3024],
        "group_width": 112,
        "default_size": 224,
        "block_type": "Y",
    },
    "y320": {
        "depths": [2, 5, 12, 1],
        "widths": [232, 696, 1392, 3712],
        "group_width": 232,
        "default_size": 224,
        "block_type": "Y",
    },
}
# Docstring template shared by all RegNet model factories; `{name}` is filled
# in with the concrete variant name (e.g. "RegNetX002") via str.format.
BASE_DOCSTRING = """Instantiates the {name} architecture.

  Reference:
  - [Designing Network Design Spaces](https://arxiv.org/abs/2003.13678)
  (CVPR 2020)

  For image classification use cases, see
  [this page for detailed examples](
  https://keras.io/api/applications/#usage-examples-for-image-classification-models).

  For transfer learning use cases, make sure to read the
  [guide to transfer learning & fine-tuning](
  https://keras.io/guides/transfer_learning/).

  Note: Each Keras Application expects a specific kind of input preprocessing.
  For Regnets, preprocessing is included in the model using a `Rescaling` layer.
  RegNet models expect their inputs to be float or uint8 tensors of pixels with
  values in the [0-255] range.

  The naming of models is as follows: `RegNet<block_type><flops>` where
  `block_type` is one of `(X, Y)` and `flops` signifies hundred million
  floating point operations. For example RegNetY064 corresponds to RegNet with
  Y block and 6.4 giga flops (64 hundred million flops).

  Args:
    include_top: Whether to include the fully-connected
      layer at the top of the network. Defaults to True.
    weights: One of `None` (random initialization),
      `"imagenet"` (pre-training on ImageNet), or the path to the weights
      file to be loaded. Defaults to `"imagenet"`.
    input_tensor: Optional Keras tensor
      (i.e. output of `layers.Input()`)
      to use as image input for the model.
    input_shape: Optional shape tuple, only to be specified
      if `include_top` is False.
      It should have exactly 3 inputs channels.
    pooling: Optional pooling mode for feature extraction
      when `include_top` is `False`. Defaults to None.
      - `None` means that the output of the model will be
        the 4D tensor output of the
        last convolutional layer.
      - `avg` means that global average pooling
        will be applied to the output of the
        last convolutional layer, and thus
        the output of the model will be a 2D tensor.
      - `max` means that global max pooling will
        be applied.
    classes: Optional number of classes to classify images
      into, only to be specified if `include_top` is True, and
      if no `weights` argument is specified. Defaults to 1000 (number of
      ImageNet classes).
    classifier_activation: A `str` or callable. The activation function to use
      on the "top" layer. Ignored unless `include_top=True`. Set
      `classifier_activation=None` to return the logits of the "top" layer.
      Defaults to `"softmax"`.
      When loading pretrained weights, `classifier_activation` can only
      be `None` or `"softmax"`.

  Returns:
    A `keras.Model` instance.
"""
def PreStem(name=None):
    """Builds the RegNet pre-stem, which rescales pixel values to [0, 1].

    Note: despite what one might expect from other applications, no
    mean/std normalization is performed here — only a 1/255 rescaling.

    Args:
      name: name prefix

    Returns:
      A callable that applies the rescaling to an input tensor.
    """
    if name is None:
        name = "prestem" + str(backend.get_uid("prestem"))

    def apply(x):
        # Map uint8-style [0, 255] pixel values into the [0, 1] range.
        rescale = layers.Rescaling(
            scale=1.0 / 255.0, name=name + "_prestem_rescaling"
        )
        return rescale(x)

    return apply
def Stem(name=None):
    """RegNet stem: one strided 3x3 conv, then BatchNorm and ReLU.

    (Common to all model variants)
    Args:
      name: name prefix

    Returns:
      A callable producing the stem output tensor (32 channels, stride 2).
    """
    if name is None:
        name = "stem" + str(backend.get_uid("stem"))

    def apply(x):
        conv = layers.Conv2D(
            32,
            (3, 3),
            strides=2,
            use_bias=False,
            padding="same",
            kernel_initializer="he_normal",
            name=name + "_stem_conv",
        )
        bn = layers.BatchNormalization(
            momentum=0.9, epsilon=1e-5, name=name + "_stem_bn"
        )
        relu = layers.ReLU(name=name + "_stem_relu")
        return relu(bn(conv(x)))

    return apply
def SqueezeAndExciteBlock(filters_in, se_filters, name=None):
    """Implements the Squeeze & Excite block (https://arxiv.org/abs/1709.01507).

    Args:
      filters_in: input filters to the block
      se_filters: filters to squeeze to
      name: name prefix

    Returns:
      A function object
    """
    if name is None:
        name = str(backend.get_uid("squeeze_and_excite"))

    def apply(inputs):
        # Squeeze: one descriptor per channel; keepdims so the gate
        # broadcasts back over the spatial dimensions.
        se = layers.GlobalAveragePooling2D(
            name=name + "_squeeze_and_excite_gap", keepdims=True
        )(inputs)
        # Bottleneck down to se_filters with a ReLU...
        se = layers.Conv2D(
            se_filters,
            (1, 1),
            activation="relu",
            kernel_initializer="he_normal",
            name=name + "_squeeze_and_excite_squeeze",
        )(se)
        # ...then back up to filters_in with a sigmoid gate in [0, 1].
        se = layers.Conv2D(
            filters_in,
            (1, 1),
            activation="sigmoid",
            kernel_initializer="he_normal",
            name=name + "_squeeze_and_excite_excite",
        )(se)
        # Excite: rescale the input channels by the learned gate.
        return tf.math.multiply(se, inputs)

    return apply
def XBlock(filters_in, filters_out, group_width, stride=1, name=None):
    """Implementation of X Block.

    Reference: [Designing Network Design
    Spaces](https://arxiv.org/abs/2003.13678)
    Args:
      filters_in: filters in the input tensor
      filters_out: filters in the output tensor
      group_width: group width
      stride: stride
      name: name prefix
    Returns:
      Output tensor of the block
    """
    if name is None:
        name = str(backend.get_uid("xblock"))

    def apply(inputs):
        # A stride-1 block adds an identity shortcut, so its input and
        # output widths must agree.
        if stride == 1 and filters_in != filters_out:
            raise ValueError(
                f"Input filters({filters_in}) and output "
                f"filters({filters_out}) "
                f"are not equal for stride {stride}. Input and output filters "
                f"must be equal for stride={stride}."
            )

        groups = filters_out // group_width

        # Projection shortcut only when downsampling; identity otherwise.
        if stride != 1:
            skip = layers.Conv2D(
                filters_out,
                (1, 1),
                strides=stride,
                use_bias=False,
                kernel_initializer="he_normal",
                name=name + "_skip_1x1",
            )(inputs)
            skip = layers.BatchNormalization(
                momentum=0.9, epsilon=1e-5, name=name + "_skip_bn"
            )(skip)
        else:
            skip = inputs

        # Main path: 1x1 conv -> BN -> ReLU.
        out = layers.Conv2D(
            filters_out,
            (1, 1),
            use_bias=False,
            kernel_initializer="he_normal",
            name=name + "_conv_1x1_1",
        )(inputs)
        out = layers.BatchNormalization(
            momentum=0.9, epsilon=1e-5, name=name + "_conv_1x1_1_bn"
        )(out)
        out = layers.ReLU(name=name + "_conv_1x1_1_relu")(out)

        # Grouped 3x3 conv (carries the stride) -> BN -> ReLU.
        out = layers.Conv2D(
            filters_out,
            (3, 3),
            use_bias=False,
            strides=stride,
            groups=groups,
            padding="same",
            kernel_initializer="he_normal",
            name=name + "_conv_3x3",
        )(out)
        out = layers.BatchNormalization(
            momentum=0.9, epsilon=1e-5, name=name + "_conv_3x3_bn"
        )(out)
        out = layers.ReLU(name=name + "_conv_3x3_relu")(out)

        # Closing 1x1 conv -> BN, then join with the shortcut.
        out = layers.Conv2D(
            filters_out,
            (1, 1),
            use_bias=False,
            kernel_initializer="he_normal",
            name=name + "_conv_1x1_2",
        )(out)
        out = layers.BatchNormalization(
            momentum=0.9, epsilon=1e-5, name=name + "_conv_1x1_2_bn"
        )(out)

        return layers.ReLU(name=name + "_exit_relu")(out + skip)

    return apply
def YBlock(
    filters_in,
    filters_out,
    group_width,
    stride=1,
    squeeze_excite_ratio=0.25,
    name=None,
):
    """Implementation of Y Block (an X Block plus Squeeze-and-Excite).

    Reference: [Designing Network Design
    Spaces](https://arxiv.org/abs/2003.13678)
    Args:
      filters_in: filters in the input tensor
      filters_out: filters in the output tensor
      group_width: group width
      stride: stride
      squeeze_excite_ratio: expansion ratio for Squeeze and Excite block
      name: name prefix
    Returns:
      Output tensor of the block
    """
    if name is None:
        name = str(backend.get_uid("yblock"))

    def apply(inputs):
        # A stride-1 block adds an identity shortcut, so its input and
        # output widths must agree.
        if stride == 1 and filters_in != filters_out:
            raise ValueError(
                f"Input filters({filters_in}) and output "
                f"filters({filters_out}) "
                f"are not equal for stride {stride}. Input and output filters "
                f"must be equal for stride={stride}."
            )

        groups = filters_out // group_width
        # SE squeeze width is a ratio of the *input* width.
        se_filters = int(filters_in * squeeze_excite_ratio)

        # Projection shortcut only when downsampling; identity otherwise.
        if stride != 1:
            skip = layers.Conv2D(
                filters_out,
                (1, 1),
                strides=stride,
                use_bias=False,
                kernel_initializer="he_normal",
                name=name + "_skip_1x1",
            )(inputs)
            skip = layers.BatchNormalization(
                momentum=0.9, epsilon=1e-5, name=name + "_skip_bn"
            )(skip)
        else:
            skip = inputs

        # Main path: 1x1 conv -> BN -> ReLU.
        out = layers.Conv2D(
            filters_out,
            (1, 1),
            use_bias=False,
            kernel_initializer="he_normal",
            name=name + "_conv_1x1_1",
        )(inputs)
        out = layers.BatchNormalization(
            momentum=0.9, epsilon=1e-5, name=name + "_conv_1x1_1_bn"
        )(out)
        out = layers.ReLU(name=name + "_conv_1x1_1_relu")(out)

        # Grouped 3x3 conv (carries the stride) -> BN -> ReLU.
        out = layers.Conv2D(
            filters_out,
            (3, 3),
            use_bias=False,
            strides=stride,
            groups=groups,
            padding="same",
            kernel_initializer="he_normal",
            name=name + "_conv_3x3",
        )(out)
        out = layers.BatchNormalization(
            momentum=0.9, epsilon=1e-5, name=name + "_conv_3x3_bn"
        )(out)
        out = layers.ReLU(name=name + "_conv_3x3_relu")(out)

        # Squeeze-Excitation between the grouped conv and the closing 1x1.
        out = SqueezeAndExciteBlock(filters_out, se_filters, name=name)(out)

        # Closing 1x1 conv -> BN, then join with the shortcut.
        out = layers.Conv2D(
            filters_out,
            (1, 1),
            use_bias=False,
            kernel_initializer="he_normal",
            name=name + "_conv_1x1_2",
        )(out)
        out = layers.BatchNormalization(
            momentum=0.9, epsilon=1e-5, name=name + "_conv_1x1_2_bn"
        )(out)

        return layers.ReLU(name=name + "_exit_relu")(out + skip)

    return apply
def ZBlock(
    filters_in,
    filters_out,
    group_width,
    stride=1,
    squeeze_excite_ratio=0.25,
    bottleneck_ratio=0.25,
    name=None,
):
    """Implementation of Z block Reference: [Fast and Accurate Model
    Scaling](https://arxiv.org/abs/2103.06877).

    Args:
      filters_in: filters in the input tensor
      filters_out: filters in the output tensor
      group_width: group width
      stride: stride
      squeeze_excite_ratio: expansion ratio for Squeeze and Excite block
      bottleneck_ratio: inverted bottleneck ratio
      name: name prefix
    Returns:
      Output tensor of the block
    """
    if name is None:
        name = str(backend.get_uid("zblock"))

    def apply(inputs):
        # A stride-1 block adds a residual connection, so its input and
        # output widths must agree.
        if filters_in != filters_out and stride == 1:
            raise ValueError(
                # BUG FIX: added the missing space before "are not equal";
                # adjacent f-strings previously rendered "...(64)are not".
                f"Input filters({filters_in}) and output "
                f"filters({filters_out}) "
                f"are not equal for stride {stride}. Input and output filters "
                f"must be equal for stride={stride}."
            )

        groups = filters_out // group_width
        # SE squeeze width is a ratio of the *input* width.
        se_filters = int(filters_in * squeeze_excite_ratio)

        # Inverted bottleneck: expand, not shrink (ratio < 1 divides).
        inv_btlneck_filters = int(filters_out / bottleneck_ratio)

        # Build block
        # conv_1x1_1: expand to the inverted-bottleneck width, BN, SiLU.
        x = layers.Conv2D(
            inv_btlneck_filters,
            (1, 1),
            use_bias=False,
            kernel_initializer="he_normal",
            name=name + "_conv_1x1_1",
        )(inputs)
        x = layers.BatchNormalization(
            momentum=0.9, epsilon=1e-5, name=name + "_conv_1x1_1_bn"
        )(x)
        x = tf.nn.silu(x)

        # conv_3x3: grouped conv carries the stride.
        x = layers.Conv2D(
            inv_btlneck_filters,
            (3, 3),
            use_bias=False,
            strides=stride,
            groups=groups,
            padding="same",
            kernel_initializer="he_normal",
            name=name + "_conv_3x3",
        )(x)
        x = layers.BatchNormalization(
            momentum=0.9, epsilon=1e-5, name=name + "_conv_3x3_bn"
        )(x)
        x = tf.nn.silu(x)

        # Squeeze-Excitation block.
        # BUG FIX: SqueezeAndExciteBlock returns a builder function which
        # must be *applied* to the tensor. Previously the function object
        # itself was assigned to `x`, so the following Conv2D received a
        # function instead of a tensor and construction crashed.
        x = SqueezeAndExciteBlock(inv_btlneck_filters, se_filters, name=name)(
            x
        )

        # conv_1x1_2: project back down to filters_out.
        x = layers.Conv2D(
            filters_out,
            (1, 1),
            use_bias=False,
            kernel_initializer="he_normal",
            name=name + "_conv_1x1_2",
        )(x)
        x = layers.BatchNormalization(
            momentum=0.9, epsilon=1e-5, name=name + "_conv_1x1_2_bn"
        )(x)

        # Residual connection only for stride-1 blocks (shapes match there).
        if stride != 1:
            return x
        else:
            return x + inputs

    return apply
def Stage(block_type, depth, group_width, filters_in, filters_out, name=None):
    """Implementation of Stage in RegNet.

    Args:
      block_type: must be one of "X", "Y", "Z"
      depth: depth of stage, number of blocks to use
      group_width: group width of all blocks in this stage
      filters_in: input filters to this stage
      filters_out: output filters from this stage
      name: name prefix

    Returns:
      Output tensor of Stage
    """
    if name is None:
        name = str(backend.get_uid("stage"))

    def apply(inputs):
        # Dispatch table replaces the if/elif ladder; layer names are
        # unchanged (f"{name}_{block_type}Block_{i}").
        block_builders = {"X": XBlock, "Y": YBlock, "Z": ZBlock}
        if block_type not in block_builders:
            raise NotImplementedError(
                f"Block type `{block_type}` not recognized."
                "block_type must be one of (`X`, `Y`, `Z`). "
            )
        make_block = block_builders[block_type]

        # First block downsamples (stride 2) and may widen the channels.
        x = make_block(
            filters_in,
            filters_out,
            group_width,
            stride=2,
            name=f"{name}_{block_type}Block_0",
        )(inputs)
        # Remaining blocks keep resolution and width fixed.
        for i in range(1, depth):
            x = make_block(
                filters_out,
                filters_out,
                group_width,
                name=f"{name}_{block_type}Block_{i}",
            )(x)
        return x

    return apply
def Head(num_classes=1000, name=None):
    """Classification head of RegNet: global average pool + dense layer.

    Args:
      num_classes: number of classes for Dense layer
      name: name prefix

    Returns:
      Classification head function.
    """
    if name is None:
        name = str(backend.get_uid("head"))

    def apply(x):
        pooled = layers.GlobalAveragePooling2D(name=name + "_head_gap")(x)
        # NOTE(review): "head_dense" lacks the "_" separator every other
        # layer name uses; kept byte-identical because pretrained weight
        # files reference this exact layer name.
        return layers.Dense(num_classes, name=name + "head_dense")(pooled)

    return apply
def RegNet(
    depths,
    widths,
    group_width,
    block_type,
    default_size,
    model_name="regnet",
    include_preprocessing=True,
    include_top=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    """Instantiates RegNet architecture given specific configuration.

    Args:
      depths: An iterable containing depths for each individual stages.
      widths: An iterable containing output channel width of each individual
        stages
      group_width: Number of channels to be used in each group. See grouped
        convolutions for more information.
      block_type: Must be one of `{"X", "Y", "Z"}`. For more details see the
        papers "Designing network design spaces" and "Fast and Accurate Model
        Scaling"
      default_size: Default input image size.
      model_name: An optional name for the model. NOTE: when
        `weights="imagenet"`, the last four characters of `model_name` are
        used as the key into `WEIGHTS_HASHES` (see the weight-loading code
        below), so the name must end with a known variant key like "x002".
      include_preprocessing: boolean denoting whether to include preprocessing
        in the model
      include_top: Boolean denoting whether to include classification head to
        the model.
      weights: one of `None` (random initialization), "imagenet" (pre-training
        on ImageNet), or the path to the weights file to be loaded.
      input_tensor: optional Keras tensor (i.e. output of `layers.Input()`) to
        use as image input for the model.
      input_shape: optional shape tuple, only to be specified if `include_top`
        is False. It should have exactly 3 inputs channels.
      pooling: optional pooling mode for feature extraction when `include_top`
        is `False`. - `None` means that the output of the model will be the 4D
        tensor output of the last convolutional layer. - `avg` means that global
        average pooling will be applied to the output of the last convolutional
        layer, and thus the output of the model will be a 2D tensor. - `max`
        means that global max pooling will be applied.
      classes: optional number of classes to classify images into, only to be
        specified if `include_top` is True, and if no `weights` argument is
        specified.
      classifier_activation: A `str` or callable. The activation function to use
        on the "top" layer. Ignored unless `include_top=True`. Set
        `classifier_activation=None` to return the logits of the "top" layer.

    Returns:
      A `keras.Model` instance.

    Raises:
      ValueError: in case of invalid argument for `weights`,
        or invalid input shape.
      ValueError: if `classifier_activation` is not `softmax` or `None` when
        using a pretrained top layer.
      ValueError: if `include_top` is True but `num_classes` is not 1000.
      ValueError: if `block_type` is not one of `{"X", "Y", "Z"}`
    """
    # `weights` must be a known keyword or an existing file path.
    if not (weights in {"imagenet", None} or tf.io.gfile.exists(weights)):
        raise ValueError(
            "The `weights` argument should be either "
            "`None` (random initialization), `imagenet` "
            "(pre-training on ImageNet), "
            "or the path to the weights file to be loaded."
        )

    # The released ImageNet classification head has exactly 1000 outputs.
    if weights == "imagenet" and include_top and classes != 1000:
        raise ValueError(
            "If using `weights` as `'imagenet'` with `include_top`"
            " as true, `classes` should be 1000"
        )

    # Determine proper input shape
    input_shape = imagenet_utils.obtain_input_shape(
        input_shape,
        default_size=default_size,
        min_size=32,
        data_format=backend.image_data_format(),
        require_flatten=include_top,
        weights=weights,
    )

    # Wrap a non-Keras tensor in an Input layer; reuse a Keras tensor as-is.
    if input_tensor is None:
        img_input = layers.Input(shape=input_shape)
    else:
        if not backend.is_keras_tensor(input_tensor):
            img_input = layers.Input(tensor=input_tensor, shape=input_shape)
        else:
            img_input = input_tensor

    # Root the Model at the original source Input of `input_tensor` so the
    # resulting graph includes any layers already applied upstream.
    if input_tensor is not None:
        inputs = layer_utils.get_source_inputs(input_tensor)[0]
    else:
        inputs = img_input

    x = inputs
    if include_preprocessing:
        x = PreStem(name=model_name)(x)
    x = Stem(name=model_name)(x)

    in_channels = 32  # Output from Stem

    # Four sequential stages; each consumes the previous stage's width.
    for num_stage in range(4):
        depth = depths[num_stage]
        out_channels = widths[num_stage]

        x = Stage(
            block_type,
            depth,
            group_width,
            in_channels,
            out_channels,
            name=model_name + "_Stage_" + str(num_stage),
        )(x)
        in_channels = out_channels

    if include_top:
        x = Head(num_classes=classes)(x)
        imagenet_utils.validate_activation(classifier_activation, weights)

    else:
        if pooling == "avg":
            x = layers.GlobalAveragePooling2D()(x)
        elif pooling == "max":
            x = layers.GlobalMaxPooling2D()(x)

    model = training.Model(inputs=inputs, outputs=x, name=model_name)

    # Load weights. The hash table is keyed by the 4-char variant suffix of
    # `model_name` (index 0 = with-top file, index 1 = "_notop" file).
    if weights == "imagenet":
        if include_top:
            file_suffix = ".h5"
            file_hash = WEIGHTS_HASHES[model_name[-4:]][0]
        else:
            file_suffix = "_notop.h5"
            file_hash = WEIGHTS_HASHES[model_name[-4:]][1]
        file_name = model_name + file_suffix
        weights_path = data_utils.get_file(
            file_name,
            BASE_WEIGHTS_PATH + file_name,
            cache_subdir="models",
            file_hash=file_hash,
        )
        model.load_weights(weights_path)
    elif weights is not None:
        model.load_weights(weights)

    return model
1013## Instantiating variants ##
@keras_export(
    "keras.applications.regnet.RegNetX002", "keras.applications.RegNetX002"
)
def RegNetX002(
    model_name="regnetx002",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Thin factory: forward the "x002" architecture configuration to RegNet.
    config = MODEL_CONFIGS["x002"]
    return RegNet(
        config["depths"],
        config["widths"],
        config["group_width"],
        config["block_type"],
        config["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetX004", "keras.applications.RegNetX004"
)
def RegNetX004(
    model_name="regnetx004",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Thin factory: forward the "x004" architecture configuration to RegNet.
    config = MODEL_CONFIGS["x004"]
    return RegNet(
        config["depths"],
        config["widths"],
        config["group_width"],
        config["block_type"],
        config["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetX006", "keras.applications.RegNetX006"
)
def RegNetX006(
    model_name="regnetx006",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Thin factory: forward the "x006" architecture configuration to RegNet.
    config = MODEL_CONFIGS["x006"]
    return RegNet(
        config["depths"],
        config["widths"],
        config["group_width"],
        config["block_type"],
        config["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetX008", "keras.applications.RegNetX008"
)
def RegNetX008(
    model_name="regnetx008",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Thin factory: forward the "x008" architecture configuration to RegNet.
    config = MODEL_CONFIGS["x008"]
    return RegNet(
        config["depths"],
        config["widths"],
        config["group_width"],
        config["block_type"],
        config["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetX016", "keras.applications.RegNetX016"
)
def RegNetX016(
    model_name="regnetx016",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Thin factory: forward the "x016" architecture configuration to RegNet.
    config = MODEL_CONFIGS["x016"]
    return RegNet(
        config["depths"],
        config["widths"],
        config["group_width"],
        config["block_type"],
        config["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetX032", "keras.applications.RegNetX032"
)
def RegNetX032(
    model_name="regnetx032",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Build the RegNetX032 variant from its preset architecture config,
    # forwarding every user-facing option to the generic RegNet factory.
    cfg = MODEL_CONFIGS["x032"]
    return RegNet(
        cfg["depths"],
        cfg["widths"],
        cfg["group_width"],
        cfg["block_type"],
        cfg["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetX040", "keras.applications.RegNetX040"
)
def RegNetX040(
    model_name="regnetx040",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Build the RegNetX040 variant from its preset architecture config,
    # forwarding every user-facing option to the generic RegNet factory.
    cfg = MODEL_CONFIGS["x040"]
    return RegNet(
        cfg["depths"],
        cfg["widths"],
        cfg["group_width"],
        cfg["block_type"],
        cfg["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetX064", "keras.applications.RegNetX064"
)
def RegNetX064(
    model_name="regnetx064",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Build the RegNetX064 variant from its preset architecture config,
    # forwarding every user-facing option to the generic RegNet factory.
    cfg = MODEL_CONFIGS["x064"]
    return RegNet(
        cfg["depths"],
        cfg["widths"],
        cfg["group_width"],
        cfg["block_type"],
        cfg["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetX080", "keras.applications.RegNetX080"
)
def RegNetX080(
    model_name="regnetx080",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Build the RegNetX080 variant from its preset architecture config,
    # forwarding every user-facing option to the generic RegNet factory.
    cfg = MODEL_CONFIGS["x080"]
    return RegNet(
        cfg["depths"],
        cfg["widths"],
        cfg["group_width"],
        cfg["block_type"],
        cfg["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetX120", "keras.applications.RegNetX120"
)
def RegNetX120(
    model_name="regnetx120",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Build the RegNetX120 variant from its preset architecture config,
    # forwarding every user-facing option to the generic RegNet factory.
    cfg = MODEL_CONFIGS["x120"]
    return RegNet(
        cfg["depths"],
        cfg["widths"],
        cfg["group_width"],
        cfg["block_type"],
        cfg["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetX160", "keras.applications.RegNetX160"
)
def RegNetX160(
    model_name="regnetx160",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Build the RegNetX160 variant from its preset architecture config,
    # forwarding every user-facing option to the generic RegNet factory.
    cfg = MODEL_CONFIGS["x160"]
    return RegNet(
        cfg["depths"],
        cfg["widths"],
        cfg["group_width"],
        cfg["block_type"],
        cfg["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetX320", "keras.applications.RegNetX320"
)
def RegNetX320(
    model_name="regnetx320",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Build the RegNetX320 variant from its preset architecture config,
    # forwarding every user-facing option to the generic RegNet factory.
    cfg = MODEL_CONFIGS["x320"]
    return RegNet(
        cfg["depths"],
        cfg["widths"],
        cfg["group_width"],
        cfg["block_type"],
        cfg["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetY002", "keras.applications.RegNetY002"
)
def RegNetY002(
    model_name="regnety002",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Build the RegNetY002 variant from its preset architecture config,
    # forwarding every user-facing option to the generic RegNet factory.
    cfg = MODEL_CONFIGS["y002"]
    return RegNet(
        cfg["depths"],
        cfg["widths"],
        cfg["group_width"],
        cfg["block_type"],
        cfg["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetY004", "keras.applications.RegNetY004"
)
def RegNetY004(
    model_name="regnety004",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Build the RegNetY004 variant from its preset architecture config,
    # forwarding every user-facing option to the generic RegNet factory.
    cfg = MODEL_CONFIGS["y004"]
    return RegNet(
        cfg["depths"],
        cfg["widths"],
        cfg["group_width"],
        cfg["block_type"],
        cfg["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetY006", "keras.applications.RegNetY006"
)
def RegNetY006(
    model_name="regnety006",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Build the RegNetY006 variant from its preset architecture config,
    # forwarding every user-facing option to the generic RegNet factory.
    cfg = MODEL_CONFIGS["y006"]
    return RegNet(
        cfg["depths"],
        cfg["widths"],
        cfg["group_width"],
        cfg["block_type"],
        cfg["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetY008", "keras.applications.RegNetY008"
)
def RegNetY008(
    model_name="regnety008",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Build the RegNetY008 variant from its preset architecture config,
    # forwarding every user-facing option to the generic RegNet factory.
    cfg = MODEL_CONFIGS["y008"]
    return RegNet(
        cfg["depths"],
        cfg["widths"],
        cfg["group_width"],
        cfg["block_type"],
        cfg["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetY016", "keras.applications.RegNetY016"
)
def RegNetY016(
    model_name="regnety016",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Build the RegNetY016 variant from its preset architecture config,
    # forwarding every user-facing option to the generic RegNet factory.
    cfg = MODEL_CONFIGS["y016"]
    return RegNet(
        cfg["depths"],
        cfg["widths"],
        cfg["group_width"],
        cfg["block_type"],
        cfg["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetY032", "keras.applications.RegNetY032"
)
def RegNetY032(
    model_name="regnety032",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Build the RegNetY032 variant from its preset architecture config,
    # forwarding every user-facing option to the generic RegNet factory.
    cfg = MODEL_CONFIGS["y032"]
    return RegNet(
        cfg["depths"],
        cfg["widths"],
        cfg["group_width"],
        cfg["block_type"],
        cfg["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetY040", "keras.applications.RegNetY040"
)
def RegNetY040(
    model_name="regnety040",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Build the RegNetY040 variant from its preset architecture config,
    # forwarding every user-facing option to the generic RegNet factory.
    cfg = MODEL_CONFIGS["y040"]
    return RegNet(
        cfg["depths"],
        cfg["widths"],
        cfg["group_width"],
        cfg["block_type"],
        cfg["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetY064", "keras.applications.RegNetY064"
)
def RegNetY064(
    model_name="regnety064",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Build the RegNetY064 variant from its preset architecture config,
    # forwarding every user-facing option to the generic RegNet factory.
    cfg = MODEL_CONFIGS["y064"]
    return RegNet(
        cfg["depths"],
        cfg["widths"],
        cfg["group_width"],
        cfg["block_type"],
        cfg["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetY080", "keras.applications.RegNetY080"
)
def RegNetY080(
    model_name="regnety080",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Build the RegNetY080 variant from its preset architecture config,
    # forwarding every user-facing option to the generic RegNet factory.
    cfg = MODEL_CONFIGS["y080"]
    return RegNet(
        cfg["depths"],
        cfg["widths"],
        cfg["group_width"],
        cfg["block_type"],
        cfg["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetY120", "keras.applications.RegNetY120"
)
def RegNetY120(
    model_name="regnety120",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Build the RegNetY120 variant from its preset architecture config,
    # forwarding every user-facing option to the generic RegNet factory.
    cfg = MODEL_CONFIGS["y120"]
    return RegNet(
        cfg["depths"],
        cfg["widths"],
        cfg["group_width"],
        cfg["block_type"],
        cfg["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetY160", "keras.applications.RegNetY160"
)
def RegNetY160(
    model_name="regnety160",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Build the RegNetY160 variant from its preset architecture config,
    # forwarding every user-facing option to the generic RegNet factory.
    cfg = MODEL_CONFIGS["y160"]
    return RegNet(
        cfg["depths"],
        cfg["widths"],
        cfg["group_width"],
        cfg["block_type"],
        cfg["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
@keras_export(
    "keras.applications.regnet.RegNetY320", "keras.applications.RegNetY320"
)
def RegNetY320(
    model_name="regnety320",
    include_top=True,
    include_preprocessing=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    # Build the RegNetY320 variant from its preset architecture config,
    # forwarding every user-facing option to the generic RegNet factory.
    cfg = MODEL_CONFIGS["y320"]
    return RegNet(
        cfg["depths"],
        cfg["widths"],
        cfg["group_width"],
        cfg["block_type"],
        cfg["default_size"],
        model_name=model_name,
        include_top=include_top,
        include_preprocessing=include_preprocessing,
        weights=weights,
        input_tensor=input_tensor,
        input_shape=input_shape,
        pooling=pooling,
        classes=classes,
        classifier_activation=classifier_activation,
    )
# Attach the shared usage docstring to every RegNet variant constructor.
# Each constructor's __name__ is exactly the name that was previously
# spliced into BASE_DOCSTRING by hand, so iterating is equivalent to the
# 24 individual assignments.
for _variant in (
    RegNetX002,
    RegNetX004,
    RegNetX006,
    RegNetX008,
    RegNetX016,
    RegNetX032,
    RegNetX040,
    RegNetX064,
    RegNetX080,
    RegNetX120,
    RegNetX160,
    RegNetX320,
    RegNetY002,
    RegNetY004,
    RegNetY006,
    RegNetY008,
    RegNetY016,
    RegNetY032,
    RegNetY040,
    RegNetY064,
    RegNetY080,
    RegNetY120,
    RegNetY160,
    RegNetY320,
):
    _variant.__doc__ = BASE_DOCSTRING.format(name=_variant.__name__)
del _variant  # keep the module namespace identical to before
@keras_export("keras.applications.regnet.preprocess_input")
def preprocess_input(x, data_format=None):
    """A placeholder method for backward compatibility.

    The preprocessing logic has been included in the regnet model
    implementation. Users are no longer required to call this method to
    normalize the input data. This method does nothing and only kept as a
    placeholder to align the API surface between old and new version of model.

    Args:
      x: A floating point `numpy.array` or a `tf.Tensor`.
      data_format: Optional data format of the image tensor/array. Defaults to
        None, in which case the global setting
        `tf.keras.backend.image_data_format()` is used (unless you changed it,
        it defaults to "channels_last").

    Returns:
      Unchanged `numpy.array` or `tf.Tensor`.
    """
    # No-op by design: preprocessing happens inside the model itself, so the
    # input is returned untouched (data_format is intentionally ignored).
    return x
@keras_export("keras.applications.regnet.decode_predictions")
def decode_predictions(preds, top=5):
    # Thin wrapper: delegate decoding of ImageNet prediction arrays to the
    # shared imagenet_utils helper (top-k class names and probabilities).
    return imagenet_utils.decode_predictions(preds, top=top)


# Reuse the helper's docstring so the public API documentation stays in sync.
decode_predictions.__doc__ = imagenet_utils.decode_predictions.__doc__