Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/keras/src/premade_models/linear.py: 25%
81 statements
« prev ^ index » next coverage.py v7.4.0, created at 2024-01-03 07:57 +0000
1# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14# ==============================================================================
15"""Built-in linear model classes."""
17import tensorflow.compat.v2 as tf
19from keras.src import activations
20from keras.src import initializers
21from keras.src import regularizers
22from keras.src.engine import base_layer
23from keras.src.engine import input_spec
24from keras.src.engine import training
25from keras.src.layers import core
27# isort: off
28from tensorflow.python.util import deprecation
29from tensorflow.python.util.tf_export import keras_export
@keras_export(
    "keras.experimental.LinearModel",
    v1=["keras.experimental.LinearModel", "keras.models.LinearModel"],
)
@deprecation.deprecated_endpoints("keras.experimental.LinearModel")
class LinearModel(training.Model):
    r"""Linear Model for regression and classification problems.

    This model approximates the following function:
    $$y = \beta + \sum_{i=1}^{N} w_{i} * x_{i}$$
    where $$\beta$$ is the bias and $$w_{i}$$ is the weight for each feature.

    Example:

    ```python
    model = LinearModel()
    model.compile(optimizer='sgd', loss='mse')
    model.fit(x, y, epochs=epochs)
    ```

    This model accepts sparse float inputs as well:

    Example:
    ```python
    model = LinearModel()
    opt = tf.keras.optimizers.Adam()
    loss_fn = tf.keras.losses.MeanSquaredError()
    with tf.GradientTape() as tape:
        output = model(sparse_input)
        loss = tf.reduce_mean(loss_fn(target, output))
    grads = tape.gradient(loss, model.weights)
    opt.apply_gradients(zip(grads, model.weights))
    ```

    """

    def __init__(
        self,
        units=1,
        activation=None,
        use_bias=True,
        kernel_initializer="zeros",
        bias_initializer="zeros",
        kernel_regularizer=None,
        bias_regularizer=None,
        **kwargs,
    ):
        """Create a Linear Model.

        Args:
            units: Positive integer, output dimension without the batch size.
            activation: Activation function to use.
                If you don't specify anything, no activation is applied.
            use_bias: whether to calculate the bias/intercept for this model.
                If set to False, no bias/intercept will be used in
                calculations, e.g., the data is already centered.
            kernel_initializer: Initializer for the `kernel` weights matrices.
            bias_initializer: Initializer for the bias vector.
            kernel_regularizer: regularizer for kernel vectors.
            bias_regularizer: regularizer for bias vector.
            **kwargs: The keyword arguments that are passed on to
                BaseLayer.__init__.
        """
        self.units = units
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        super().__init__(**kwargs)
        # Usage-tracking gauge for premade models.
        base_layer.keras_premade_model_gauge.get_cell("Linear").set(True)

    def _make_dense(self, name=None):
        """Return one bias-free Dense projection layer.

        Each input tensor gets its own kernel-only Dense; the single shared
        bias (when `use_bias`) is created in `build` and added once in
        `call`, so multi-input models still have exactly one intercept.
        """
        return core.Dense(
            units=self.units,
            use_bias=False,
            kernel_initializer=self.kernel_initializer,
            kernel_regularizer=self.kernel_regularizer,
            name=name,
        )

    def build(self, input_shape):
        """Create one Dense layer per input, plus the optional shared bias.

        `input_shape` may be a dict of shapes (named inputs), a tuple/list
        of `tf.TensorShape`s (multiple unnamed inputs), or a single shape.
        """
        if isinstance(input_shape, dict):
            # Named inputs: iterate keys in sorted order so layer order is
            # deterministic, and record InputSpecs so Keras can validate
            # the dict structure at call time.
            names = sorted(list(input_shape.keys()))
            self.input_specs = []
            self.dense_layers = []
            for name in names:
                shape = input_shape[name]
                layer = self._make_dense(name=name)
                layer.build(shape)
                self.input_specs.append(
                    input_spec.InputSpec(shape=shape, name=name)
                )
                self.dense_layers.append(layer)
        elif isinstance(input_shape, (tuple, list)) and all(
            isinstance(shape, tf.TensorShape) for shape in input_shape
        ):
            # Multiple unnamed tensor inputs: one Dense per input shape.
            self.dense_layers = []
            for shape in input_shape:
                layer = self._make_dense()
                layer.build(shape)
                self.dense_layers.append(layer)
        else:
            # input_shape can be a single TensorShape or a tuple of ints.
            layer = self._make_dense()
            layer.build(input_shape)
            self.dense_layers = [layer]

        if self.use_bias:
            self.bias = self.add_weight(
                "bias",
                shape=self.units,
                initializer=self.bias_initializer,
                regularizer=self.bias_regularizer,
                dtype=self.dtype,
                trainable=True,
            )
        else:
            self.bias = None
        self.built = True

    def _sum_outputs(self, inputs):
        """Sum the per-input Dense projections over parallel inputs.

        Accumulates with `+=` exactly as the original loop did, which
        preserves the addition semantics for sparse inputs.
        """
        result = None
        for inp, layer in zip(inputs, self.dense_layers):
            output = layer(inp)
            if result is None:
                result = output
            else:
                result += output
        return result

    def call(self, inputs):
        """Apply the linear model to `inputs` (dict, tuple/list, or tensor).

        Raises:
            ValueError: if `inputs` is a dict that is missing a key expected
                by the model (extra keys are silently ignored, matching the
                original behavior).
        """
        if isinstance(inputs, dict):
            names = [layer.name for layer in self.dense_layers]
            different_keys = set(names) - set(inputs.keys())
            if different_keys:
                raise ValueError(
                    "The `inputs` dictionary does not match "
                    "the structure expected by the model."
                    f"\n\tExpected keys: {set(names)}"
                    f"\n\tReceived keys: {set(inputs.keys())}"
                    f"\n\tMissing keys: {different_keys}"
                )
            # Reorder the dict values to match the (sorted) layer order.
            inputs = [inputs[name] for name in names]
            result = self._sum_outputs(inputs)
        elif isinstance(inputs, (tuple, list)):
            result = self._sum_outputs(inputs)
        else:
            # Single-tensor input: exactly one Dense layer was built.
            result = self.dense_layers[0](inputs)

        if self.use_bias:
            result = tf.nn.bias_add(result, self.bias)
        if self.activation is not None:
            return self.activation(result)
        return result

    def get_config(self):
        """Return the serializable config; merged with the base config."""
        config = {
            "units": self.units,
            "activation": activations.serialize(self.activation),
            "use_bias": self.use_bias,
            "kernel_initializer": initializers.serialize(
                self.kernel_initializer
            ),
            "bias_initializer": initializers.serialize(self.bias_initializer),
            "kernel_regularizer": regularizers.serialize(
                self.kernel_regularizer
            ),
            "bias_regularizer": regularizers.serialize(self.bias_regularizer),
        }
        # NOTE: calls base_layer.Layer.get_config directly (not super()),
        # deliberately skipping training.Model's config logic.
        base_config = base_layer.Layer.get_config(self)
        return dict(list(base_config.items()) + list(config.items()))

    @classmethod
    def from_config(cls, config, custom_objects=None):
        # custom_objects is unused: every config value is rebuilt via the
        # keras `get()` deserializers in __init__.
        del custom_objects
        return cls(**config)