Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/keras/src/feature_column/dense_features.py: 45%
42 statements
coverage.py v7.4.0, created at 2024-01-03 07:57 +0000
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A layer that produces a dense `Tensor` based on given `feature_columns`."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import json

import tensorflow.compat.v2 as tf

from keras.src import backend
from keras.src.feature_column import base_feature_layer as kfc
from keras.src.saving.legacy.saved_model import json_utils

# isort: off
from tensorflow.python.util.tf_export import keras_export


@keras_export(v1=["keras.layers.DenseFeatures"])
class DenseFeatures(kfc._BaseFeaturesLayer):
    """A layer that produces a dense `Tensor` based on given `feature_columns`.

    Generally a single example in training data is described with
    FeatureColumns. At the first layer of the model, this column-oriented data
    should be converted to a single `Tensor`.

    This layer can be called multiple times with different features.

    This is the V1 version of this layer, which uses variable scopes or a
    partitioner to create variables and therefore works well with
    PartitionedVariables. Variable scopes are deprecated in V2, so the V2
    version uses name scopes instead, but it currently lacks support for
    partitioned variables. Use this layer if you need partitioned variables.
    Use the `partitioner` argument if you have a Keras model and use
    `tf.compat.v1.keras.estimator.model_to_estimator` for training.

    Example:

    ```python
    price = tf.feature_column.numeric_column('price')
    keywords_embedded = tf.feature_column.embedding_column(
        tf.feature_column.categorical_column_with_hash_bucket(
            "keywords", 10_000),
        dimension=16)
    columns = [price, keywords_embedded, ...]
    partitioner = tf.compat.v1.fixed_size_partitioner(num_shards=4)
    feature_layer = tf.compat.v1.keras.layers.DenseFeatures(
        feature_columns=columns, partitioner=partitioner)

    features = tf.io.parse_example(
        ..., features=tf.feature_column.make_parse_example_spec(columns))
    dense_tensor = feature_layer(features)
    for units in [128, 64, 32]:
        dense_tensor = tf.compat.v1.keras.layers.Dense(
            units, activation='relu')(dense_tensor)
    prediction = tf.compat.v1.keras.layers.Dense(1)(dense_tensor)
    ```
    """

    def __init__(
        self,
        feature_columns,
        trainable=True,
        name=None,
        partitioner=None,
        **kwargs
    ):
        """Constructs a DenseFeatures layer.

        Args:
          feature_columns: An iterable containing the FeatureColumns to use as
            inputs to your model. All items should be instances of classes
            derived from `DenseColumn` such as `numeric_column`,
            `embedding_column`, `bucketized_column`, `indicator_column`. If you
            have categorical features, you can wrap them with an
            `embedding_column` or `indicator_column`.
          trainable: Boolean, whether the layer's variables will be updated via
            gradient descent during training.
          name: Name to give to the DenseFeatures.
          partitioner: Partitioner for the input layer. Defaults to `None`.
          **kwargs: Keyword arguments to construct a layer.

        Raises:
          ValueError: if an item in `feature_columns` is not a `DenseColumn`.
        """
        super().__init__(
            feature_columns=feature_columns,
            trainable=trainable,
            name=name,
            partitioner=partitioner,
            expected_column_type=tf.__internal__.feature_column.DenseColumn,
            **kwargs
        )

    @property
    def _is_feature_layer(self):
        return True

    @property
    def _tracking_metadata(self):
        """String stored in metadata field in the SavedModel proto.

        Returns:
          A serialized JSON storing information necessary for recreating this
          layer.
        """
        metadata = json.loads(super()._tracking_metadata)
        metadata["_is_feature_layer"] = True
        return json.dumps(metadata, default=json_utils.get_json_type)
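
    # Illustrative note (an editor's sketch, not part of the original file):
    # the JSON returned above is the layer's regular Keras SavedModel metadata
    # plus one extra flag, roughly of the form
    #     {"class_name": "DenseFeatures", ..., "_is_feature_layer": true}
    # presumably so the SavedModel loading code can tell feature layers apart
    # from ordinary layers when the model is restored.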

    def _target_shape(self, input_shape, total_elements):
        return (input_shape[0], total_elements)

    def call(self, features, cols_to_output_tensors=None, training=None):
        """Returns a dense tensor corresponding to the `feature_columns`.

        Example usage:

        >>> t1 = tf.feature_column.embedding_column(
        ...     tf.feature_column.categorical_column_with_hash_bucket("t1", 2),
        ...     dimension=8)
        >>> t2 = tf.feature_column.numeric_column('t2')
        >>> feature_layer = tf.compat.v1.keras.layers.DenseFeatures([t1, t2])
        >>> features = {"t1": tf.constant(["a", "b"]),
        ...             "t2": tf.constant([1, 2])}
        >>> dense_tensor = feature_layer(features, training=True)

        Args:
          features: A mapping from key to tensors. `FeatureColumn`s look up via
            these keys. For example `numeric_column('price')` will look at
            the 'price' key in this dict. Values can be a `SparseTensor` or a
            `Tensor`, depending on the corresponding `FeatureColumn`.
          cols_to_output_tensors: If not `None`, this will be filled with a
            dict mapping feature columns to the output tensors created.
          training: Python boolean or `None`, indicating whether the layer is
            being run in training mode. This argument is passed to the call
            method of any `FeatureColumn` that takes a `training` argument. For
            example, if a `FeatureColumn` performed dropout, the column could
            expose a `training` argument to control whether the dropout should
            be applied. If `None`, defaults to
            `tf.keras.backend.learning_phase()`.

        Returns:
          A `Tensor` which represents the input layer of a model. Its shape
          is (batch_size, first_layer_dimension) and its dtype is `float32`.
          first_layer_dimension is determined based on the given
          `feature_columns`.

        Raises:
          ValueError: If features are not a dictionary.
        """
        if training is None:
            training = backend.learning_phase()
        if not isinstance(features, dict):
            raise ValueError(
                f"We expected a dictionary here. Instead we got: {features}"
            )
        transformation_cache = (
            tf.__internal__.feature_column.FeatureTransformationCache(features)
        )
        output_tensors = []
        for column in self._feature_columns:
            with backend.name_scope(column.name):
                try:
                    # Newer columns accept `training`; pass it through.
                    tensor = column.get_dense_tensor(
                        transformation_cache,
                        self._state_manager,
                        training=training,
                    )
                except TypeError:
                    # Older columns do not take a `training` argument.
                    tensor = column.get_dense_tensor(
                        transformation_cache, self._state_manager
                    )
                processed_tensors = self._process_dense_tensor(column, tensor)
                if cols_to_output_tensors is not None:
                    cols_to_output_tensors[column] = processed_tensors
                output_tensors.append(processed_tensors)
        return self._verify_and_concat_tensors(output_tensors)
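

# ---------------------------------------------------------------------------
# Illustrative usage sketch (an editor's addition, not part of the original
# module): a minimal end-to-end demo of `DenseFeatures`, assuming the
# `tf.feature_column` API is available at runtime. The column names, bucket
# size, and input values below are hypothetical.
if __name__ == "__main__":
    price = tf.feature_column.numeric_column("price")
    keywords = tf.feature_column.embedding_column(
        tf.feature_column.categorical_column_with_hash_bucket(
            "keywords", 10_000
        ),
        dimension=16,
    )
    feature_layer = DenseFeatures(feature_columns=[price, keywords])
    features = {
        "price": tf.constant([[1.0], [2.0]]),
        "keywords": tf.constant([["cheap shoes"], ["red hat"]]),
    }
    # Each example becomes one dense row: 16 embedding dimensions for
    # "keywords" plus 1 numeric value for "price" = 17 columns.
    dense_tensor = feature_layer(features)
    print(dense_tensor.shape)  # Expected: (2, 17)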