Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/tensorflow/python/ops/gen_checkpoint_ops.py: 20%
107 statements
« prev ^ index » next coverage.py v7.4.0, created at 2024-01-03 07:57 +0000
1"""Python wrappers around TensorFlow ops.
3This file is MACHINE GENERATED! Do not edit.
4"""
6import collections
8from tensorflow.python import pywrap_tfe as pywrap_tfe
9from tensorflow.python.eager import context as _context
10from tensorflow.python.eager import core as _core
11from tensorflow.python.eager import execute as _execute
12from tensorflow.python.framework import dtypes as _dtypes
13from tensorflow.security.fuzzing.py import annotation_types as _atypes
15from tensorflow.python.framework import op_def_registry as _op_def_registry
16from tensorflow.python.framework import ops as _ops
17from tensorflow.python.framework import op_def_library as _op_def_library
18from tensorflow.python.util.deprecation import deprecated_endpoints
19from tensorflow.python.util import dispatch as _dispatch
20from tensorflow.python.util.tf_export import tf_export
22from typing import TypeVar
# Structured result type for the GenerateVocabRemapping op:
#   remapping   - int64 Tensor mapping new-vocab rows to old-vocab rows (-1 if absent)
#   num_present - int32 Tensor counting new-vocab entries found in the old vocab
_GenerateVocabRemappingOutput = collections.namedtuple(
    "GenerateVocabRemapping",
    ["remapping", "num_present"])
def generate_vocab_remapping(new_vocab_file, old_vocab_file, new_vocab_offset, num_new_vocab, old_vocab_size=-1, name=None):
  r"""Given a path to new and old vocabulary files, returns a remapping Tensor of

  length `num_new_vocab`, where `remapping[i]` contains the row number in the old
  vocabulary that corresponds to row `i` in the new vocabulary (starting at line
  `new_vocab_offset` and up to `num_new_vocab` entities), or `-1` if entry `i`
  in the new vocabulary is not in the old vocabulary. The old vocabulary is
  constrained to the first `old_vocab_size` entries if `old_vocab_size` is not the
  default value of -1.

  `new_vocab_offset` enables
  use in the partitioned variable case, and should generally be set through
  examining partitioning info. The format of the files should be a text file,
  with each line containing a single entity within the vocabulary.

  For example, with `new_vocab_file` a text file containing each of the following
  elements on a single line: `[f0, f1, f2, f3]`, old_vocab_file = [f1, f0, f3],
  `num_new_vocab = 3, new_vocab_offset = 1`, the returned remapping would be
  `[0, -1, 2]`.

  The op also returns a count of how many entries in the new vocabulary
  were present in the old vocabulary, which is used to calculate the number of
  values to initialize in a weight matrix remapping.

  This functionality can be used to remap both row vocabularies (typically,
  features) and column vocabularies (typically, classes) from TensorFlow
  checkpoints. Note that the partitioning logic relies on contiguous vocabularies
  corresponding to div-partitioned variables. Moreover, the underlying remapping
  uses an IndexTable (as opposed to an inexact CuckooTable), so client code should
  use the corresponding index_table_from_file() as the FeatureColumn framework
  does (as opposed to tf.feature_to_id(), which uses a CuckooTable).

  Args:
    new_vocab_file: A `Tensor` of type `string`. Path to the new vocab file.
    old_vocab_file: A `Tensor` of type `string`. Path to the old vocab file.
    new_vocab_offset: An `int` that is `>= 0`.
      How many entries into the new vocab file to start reading.
    num_new_vocab: An `int` that is `>= 0`.
      Number of entries in the new vocab file to remap.
    old_vocab_size: An optional `int` that is `>= -1`. Defaults to `-1`.
      Number of entries in the old vocab file to consider. If -1,
      use the entire old vocabulary.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (remapping, num_present).

    remapping: A `Tensor` of type `int64`.
    num_present: A `Tensor` of type `int32`.
  """
  # Eager fast path: hand off directly to the C extension when executing
  # eagerly. The positional layout of the FastPathExecute call (op name,
  # call name, then inputs, then attr name/value pairs) is a fixed protocol.
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "GenerateVocabRemapping", name, new_vocab_file, old_vocab_file,
        "new_vocab_offset", new_vocab_offset, "num_new_vocab", num_new_vocab,
        "old_vocab_size", old_vocab_size)
      _result = _GenerateVocabRemappingOutput._make(_result)
      return _result
    except _core._NotOkStatusException as e:
      # Op execution failed; surface as a regular TF error for `name`.
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      # Fast path could not handle these arguments; retry via the slower
      # Python-level eager fallback below.
      pass
    try:
      return generate_vocab_remapping_eager_fallback(
          new_vocab_file, old_vocab_file, new_vocab_offset=new_vocab_offset,
          num_new_vocab=num_new_vocab, old_vocab_size=old_vocab_size,
          name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  # Validate/coerce attrs, then build the graph op via the op-def library.
  new_vocab_offset = _execute.make_int(new_vocab_offset, "new_vocab_offset")
  num_new_vocab = _execute.make_int(num_new_vocab, "num_new_vocab")
  if old_vocab_size is None:
    old_vocab_size = -1
  old_vocab_size = _execute.make_int(old_vocab_size, "old_vocab_size")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "GenerateVocabRemapping", new_vocab_file=new_vocab_file,
                                  old_vocab_file=old_vocab_file,
                                  new_vocab_offset=new_vocab_offset,
                                  num_new_vocab=num_new_vocab,
                                  old_vocab_size=old_vocab_size, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    # Record attrs/inputs so gradients (if any) can replay this op.
    _attrs = ("new_vocab_offset", _op._get_attr_int("new_vocab_offset"),
              "num_new_vocab", _op._get_attr_int("num_new_vocab"),
              "old_vocab_size", _op._get_attr_int("old_vocab_size"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "GenerateVocabRemapping", _inputs_flat, _attrs, _result)
  _result = _GenerateVocabRemappingOutput._make(_result)
  return _result
# Expose the op as tf.raw_ops.GenerateVocabRemapping (keyword-args-only wrapper).
GenerateVocabRemapping = tf_export("raw_ops.GenerateVocabRemapping")(_ops.to_raw_op(generate_vocab_remapping))
def generate_vocab_remapping_eager_fallback(new_vocab_file, old_vocab_file, new_vocab_offset, num_new_vocab, old_vocab_size, name, ctx):
  """Python-level eager execution path for GenerateVocabRemapping.

  Used when the C fast path raises a fallback exception. Coerces attrs and
  inputs, runs the op via `_execute.execute`, and returns the same
  `_GenerateVocabRemappingOutput` namedtuple as the primary wrapper.
  """
  # Validate integer attrs first, matching graph-mode error behavior.
  new_vocab_offset = _execute.make_int(new_vocab_offset, "new_vocab_offset")
  num_new_vocab = _execute.make_int(num_new_vocab, "num_new_vocab")
  old_vocab_size = -1 if old_vocab_size is None else old_vocab_size
  old_vocab_size = _execute.make_int(old_vocab_size, "old_vocab_size")
  # Both inputs are string tensors (file paths).
  new_vocab_file = _ops.convert_to_tensor(new_vocab_file, _dtypes.string)
  old_vocab_file = _ops.convert_to_tensor(old_vocab_file, _dtypes.string)
  op_inputs = [new_vocab_file, old_vocab_file]
  op_attrs = ("new_vocab_offset", new_vocab_offset,
              "num_new_vocab", num_new_vocab,
              "old_vocab_size", old_vocab_size)
  # The op produces exactly two outputs: remapping and num_present.
  outputs = _execute.execute(b"GenerateVocabRemapping", 2, inputs=op_inputs,
                             attrs=op_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "GenerateVocabRemapping", op_inputs, op_attrs, outputs)
  return _GenerateVocabRemappingOutput._make(outputs)
def load_and_remap_matrix(ckpt_path, old_tensor_name, row_remapping, col_remapping, initializing_values, num_rows, num_cols, max_rows_in_memory=-1, name=None):
  r"""Loads a 2-D (matrix) `Tensor` with name `old_tensor_name` from the checkpoint

  at `ckpt_path` and potentially reorders its rows and columns using the
  specified remappings.

  Most users should use one of the wrapper initializers (such as
  `tf.contrib.framework.load_and_remap_matrix_initializer`) instead of this
  function directly.

  The remappings are 1-D tensors with the following properties:

  * `row_remapping` must have exactly `num_rows` entries. Row `i` of the output
    matrix will be initialized from the row corresponding to index
    `row_remapping[i]` in the old `Tensor` from the checkpoint.
  * `col_remapping` must have either 0 entries (indicating that no column
    reordering is needed) or `num_cols` entries. If specified, column `j` of the
    output matrix will be initialized from the column corresponding to index
    `col_remapping[j]` in the old `Tensor` from the checkpoint.
  * A value of -1 in either of the remappings signifies a "missing" entry. In that
    case, values from the `initializing_values` tensor will be used to fill that
    missing row or column. If `row_remapping` has `r` missing entries and
    `col_remapping` has `c` missing entries, then the following condition must be
    true:

  `(r * num_cols) + (c * num_rows) - (r * c) == len(initializing_values)`

  The remapping tensors can be generated using the GenerateVocabRemapping op.

  As an example, with row_remapping = [1, 0, -1], col_remapping = [0, 2, -1],
  initializing_values = [0.5, -0.5, 0.25, -0.25, 42], and w(i, j) representing
  the value from row i, column j of the old tensor in the checkpoint, the output
  matrix will look like the following:

  [[w(1, 0),  w(1, 2),  0.5],
   [w(0, 0),  w(0, 2), -0.5],
   [0.25,    -0.25,      42]]

  Args:
    ckpt_path: A `Tensor` of type `string`.
      Path to the TensorFlow checkpoint (version 2, `TensorBundle`) from
      which the old matrix `Tensor` will be loaded.
    old_tensor_name: A `Tensor` of type `string`.
      Name of the 2-D `Tensor` to load from checkpoint.
    row_remapping: A `Tensor` of type `int64`.
      An int `Tensor` of row remappings (generally created by
      `generate_vocab_remapping`). Even if no row remapping is needed, this must
      still be an index-valued Tensor (e.g. [0, 1, 2, ...]), or a shifted
      index-valued `Tensor` (e.g. [8, 9, 10, ...], for partitioned `Variables`).
    col_remapping: A `Tensor` of type `int64`.
      An int `Tensor` of column remappings (generally created by
      `generate_vocab_remapping`). May be a size-0 `Tensor` if only row remapping
      is to be done (e.g. column ordering is the same).
    initializing_values: A `Tensor` of type `float32`.
      A float `Tensor` containing values to fill in for cells
      in the output matrix that are not loaded from the checkpoint. Length must be
      exactly the same as the number of missing / new cells.
    num_rows: An `int` that is `>= 0`.
      Number of rows (length of the 1st dimension) in the output matrix.
    num_cols: An `int` that is `>= 1`.
      Number of columns (length of the 2nd dimension) in the output matrix.
    max_rows_in_memory: An optional `int`. Defaults to `-1`.
      The maximum number of rows to load from the checkpoint at
      once. If less than or equal to 0, the entire matrix will be loaded into
      memory. Setting this arg trades increased disk reads for lower memory usage.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `float32`.
  """
  # Eager fast path: hand off directly to the C extension when executing
  # eagerly. Positional layout (op name, call name, inputs, attr pairs) is
  # a fixed protocol.
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "LoadAndRemapMatrix", name, ckpt_path, old_tensor_name,
        row_remapping, col_remapping, initializing_values, "num_rows",
        num_rows, "num_cols", num_cols, "max_rows_in_memory",
        max_rows_in_memory)
      return _result
    except _core._NotOkStatusException as e:
      # Op execution failed; surface as a regular TF error for `name`.
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      # Fast path could not handle these arguments; retry via the slower
      # Python-level eager fallback below.
      pass
    try:
      return load_and_remap_matrix_eager_fallback(
          ckpt_path, old_tensor_name, row_remapping, col_remapping,
          initializing_values, num_rows=num_rows, num_cols=num_cols,
          max_rows_in_memory=max_rows_in_memory, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  # Validate/coerce attrs, then build the graph op via the op-def library.
  num_rows = _execute.make_int(num_rows, "num_rows")
  num_cols = _execute.make_int(num_cols, "num_cols")
  if max_rows_in_memory is None:
    max_rows_in_memory = -1
  max_rows_in_memory = _execute.make_int(max_rows_in_memory, "max_rows_in_memory")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "LoadAndRemapMatrix", ckpt_path=ckpt_path,
                              old_tensor_name=old_tensor_name,
                              row_remapping=row_remapping,
                              col_remapping=col_remapping,
                              initializing_values=initializing_values,
                              num_rows=num_rows, num_cols=num_cols,
                              max_rows_in_memory=max_rows_in_memory,
                              name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    # Record attrs/inputs so gradients (if any) can replay this op.
    _attrs = ("num_rows", _op._get_attr_int("num_rows"), "num_cols",
              _op._get_attr_int("num_cols"), "max_rows_in_memory",
              _op._get_attr_int("max_rows_in_memory"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "LoadAndRemapMatrix", _inputs_flat, _attrs, _result)
  # Single-output op: unwrap the one-element list.
  _result, = _result
  return _result
# Expose the op as tf.raw_ops.LoadAndRemapMatrix (keyword-args-only wrapper).
LoadAndRemapMatrix = tf_export("raw_ops.LoadAndRemapMatrix")(_ops.to_raw_op(load_and_remap_matrix))
def load_and_remap_matrix_eager_fallback(ckpt_path, old_tensor_name, row_remapping, col_remapping, initializing_values, num_rows, num_cols, max_rows_in_memory, name, ctx):
  """Python-level eager execution path for LoadAndRemapMatrix.

  Used when the C fast path raises a fallback exception. Coerces attrs and
  inputs, runs the op via `_execute.execute`, and returns the single float32
  output tensor.
  """
  # Validate integer attrs first, matching graph-mode error behavior.
  num_rows = _execute.make_int(num_rows, "num_rows")
  num_cols = _execute.make_int(num_cols, "num_cols")
  max_rows_in_memory = -1 if max_rows_in_memory is None else max_rows_in_memory
  max_rows_in_memory = _execute.make_int(max_rows_in_memory, "max_rows_in_memory")
  # Coerce each input to a Tensor of the dtype the op-def declares.
  ckpt_path = _ops.convert_to_tensor(ckpt_path, _dtypes.string)
  old_tensor_name = _ops.convert_to_tensor(old_tensor_name, _dtypes.string)
  row_remapping = _ops.convert_to_tensor(row_remapping, _dtypes.int64)
  col_remapping = _ops.convert_to_tensor(col_remapping, _dtypes.int64)
  initializing_values = _ops.convert_to_tensor(initializing_values, _dtypes.float32)
  op_inputs = [ckpt_path, old_tensor_name, row_remapping, col_remapping,
               initializing_values]
  op_attrs = ("num_rows", num_rows, "num_cols", num_cols,
              "max_rows_in_memory", max_rows_in_memory)
  # The op produces exactly one output: the remapped matrix.
  outputs = _execute.execute(b"LoadAndRemapMatrix", 1, inputs=op_inputs,
                             attrs=op_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "LoadAndRemapMatrix", op_inputs, op_attrs, outputs)
  loaded_matrix, = outputs
  return loaded_matrix