# Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/tensorflow/python/ops/gen_lookup_ops.py: 12% of 905 statements
1"""Python wrappers around TensorFlow ops.
3This file is MACHINE GENERATED! Do not edit.
4"""
6import collections
8from tensorflow.python import pywrap_tfe as pywrap_tfe
9from tensorflow.python.eager import context as _context
10from tensorflow.python.eager import core as _core
11from tensorflow.python.eager import execute as _execute
12from tensorflow.python.framework import dtypes as _dtypes
13from tensorflow.security.fuzzing.py import annotation_types as _atypes
15from tensorflow.python.framework import op_def_registry as _op_def_registry
16from tensorflow.python.framework import ops as _ops
17from tensorflow.python.framework import op_def_library as _op_def_library
18from tensorflow.python.util.deprecation import deprecated_endpoints
19from tensorflow.python.util import dispatch as _dispatch
20from tensorflow.python.util.tf_export import tf_export
22from typing import TypeVar
def anonymous_hash_table(key_dtype, value_dtype, name=None):
  r"""Creates an uninitialized anonymous hash table.

  This op creates a new anonymous hash table (as a resource) every time
  it is executed, with the specified dtype of its keys and values,
  returning the resource handle. Before using the table you will have
  to initialize it. After initialization the table will be
  immutable. The table is anonymous in the sense that it can only be
  accessed by the returned resource handle (e.g. it cannot be looked up
  by a name in a resource manager). The table will be automatically
  deleted when all resource handles pointing to it are gone.

  Args:
    key_dtype: A `tf.DType`. Type of the table keys.
    value_dtype: A `tf.DType`. Type of the table values.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "AnonymousHashTable", name, "key_dtype", key_dtype,
        "value_dtype", value_dtype)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return anonymous_hash_table_eager_fallback(
          key_dtype=key_dtype, value_dtype=value_dtype, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  key_dtype = _execute.make_type(key_dtype, "key_dtype")
  value_dtype = _execute.make_type(value_dtype, "value_dtype")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "AnonymousHashTable", key_dtype=key_dtype, value_dtype=value_dtype,
                              name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("key_dtype", _op._get_attr_type("key_dtype"), "value_dtype",
              _op._get_attr_type("value_dtype"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "AnonymousHashTable", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

AnonymousHashTable = tf_export("raw_ops.AnonymousHashTable")(_ops.to_raw_op(anonymous_hash_table))

def anonymous_hash_table_eager_fallback(key_dtype, value_dtype, name, ctx):
  key_dtype = _execute.make_type(key_dtype, "key_dtype")
  value_dtype = _execute.make_type(value_dtype, "value_dtype")
  _inputs_flat = []
  _attrs = ("key_dtype", key_dtype, "value_dtype", value_dtype)
  _result = _execute.execute(b"AnonymousHashTable", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "AnonymousHashTable", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

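
# --- Illustrative usage sketch, not part of the machine-generated wrappers. ---
# Assumes an eager TF 2.x context. The anonymous table handle is initialized
# once with InitializeTableV2 (defined later in this module) and then queried
# with LookupTableFindV2. The helper name below is hypothetical.
def _example_anonymous_hash_table():
  import tensorflow as tf
  handle = tf.raw_ops.AnonymousHashTable(key_dtype=tf.string,
                                         value_dtype=tf.int64)
  tf.raw_ops.InitializeTableV2(table_handle=handle,
                               keys=tf.constant(["apple", "banana"]),
                               values=tf.constant([0, 1], dtype=tf.int64))
  # Keys absent from the table come back as default_value (-1 here).
  return tf.raw_ops.LookupTableFindV2(table_handle=handle,
                                      keys=tf.constant(["banana", "cherry"]),
                                      default_value=tf.constant(-1, tf.int64))
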
def anonymous_mutable_dense_hash_table(empty_key, deleted_key, value_dtype, value_shape=[], initial_num_buckets=131072, max_load_factor=0.8, name=None):
  r"""Creates an empty anonymous mutable hash table that uses tensors as the backing store.

  This op creates a new anonymous mutable hash table (as a resource) every time
  it is executed, with the specified dtype of its keys and values,
  returning the resource handle. Each value must be a scalar.
  Data can be inserted into the table using
  the insert operations. It does not support the initialization operation.

  It uses "open addressing" with quadratic reprobing to resolve
  collisions.

  The table is anonymous in the sense that it can only be
  accessed by the returned resource handle (e.g. it cannot be looked up
  by a name in a resource manager). The table will be automatically
  deleted when all resource handles pointing to it are gone.

  Args:
    empty_key: A `Tensor`.
      The key used to represent empty key buckets internally. Must not
      be used in insert or lookup operations.
    deleted_key: A `Tensor`. Must have the same type as `empty_key`.
    value_dtype: A `tf.DType`. Type of the table values.
    value_shape: An optional `tf.TensorShape` or list of `ints`. Defaults to `[]`.
      The shape of each value.
    initial_num_buckets: An optional `int`. Defaults to `131072`.
      The initial number of hash table buckets. Must be a power
      of 2.
    max_load_factor: An optional `float`. Defaults to `0.8`.
      The maximum ratio between number of entries and number of
      buckets before growing the table. Must be between 0 and 1.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "AnonymousMutableDenseHashTable", name, empty_key, deleted_key,
        "value_dtype", value_dtype, "value_shape", value_shape,
        "initial_num_buckets", initial_num_buckets, "max_load_factor",
        max_load_factor)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return anonymous_mutable_dense_hash_table_eager_fallback(
          empty_key, deleted_key, value_dtype=value_dtype,
          value_shape=value_shape, initial_num_buckets=initial_num_buckets,
          max_load_factor=max_load_factor, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  value_dtype = _execute.make_type(value_dtype, "value_dtype")
  if value_shape is None:
    value_shape = []
  value_shape = _execute.make_shape(value_shape, "value_shape")
  if initial_num_buckets is None:
    initial_num_buckets = 131072
  initial_num_buckets = _execute.make_int(initial_num_buckets, "initial_num_buckets")
  if max_load_factor is None:
    max_load_factor = 0.8
  max_load_factor = _execute.make_float(max_load_factor, "max_load_factor")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "AnonymousMutableDenseHashTable", empty_key=empty_key,
                                          deleted_key=deleted_key,
                                          value_dtype=value_dtype,
                                          value_shape=value_shape,
                                          initial_num_buckets=initial_num_buckets,
                                          max_load_factor=max_load_factor,
                                          name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("key_dtype", _op._get_attr_type("key_dtype"), "value_dtype",
              _op._get_attr_type("value_dtype"), "value_shape",
              _op.get_attr("value_shape"), "initial_num_buckets",
              _op._get_attr_int("initial_num_buckets"), "max_load_factor",
              _op.get_attr("max_load_factor"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "AnonymousMutableDenseHashTable", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

AnonymousMutableDenseHashTable = tf_export("raw_ops.AnonymousMutableDenseHashTable")(_ops.to_raw_op(anonymous_mutable_dense_hash_table))

def anonymous_mutable_dense_hash_table_eager_fallback(empty_key, deleted_key, value_dtype, value_shape, initial_num_buckets, max_load_factor, name, ctx):
  value_dtype = _execute.make_type(value_dtype, "value_dtype")
  if value_shape is None:
    value_shape = []
  value_shape = _execute.make_shape(value_shape, "value_shape")
  if initial_num_buckets is None:
    initial_num_buckets = 131072
  initial_num_buckets = _execute.make_int(initial_num_buckets, "initial_num_buckets")
  if max_load_factor is None:
    max_load_factor = 0.8
  max_load_factor = _execute.make_float(max_load_factor, "max_load_factor")
  _attr_key_dtype, _inputs_key_dtype = _execute.args_to_matching_eager([empty_key, deleted_key], ctx, [])
  (empty_key, deleted_key) = _inputs_key_dtype
  _inputs_flat = [empty_key, deleted_key]
  _attrs = ("key_dtype", _attr_key_dtype, "value_dtype", value_dtype,
            "value_shape", value_shape, "initial_num_buckets",
            initial_num_buckets, "max_load_factor", max_load_factor)
  _result = _execute.execute(b"AnonymousMutableDenseHashTable", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
                             name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "AnonymousMutableDenseHashTable", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

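
# --- Illustrative usage sketch, not part of the machine-generated wrappers. ---
# Assumes an eager TF 2.x context. The dense table needs two sentinel keys
# (empty_key, deleted_key) that are never used as real keys; values here are
# scalar float32, matching the default value_shape of []. Helper name is
# hypothetical.
def _example_anonymous_mutable_dense_hash_table():
  import tensorflow as tf
  handle = tf.raw_ops.AnonymousMutableDenseHashTable(
      empty_key=tf.constant(-1, tf.int64),
      deleted_key=tf.constant(-2, tf.int64),
      value_dtype=tf.float32)
  tf.raw_ops.LookupTableInsertV2(table_handle=handle,
                                 keys=tf.constant([1, 2], tf.int64),
                                 values=tf.constant([10.0, 20.0]))
  # Key 3 was never inserted, so it maps to default_value (0.0).
  return tf.raw_ops.LookupTableFindV2(table_handle=handle,
                                      keys=tf.constant([2, 3], tf.int64),
                                      default_value=tf.constant(0.0))
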
def anonymous_mutable_hash_table(key_dtype, value_dtype, name=None):
  r"""Creates an empty anonymous mutable hash table.

  This op creates a new anonymous mutable hash table (as a resource) every time
  it is executed, with the specified dtype of its keys and values,
  returning the resource handle. Each value must be a scalar.
  Data can be inserted into the table using
  the insert operations. It does not support the initialization operation.
  The table is anonymous in the sense that it can only be
  accessed by the returned resource handle (e.g. it cannot be looked up
  by a name in a resource manager). The table will be automatically
  deleted when all resource handles pointing to it are gone.

  Args:
    key_dtype: A `tf.DType`. Type of the table keys.
    value_dtype: A `tf.DType`. Type of the table values.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "AnonymousMutableHashTable", name, "key_dtype", key_dtype,
        "value_dtype", value_dtype)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return anonymous_mutable_hash_table_eager_fallback(
          key_dtype=key_dtype, value_dtype=value_dtype, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  key_dtype = _execute.make_type(key_dtype, "key_dtype")
  value_dtype = _execute.make_type(value_dtype, "value_dtype")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "AnonymousMutableHashTable", key_dtype=key_dtype,
                                     value_dtype=value_dtype, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("key_dtype", _op._get_attr_type("key_dtype"), "value_dtype",
              _op._get_attr_type("value_dtype"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "AnonymousMutableHashTable", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

AnonymousMutableHashTable = tf_export("raw_ops.AnonymousMutableHashTable")(_ops.to_raw_op(anonymous_mutable_hash_table))

def anonymous_mutable_hash_table_eager_fallback(key_dtype, value_dtype, name, ctx):
  key_dtype = _execute.make_type(key_dtype, "key_dtype")
  value_dtype = _execute.make_type(value_dtype, "value_dtype")
  _inputs_flat = []
  _attrs = ("key_dtype", key_dtype, "value_dtype", value_dtype)
  _result = _execute.execute(b"AnonymousMutableHashTable", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
                             name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "AnonymousMutableHashTable", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

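
# --- Illustrative usage sketch, not part of the machine-generated wrappers. ---
# Assumes an eager TF 2.x context. A mutable table skips the initialization
# step entirely: pairs are added with LookupTableInsertV2 and counted with
# LookupTableSizeV2. Helper name is hypothetical.
def _example_anonymous_mutable_hash_table():
  import tensorflow as tf
  handle = tf.raw_ops.AnonymousMutableHashTable(key_dtype=tf.string,
                                                value_dtype=tf.int64)
  tf.raw_ops.LookupTableInsertV2(table_handle=handle,
                                 keys=tf.constant(["a", "b"]),
                                 values=tf.constant([1, 2], dtype=tf.int64))
  size = tf.raw_ops.LookupTableSizeV2(table_handle=handle)  # -> 2
  values = tf.raw_ops.LookupTableFindV2(table_handle=handle,
                                        keys=tf.constant(["b", "missing"]),
                                        default_value=tf.constant(0, tf.int64))
  return size, values
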
def anonymous_mutable_hash_table_of_tensors(key_dtype, value_dtype, value_shape=[], name=None):
  r"""Creates an empty anonymous mutable hash table of vector values.

  This op creates a new anonymous mutable hash table (as a resource) every time
  it is executed, with the specified dtype of its keys and values,
  returning the resource handle. Each value must be a vector.
  Data can be inserted into the table using
  the insert operations. It does not support the initialization operation.
  The table is anonymous in the sense that it can only be
  accessed by the returned resource handle (e.g. it cannot be looked up
  by a name in a resource manager). The table will be automatically
  deleted when all resource handles pointing to it are gone.

  Args:
    key_dtype: A `tf.DType`. Type of the table keys.
    value_dtype: A `tf.DType`. Type of the table values.
    value_shape: An optional `tf.TensorShape` or list of `ints`. Defaults to `[]`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "AnonymousMutableHashTableOfTensors", name, "key_dtype",
        key_dtype, "value_dtype", value_dtype, "value_shape", value_shape)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return anonymous_mutable_hash_table_of_tensors_eager_fallback(
          key_dtype=key_dtype, value_dtype=value_dtype,
          value_shape=value_shape, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  key_dtype = _execute.make_type(key_dtype, "key_dtype")
  value_dtype = _execute.make_type(value_dtype, "value_dtype")
  if value_shape is None:
    value_shape = []
  value_shape = _execute.make_shape(value_shape, "value_shape")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "AnonymousMutableHashTableOfTensors", key_dtype=key_dtype,
                                              value_dtype=value_dtype,
                                              value_shape=value_shape,
                                              name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("key_dtype", _op._get_attr_type("key_dtype"), "value_dtype",
              _op._get_attr_type("value_dtype"), "value_shape",
              _op.get_attr("value_shape"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "AnonymousMutableHashTableOfTensors", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

AnonymousMutableHashTableOfTensors = tf_export("raw_ops.AnonymousMutableHashTableOfTensors")(_ops.to_raw_op(anonymous_mutable_hash_table_of_tensors))

def anonymous_mutable_hash_table_of_tensors_eager_fallback(key_dtype, value_dtype, value_shape, name, ctx):
  key_dtype = _execute.make_type(key_dtype, "key_dtype")
  value_dtype = _execute.make_type(value_dtype, "value_dtype")
  if value_shape is None:
    value_shape = []
  value_shape = _execute.make_shape(value_shape, "value_shape")
  _inputs_flat = []
  _attrs = ("key_dtype", key_dtype, "value_dtype", value_dtype, "value_shape",
            value_shape)
  _result = _execute.execute(b"AnonymousMutableHashTableOfTensors", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
                             name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "AnonymousMutableHashTableOfTensors", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

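
# --- Illustrative usage sketch, not part of the machine-generated wrappers. ---
# Assumes an eager TF 2.x context. With value_shape=[2] every key maps to a
# length-2 vector, so inserted values have shape [num_keys, 2] and the
# default_value for lookups is also a length-2 vector. Helper name is
# hypothetical.
def _example_anonymous_mutable_hash_table_of_tensors():
  import tensorflow as tf
  handle = tf.raw_ops.AnonymousMutableHashTableOfTensors(
      key_dtype=tf.string, value_dtype=tf.float32, value_shape=[2])
  tf.raw_ops.LookupTableInsertV2(
      table_handle=handle,
      keys=tf.constant(["a", "b"]),
      values=tf.constant([[1.0, 2.0], [3.0, 4.0]]))
  return tf.raw_ops.LookupTableFindV2(
      table_handle=handle,
      keys=tf.constant(["a", "missing"]),
      default_value=tf.constant([0.0, 0.0]))
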
def hash_table(key_dtype, value_dtype, container="", shared_name="", use_node_name_sharing=False, name=None):
  r"""Creates a non-initialized hash table.

  This op creates a hash table, specifying the type of its keys and values.
  Before using the table you will have to initialize it. After initialization the
  table will be immutable.

  Args:
    key_dtype: A `tf.DType`. Type of the table keys.
    value_dtype: A `tf.DType`. Type of the table values.
    container: An optional `string`. Defaults to `""`.
      If non-empty, this table is placed in the given container.
      Otherwise, a default container is used.
    shared_name: An optional `string`. Defaults to `""`.
      If non-empty, this table is shared under the given name across
      multiple sessions.
    use_node_name_sharing: An optional `bool`. Defaults to `False`.
      If true and shared_name is empty, the table is shared
      using the node name.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type mutable `string`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    raise RuntimeError("hash_table op does not support eager execution. Arg 'table_handle' is a ref.")
  # Add nodes to the TensorFlow graph.
  key_dtype = _execute.make_type(key_dtype, "key_dtype")
  value_dtype = _execute.make_type(value_dtype, "value_dtype")
  if container is None:
    container = ""
  container = _execute.make_str(container, "container")
  if shared_name is None:
    shared_name = ""
  shared_name = _execute.make_str(shared_name, "shared_name")
  if use_node_name_sharing is None:
    use_node_name_sharing = False
  use_node_name_sharing = _execute.make_bool(use_node_name_sharing, "use_node_name_sharing")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "HashTable", key_dtype=key_dtype, value_dtype=value_dtype,
                     container=container, shared_name=shared_name,
                     use_node_name_sharing=use_node_name_sharing, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("container", _op.get_attr("container"), "shared_name",
              _op.get_attr("shared_name"), "use_node_name_sharing",
              _op._get_attr_bool("use_node_name_sharing"), "key_dtype",
              _op._get_attr_type("key_dtype"), "value_dtype",
              _op._get_attr_type("value_dtype"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "HashTable", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

HashTable = tf_export("raw_ops.HashTable")(_ops.to_raw_op(hash_table))

def hash_table_eager_fallback(key_dtype, value_dtype, container, shared_name, use_node_name_sharing, name, ctx):
  raise RuntimeError("hash_table op does not support eager execution. Arg 'table_handle' is a ref.")

def hash_table_v2(key_dtype, value_dtype, container="", shared_name="", use_node_name_sharing=False, name=None):
  r"""Creates a non-initialized hash table.

  This op creates a hash table, specifying the type of its keys and values.
  Before using the table you will have to initialize it. After initialization the
  table will be immutable.

  Args:
    key_dtype: A `tf.DType`. Type of the table keys.
    value_dtype: A `tf.DType`. Type of the table values.
    container: An optional `string`. Defaults to `""`.
      If non-empty, this table is placed in the given container.
      Otherwise, a default container is used.
    shared_name: An optional `string`. Defaults to `""`.
      If non-empty, this table is shared under the given name across
      multiple sessions.
    use_node_name_sharing: An optional `bool`. Defaults to `False`.
      If true and shared_name is empty, the table is shared
      using the node name.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "HashTableV2", name, "container", container, "shared_name",
        shared_name, "use_node_name_sharing", use_node_name_sharing,
        "key_dtype", key_dtype, "value_dtype", value_dtype)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return hash_table_v2_eager_fallback(
          container=container, shared_name=shared_name,
          use_node_name_sharing=use_node_name_sharing, key_dtype=key_dtype,
          value_dtype=value_dtype, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  key_dtype = _execute.make_type(key_dtype, "key_dtype")
  value_dtype = _execute.make_type(value_dtype, "value_dtype")
  if container is None:
    container = ""
  container = _execute.make_str(container, "container")
  if shared_name is None:
    shared_name = ""
  shared_name = _execute.make_str(shared_name, "shared_name")
  if use_node_name_sharing is None:
    use_node_name_sharing = False
  use_node_name_sharing = _execute.make_bool(use_node_name_sharing, "use_node_name_sharing")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "HashTableV2", key_dtype=key_dtype, value_dtype=value_dtype,
                       container=container, shared_name=shared_name,
                       use_node_name_sharing=use_node_name_sharing, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("container", _op.get_attr("container"), "shared_name",
              _op.get_attr("shared_name"), "use_node_name_sharing",
              _op._get_attr_bool("use_node_name_sharing"), "key_dtype",
              _op._get_attr_type("key_dtype"), "value_dtype",
              _op._get_attr_type("value_dtype"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "HashTableV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

HashTableV2 = tf_export("raw_ops.HashTableV2")(_ops.to_raw_op(hash_table_v2))

def hash_table_v2_eager_fallback(key_dtype, value_dtype, container, shared_name, use_node_name_sharing, name, ctx):
  key_dtype = _execute.make_type(key_dtype, "key_dtype")
  value_dtype = _execute.make_type(value_dtype, "value_dtype")
  if container is None:
    container = ""
  container = _execute.make_str(container, "container")
  if shared_name is None:
    shared_name = ""
  shared_name = _execute.make_str(shared_name, "shared_name")
  if use_node_name_sharing is None:
    use_node_name_sharing = False
  use_node_name_sharing = _execute.make_bool(use_node_name_sharing, "use_node_name_sharing")
  _inputs_flat = []
  _attrs = ("container", container, "shared_name", shared_name,
            "use_node_name_sharing", use_node_name_sharing, "key_dtype",
            key_dtype, "value_dtype", value_dtype)
  _result = _execute.execute(b"HashTableV2", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "HashTableV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

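
# --- Illustrative usage sketch, not part of the machine-generated wrappers. ---
# Assumes an eager TF 2.x context. HashTableV2 returns an uninitialized
# resource handle; it becomes usable (and immutable) after InitializeTableV2.
# Helper name is hypothetical.
def _example_hash_table_v2():
  import tensorflow as tf
  handle = tf.raw_ops.HashTableV2(key_dtype=tf.string, value_dtype=tf.int64)
  tf.raw_ops.InitializeTableV2(table_handle=handle,
                               keys=tf.constant(["yes", "no"]),
                               values=tf.constant([1, 0], dtype=tf.int64))
  return tf.raw_ops.LookupTableFindV2(table_handle=handle,
                                      keys=tf.constant(["no"]),
                                      default_value=tf.constant(-1, tf.int64))
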
def initialize_table(table_handle, keys, values, name=None):
  r"""Table initializer that takes two tensors for keys and values respectively.

  Args:
    table_handle: A `Tensor` of type mutable `string`.
      Handle to a table which will be initialized.
    keys: A `Tensor`. Keys of type Tkey.
    values: A `Tensor`. Values of type Tval.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    raise RuntimeError("initialize_table op does not support eager execution. Arg 'table_handle' is a ref.")
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "InitializeTable", table_handle=table_handle, keys=keys,
                           values=values, name=name)
  return _op
InitializeTable = tf_export("raw_ops.InitializeTable")(_ops.to_raw_op(initialize_table))

def initialize_table_eager_fallback(table_handle, keys, values, name, ctx):
  raise RuntimeError("initialize_table op does not support eager execution. Arg 'table_handle' is a ref.")

def initialize_table_from_text_file(table_handle, filename, key_index, value_index, vocab_size=-1, delimiter="\t", offset=0, name=None):
  r"""Initializes a table from a text file.

  It inserts one key-value pair into the table for each line of the file.
  The key and value are extracted from the whole line content, elements from the
  split line based on `delimiter`, or the line number (starting from zero).
  Where to extract the key and value from a line is specified by `key_index` and
  `value_index`.

  - A value of -1 means use the line number (starting from zero), expects `int64`.
  - A value of -2 means use the whole line content, expects `string`.
  - A value >= 0 means use the index (starting at zero) of the split line based
    on `delimiter`.

  Args:
    table_handle: A `Tensor` of type mutable `string`.
      Handle to a table which will be initialized.
    filename: A `Tensor` of type `string`. Filename of a vocabulary text file.
    key_index: An `int` that is `>= -2`.
      Column index in a line to get the table `key` values from.
    value_index: An `int` that is `>= -2`.
      Column index that represents information of a line to get the table
      `value` values from.
    vocab_size: An optional `int` that is `>= -1`. Defaults to `-1`.
      Number of elements of the file, use -1 if unknown.
    delimiter: An optional `string`. Defaults to `"\t"`.
      Delimiter to separate fields in a line.
    offset: An optional `int`. Defaults to `0`.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    raise RuntimeError("initialize_table_from_text_file op does not support eager execution. Arg 'table_handle' is a ref.")
  # Add nodes to the TensorFlow graph.
  key_index = _execute.make_int(key_index, "key_index")
  value_index = _execute.make_int(value_index, "value_index")
  if vocab_size is None:
    vocab_size = -1
  vocab_size = _execute.make_int(vocab_size, "vocab_size")
  if delimiter is None:
    delimiter = "\t"
  delimiter = _execute.make_str(delimiter, "delimiter")
  if offset is None:
    offset = 0
  offset = _execute.make_int(offset, "offset")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "InitializeTableFromTextFile", table_handle=table_handle,
                                       filename=filename, key_index=key_index,
                                       value_index=value_index,
                                       vocab_size=vocab_size,
                                       delimiter=delimiter, offset=offset,
                                       name=name)
  return _op
InitializeTableFromTextFile = tf_export("raw_ops.InitializeTableFromTextFile")(_ops.to_raw_op(initialize_table_from_text_file))

def initialize_table_from_text_file_eager_fallback(table_handle, filename, key_index, value_index, vocab_size, delimiter, offset, name, ctx):
  raise RuntimeError("initialize_table_from_text_file op does not support eager execution. Arg 'table_handle' is a ref.")

def initialize_table_from_text_file_v2(table_handle, filename, key_index, value_index, vocab_size=-1, delimiter="\t", offset=0, name=None):
  r"""Initializes a table from a text file.

  It inserts one key-value pair into the table for each line of the file.
  The key and value are extracted from the whole line content, elements from the
  split line based on `delimiter`, or the line number (starting from zero).
  Where to extract the key and value from a line is specified by `key_index` and
  `value_index`.

  - A value of -1 means use the line number (starting from zero), expects `int64`.
  - A value of -2 means use the whole line content, expects `string`.
  - A value >= 0 means use the index (starting at zero) of the split line based
    on `delimiter`.

  Args:
    table_handle: A `Tensor` of type `resource`.
      Handle to a table which will be initialized.
    filename: A `Tensor` of type `string`. Filename of a vocabulary text file.
    key_index: An `int` that is `>= -2`.
      Column index in a line to get the table `key` values from.
    value_index: An `int` that is `>= -2`.
      Column index that represents information of a line to get the table
      `value` values from.
    vocab_size: An optional `int` that is `>= -1`. Defaults to `-1`.
      Number of elements of the file, use -1 if unknown.
    delimiter: An optional `string`. Defaults to `"\t"`.
      Delimiter to separate fields in a line.
    offset: An optional `int`. Defaults to `0`.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "InitializeTableFromTextFileV2", name, table_handle, filename,
        "key_index", key_index, "value_index", value_index, "vocab_size",
        vocab_size, "delimiter", delimiter, "offset", offset)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return initialize_table_from_text_file_v2_eager_fallback(
          table_handle, filename, key_index=key_index,
          value_index=value_index, vocab_size=vocab_size, delimiter=delimiter,
          offset=offset, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  key_index = _execute.make_int(key_index, "key_index")
  value_index = _execute.make_int(value_index, "value_index")
  if vocab_size is None:
    vocab_size = -1
  vocab_size = _execute.make_int(vocab_size, "vocab_size")
  if delimiter is None:
    delimiter = "\t"
  delimiter = _execute.make_str(delimiter, "delimiter")
  if offset is None:
    offset = 0
  offset = _execute.make_int(offset, "offset")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "InitializeTableFromTextFileV2", table_handle=table_handle,
                                         filename=filename,
                                         key_index=key_index,
                                         value_index=value_index,
                                         vocab_size=vocab_size,
                                         delimiter=delimiter, offset=offset,
                                         name=name)
  return _op
InitializeTableFromTextFileV2 = tf_export("raw_ops.InitializeTableFromTextFileV2")(_ops.to_raw_op(initialize_table_from_text_file_v2))

def initialize_table_from_text_file_v2_eager_fallback(table_handle, filename, key_index, value_index, vocab_size, delimiter, offset, name, ctx):
  key_index = _execute.make_int(key_index, "key_index")
  value_index = _execute.make_int(value_index, "value_index")
  if vocab_size is None:
    vocab_size = -1
  vocab_size = _execute.make_int(vocab_size, "vocab_size")
  if delimiter is None:
    delimiter = "\t"
  delimiter = _execute.make_str(delimiter, "delimiter")
  if offset is None:
    offset = 0
  offset = _execute.make_int(offset, "offset")
  table_handle = _ops.convert_to_tensor(table_handle, _dtypes.resource)
  filename = _ops.convert_to_tensor(filename, _dtypes.string)
  _inputs_flat = [table_handle, filename]
  _attrs = ("key_index", key_index, "value_index", value_index, "vocab_size",
            vocab_size, "delimiter", delimiter, "offset", offset)
  _result = _execute.execute(b"InitializeTableFromTextFileV2", 0,
                             inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
                             name=name)
  _result = None
  return _result

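
# --- Illustrative usage sketch, not part of the machine-generated wrappers. ---
# Assumes an eager TF 2.x context and a hypothetical one-token-per-line file
# "/tmp/vocab.txt". key_index=-2 takes the whole line as the string key and
# value_index=-1 assigns the line number as the int64 value, which is the
# usual vocabulary-to-id setup. Helper name and path are hypothetical.
def _example_initialize_table_from_text_file_v2():
  import tensorflow as tf
  handle = tf.raw_ops.AnonymousHashTable(key_dtype=tf.string,
                                         value_dtype=tf.int64)
  tf.raw_ops.InitializeTableFromTextFileV2(
      table_handle=handle,
      filename=tf.constant("/tmp/vocab.txt"),  # hypothetical path
      key_index=-2,    # whole line content -> string key
      value_index=-1,  # line number (starting from zero) -> int64 value
      vocab_size=-1,   # unknown size
      delimiter="\t",
      offset=0)
  return tf.raw_ops.LookupTableFindV2(table_handle=handle,
                                      keys=tf.constant(["the"]),
                                      default_value=tf.constant(-1, tf.int64))
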
def initialize_table_v2(table_handle, keys, values, name=None):
  r"""Table initializer that takes two tensors for keys and values respectively.

  Args:
    table_handle: A `Tensor` of type `resource`.
      Handle to a table which will be initialized.
    keys: A `Tensor`. Keys of type Tkey.
    values: A `Tensor`. Values of type Tval.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "InitializeTableV2", name, table_handle, keys, values)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return initialize_table_v2_eager_fallback(
          table_handle, keys, values, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "InitializeTableV2", table_handle=table_handle, keys=keys,
                             values=values, name=name)
  return _op
InitializeTableV2 = tf_export("raw_ops.InitializeTableV2")(_ops.to_raw_op(initialize_table_v2))

def initialize_table_v2_eager_fallback(table_handle, keys, values, name, ctx):
  _attr_Tkey, (keys,) = _execute.args_to_matching_eager([keys], ctx, [])
  _attr_Tval, (values,) = _execute.args_to_matching_eager([values], ctx, [])
  table_handle = _ops.convert_to_tensor(table_handle, _dtypes.resource)
  _inputs_flat = [table_handle, keys, values]
  _attrs = ("Tkey", _attr_Tkey, "Tval", _attr_Tval)
  _result = _execute.execute(b"InitializeTableV2", 0, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  _result = None
  return _result

_LookupTableExportOutput = collections.namedtuple(
    "LookupTableExport",
    ["keys", "values"])

def lookup_table_export(table_handle, Tkeys, Tvalues, name=None):
  r"""Outputs all keys and values in the table.

  Args:
    table_handle: A `Tensor` of type mutable `string`. Handle to the table.
    Tkeys: A `tf.DType`.
    Tvalues: A `tf.DType`.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (keys, values).

    keys: A `Tensor` of type `Tkeys`.
    values: A `Tensor` of type `Tvalues`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    raise RuntimeError("lookup_table_export op does not support eager execution. Arg 'table_handle' is a ref.")
  # Add nodes to the TensorFlow graph.
  Tkeys = _execute.make_type(Tkeys, "Tkeys")
  Tvalues = _execute.make_type(Tvalues, "Tvalues")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "LookupTableExport", table_handle=table_handle, Tkeys=Tkeys,
                             Tvalues=Tvalues, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("Tkeys", _op._get_attr_type("Tkeys"), "Tvalues",
              _op._get_attr_type("Tvalues"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "LookupTableExport", _inputs_flat, _attrs, _result)
  _result = _LookupTableExportOutput._make(_result)
  return _result

LookupTableExport = tf_export("raw_ops.LookupTableExport")(_ops.to_raw_op(lookup_table_export))

def lookup_table_export_eager_fallback(table_handle, Tkeys, Tvalues, name, ctx):
  raise RuntimeError("lookup_table_export op does not support eager execution. Arg 'table_handle' is a ref.")

_LookupTableExportV2Output = collections.namedtuple(
    "LookupTableExportV2",
    ["keys", "values"])

def lookup_table_export_v2(table_handle, Tkeys, Tvalues, name=None):
  r"""Outputs all keys and values in the table.

  Args:
    table_handle: A `Tensor` of type `resource`. Handle to the table.
    Tkeys: A `tf.DType`.
    Tvalues: A `tf.DType`.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (keys, values).

    keys: A `Tensor` of type `Tkeys`.
    values: A `Tensor` of type `Tvalues`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "LookupTableExportV2", name, table_handle, "Tkeys", Tkeys,
        "Tvalues", Tvalues)
      _result = _LookupTableExportV2Output._make(_result)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return lookup_table_export_v2_eager_fallback(
          table_handle, Tkeys=Tkeys, Tvalues=Tvalues, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  Tkeys = _execute.make_type(Tkeys, "Tkeys")
  Tvalues = _execute.make_type(Tvalues, "Tvalues")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "LookupTableExportV2", table_handle=table_handle, Tkeys=Tkeys,
                               Tvalues=Tvalues, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("Tkeys", _op._get_attr_type("Tkeys"), "Tvalues",
              _op._get_attr_type("Tvalues"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "LookupTableExportV2", _inputs_flat, _attrs, _result)
  _result = _LookupTableExportV2Output._make(_result)
  return _result

LookupTableExportV2 = tf_export("raw_ops.LookupTableExportV2")(_ops.to_raw_op(lookup_table_export_v2))

def lookup_table_export_v2_eager_fallback(table_handle, Tkeys, Tvalues, name, ctx):
  Tkeys = _execute.make_type(Tkeys, "Tkeys")
  Tvalues = _execute.make_type(Tvalues, "Tvalues")
  table_handle = _ops.convert_to_tensor(table_handle, _dtypes.resource)
  _inputs_flat = [table_handle]
  _attrs = ("Tkeys", Tkeys, "Tvalues", Tvalues)
  _result = _execute.execute(b"LookupTableExportV2", 2, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "LookupTableExportV2", _inputs_flat, _attrs, _result)
  _result = _LookupTableExportV2Output._make(_result)
  return _result

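
# --- Illustrative usage sketch, not part of the machine-generated wrappers. ---
# Assumes an eager TF 2.x context. ExportV2 dumps the full contents of a table
# as a (keys, values) pair; Tkeys/Tvalues must match the table's dtypes.
# Helper name is hypothetical.
def _example_lookup_table_export_v2():
  import tensorflow as tf
  handle = tf.raw_ops.AnonymousMutableHashTable(key_dtype=tf.string,
                                                value_dtype=tf.int64)
  tf.raw_ops.LookupTableInsertV2(table_handle=handle,
                                 keys=tf.constant(["x", "y"]),
                                 values=tf.constant([7, 8], dtype=tf.int64))
  keys, values = tf.raw_ops.LookupTableExportV2(table_handle=handle,
                                                Tkeys=tf.string,
                                                Tvalues=tf.int64)
  return keys, values
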
def lookup_table_find(table_handle, keys, default_value, name=None):
  r"""Looks up keys in a table, outputs the corresponding values.

  The tensor `keys` must be of the same type as the keys of the table.
  The output `values` is of the type of the table values.

  The scalar `default_value` is the value output for keys not present in the
  table. It must also be of the same type as the table values.

  Args:
    table_handle: A `Tensor` of type mutable `string`. Handle to the table.
    keys: A `Tensor`. Any shape. Keys to look up.
    default_value: A `Tensor`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor`. Has the same type as `default_value`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    raise RuntimeError("lookup_table_find op does not support eager execution. Arg 'table_handle' is a ref.")
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "LookupTableFind", table_handle=table_handle, keys=keys,
                           default_value=default_value, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("Tin", _op._get_attr_type("Tin"), "Tout",
              _op._get_attr_type("Tout"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "LookupTableFind", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

LookupTableFind = tf_export("raw_ops.LookupTableFind")(_ops.to_raw_op(lookup_table_find))

def lookup_table_find_eager_fallback(table_handle, keys, default_value, name, ctx):
  raise RuntimeError("lookup_table_find op does not support eager execution. Arg 'table_handle' is a ref.")

def lookup_table_find_v2(table_handle, keys, default_value, name=None):
  r"""Looks up keys in a table, outputs the corresponding values.

  The tensor `keys` must be of the same type as the keys of the table.
  The output `values` is of the type of the table values.

  The scalar `default_value` is the value output for keys not present in the
  table. It must also be of the same type as the table values.

  Args:
    table_handle: A `Tensor` of type `resource`. Handle to the table.
    keys: A `Tensor`. Any shape. Keys to look up.
    default_value: A `Tensor`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor`. Has the same type as `default_value`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "LookupTableFindV2", name, table_handle, keys, default_value)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return lookup_table_find_v2_eager_fallback(
          table_handle, keys, default_value, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "LookupTableFindV2", table_handle=table_handle, keys=keys,
                             default_value=default_value, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("Tin", _op._get_attr_type("Tin"), "Tout",
              _op._get_attr_type("Tout"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "LookupTableFindV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

LookupTableFindV2 = tf_export("raw_ops.LookupTableFindV2")(_ops.to_raw_op(lookup_table_find_v2))

def lookup_table_find_v2_eager_fallback(table_handle, keys, default_value, name, ctx):
  _attr_Tin, (keys,) = _execute.args_to_matching_eager([keys], ctx, [])
  _attr_Tout, (default_value,) = _execute.args_to_matching_eager([default_value], ctx, [])
  table_handle = _ops.convert_to_tensor(table_handle, _dtypes.resource)
  _inputs_flat = [table_handle, keys, default_value]
  _attrs = ("Tin", _attr_Tin, "Tout", _attr_Tout)
  _result = _execute.execute(b"LookupTableFindV2", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "LookupTableFindV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

def lookup_table_import(table_handle, keys, values, name=None):
  r"""Replaces the contents of the table with the specified keys and values.

  The tensor `keys` must be of the same type as the keys of the table.
  The tensor `values` must be of the type of the table values.

  Args:
    table_handle: A `Tensor` of type mutable `string`. Handle to the table.
    keys: A `Tensor`. Any shape. Keys to look up.
    values: A `Tensor`. Values to associate with keys.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    raise RuntimeError("lookup_table_import op does not support eager execution. Arg 'table_handle' is a ref.")
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "LookupTableImport", table_handle=table_handle, keys=keys,
                             values=values, name=name)
  return _op
LookupTableImport = tf_export("raw_ops.LookupTableImport")(_ops.to_raw_op(lookup_table_import))

def lookup_table_import_eager_fallback(table_handle, keys, values, name, ctx):
  raise RuntimeError("lookup_table_import op does not support eager execution. Arg 'table_handle' is a ref.")

def lookup_table_import_v2(table_handle, keys, values, name=None):
  r"""Replaces the contents of the table with the specified keys and values.

  The tensor `keys` must be of the same type as the keys of the table.
  The tensor `values` must be of the type of the table values.

  Args:
    table_handle: A `Tensor` of type `resource`. Handle to the table.
    keys: A `Tensor`. Any shape. Keys to look up.
    values: A `Tensor`. Values to associate with keys.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "LookupTableImportV2", name, table_handle, keys, values)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return lookup_table_import_v2_eager_fallback(
          table_handle, keys, values, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "LookupTableImportV2", table_handle=table_handle, keys=keys,
                               values=values, name=name)
  return _op
LookupTableImportV2 = tf_export("raw_ops.LookupTableImportV2")(_ops.to_raw_op(lookup_table_import_v2))

def lookup_table_import_v2_eager_fallback(table_handle, keys, values, name, ctx):
  _attr_Tin, (keys,) = _execute.args_to_matching_eager([keys], ctx, [])
  _attr_Tout, (values,) = _execute.args_to_matching_eager([values], ctx, [])
  table_handle = _ops.convert_to_tensor(table_handle, _dtypes.resource)
  _inputs_flat = [table_handle, keys, values]
  _attrs = ("Tin", _attr_Tin, "Tout", _attr_Tout)
  _result = _execute.execute(b"LookupTableImportV2", 0, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  _result = None
  return _result

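
# --- Illustrative usage sketch, not part of the machine-generated wrappers. ---
# Assumes an eager TF 2.x context. ImportV2 replaces whatever is in a mutable
# table with the given key/value pairs (unlike InsertV2, which only adds or
# overwrites the listed keys). Helper name is hypothetical.
def _example_lookup_table_import_v2():
  import tensorflow as tf
  handle = tf.raw_ops.AnonymousMutableHashTable(key_dtype=tf.string,
                                                value_dtype=tf.int64)
  tf.raw_ops.LookupTableInsertV2(table_handle=handle,
                                 keys=tf.constant(["old"]),
                                 values=tf.constant([0], dtype=tf.int64))
  tf.raw_ops.LookupTableImportV2(table_handle=handle,
                                 keys=tf.constant(["new_a", "new_b"]),
                                 values=tf.constant([1, 2], dtype=tf.int64))
  # "old" is gone; only the two imported keys remain.
  return tf.raw_ops.LookupTableSizeV2(table_handle=handle)  # -> 2
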
def lookup_table_insert(table_handle, keys, values, name=None):
  r"""Updates the table to associate keys with values.

  The tensor `keys` must be of the same type as the keys of the table.
  The tensor `values` must be of the type of the table values.

  Args:
    table_handle: A `Tensor` of type mutable `string`. Handle to the table.
    keys: A `Tensor`. Any shape. Keys to look up.
    values: A `Tensor`. Values to associate with keys.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    raise RuntimeError("lookup_table_insert op does not support eager execution. Arg 'table_handle' is a ref.")
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "LookupTableInsert", table_handle=table_handle, keys=keys,
                             values=values, name=name)
  return _op
LookupTableInsert = tf_export("raw_ops.LookupTableInsert")(_ops.to_raw_op(lookup_table_insert))

def lookup_table_insert_eager_fallback(table_handle, keys, values, name, ctx):
  raise RuntimeError("lookup_table_insert op does not support eager execution. Arg 'table_handle' is a ref.")

def lookup_table_insert_v2(table_handle, keys, values, name=None):
  r"""Updates the table to associate keys with values.

  The tensor `keys` must be of the same type as the keys of the table.
  The tensor `values` must be of the type of the table values.

  Args:
    table_handle: A `Tensor` of type `resource`. Handle to the table.
    keys: A `Tensor`. Any shape. Keys to look up.
    values: A `Tensor`. Values to associate with keys.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "LookupTableInsertV2", name, table_handle, keys, values)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return lookup_table_insert_v2_eager_fallback(
          table_handle, keys, values, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "LookupTableInsertV2", table_handle=table_handle, keys=keys,
                               values=values, name=name)
  return _op
LookupTableInsertV2 = tf_export("raw_ops.LookupTableInsertV2")(_ops.to_raw_op(lookup_table_insert_v2))

def lookup_table_insert_v2_eager_fallback(table_handle, keys, values, name, ctx):
  _attr_Tin, (keys,) = _execute.args_to_matching_eager([keys], ctx, [])
  _attr_Tout, (values,) = _execute.args_to_matching_eager([values], ctx, [])
  table_handle = _ops.convert_to_tensor(table_handle, _dtypes.resource)
  _inputs_flat = [table_handle, keys, values]
  _attrs = ("Tin", _attr_Tin, "Tout", _attr_Tout)
  _result = _execute.execute(b"LookupTableInsertV2", 0, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  _result = None
  return _result

def lookup_table_remove_v2(table_handle, keys, name=None):
  r"""Removes keys and their associated values from a table.

  The tensor `keys` must be of the same type as the keys of the table. Keys not
  already in the table are silently ignored.

  Args:
    table_handle: A `Tensor` of type `resource`. Handle to the table.
    keys: A `Tensor`. Any shape. Keys of the elements to remove.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "LookupTableRemoveV2", name, table_handle, keys)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return lookup_table_remove_v2_eager_fallback(
          table_handle, keys, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "LookupTableRemoveV2", table_handle=table_handle, keys=keys,
                               name=name)
  return _op
LookupTableRemoveV2 = tf_export("raw_ops.LookupTableRemoveV2")(_ops.to_raw_op(lookup_table_remove_v2))

def lookup_table_remove_v2_eager_fallback(table_handle, keys, name, ctx):
  _attr_Tin, (keys,) = _execute.args_to_matching_eager([keys], ctx, [])
  table_handle = _ops.convert_to_tensor(table_handle, _dtypes.resource)
  _inputs_flat = [table_handle, keys]
  _attrs = ("Tin", _attr_Tin)
  _result = _execute.execute(b"LookupTableRemoveV2", 0, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  _result = None
  return _result

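
# --- Illustrative usage sketch, not part of the machine-generated wrappers. ---
# Assumes an eager TF 2.x context. RemoveV2 only applies to mutable tables;
# keys that are not present (99 below) are silently ignored. Helper name is
# hypothetical.
def _example_lookup_table_remove_v2():
  import tensorflow as tf
  handle = tf.raw_ops.AnonymousMutableHashTable(key_dtype=tf.int64,
                                                value_dtype=tf.int64)
  tf.raw_ops.LookupTableInsertV2(table_handle=handle,
                                 keys=tf.constant([1, 2, 3], tf.int64),
                                 values=tf.constant([10, 20, 30], tf.int64))
  tf.raw_ops.LookupTableRemoveV2(table_handle=handle,
                                 keys=tf.constant([2, 99], tf.int64))
  return tf.raw_ops.LookupTableSizeV2(table_handle=handle)  # -> 2
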
def lookup_table_size(table_handle, name=None):
  r"""Computes the number of elements in the given table.

  Args:
    table_handle: A `Tensor` of type mutable `string`. Handle to the table.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `int64`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    raise RuntimeError("lookup_table_size op does not support eager execution. Arg 'table_handle' is a ref.")
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "LookupTableSize", table_handle=table_handle, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ()
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "LookupTableSize", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

LookupTableSize = tf_export("raw_ops.LookupTableSize")(_ops.to_raw_op(lookup_table_size))

def lookup_table_size_eager_fallback(table_handle, name, ctx):
  raise RuntimeError("lookup_table_size op does not support eager execution. Arg 'table_handle' is a ref.")

def lookup_table_size_v2(table_handle, name=None):
  r"""Computes the number of elements in the given table.

  Args:
    table_handle: A `Tensor` of type `resource`. Handle to the table.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `int64`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "LookupTableSizeV2", name, table_handle)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return lookup_table_size_v2_eager_fallback(
          table_handle, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "LookupTableSizeV2", table_handle=table_handle, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ()
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "LookupTableSizeV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

LookupTableSizeV2 = tf_export("raw_ops.LookupTableSizeV2")(_ops.to_raw_op(lookup_table_size_v2))

def lookup_table_size_v2_eager_fallback(table_handle, name, ctx):
  table_handle = _ops.convert_to_tensor(table_handle, _dtypes.resource)
  _inputs_flat = [table_handle]
  _attrs = None
  _result = _execute.execute(b"LookupTableSizeV2", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "LookupTableSizeV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

1295def mutable_dense_hash_table(empty_key, value_dtype, container="", shared_name="", use_node_name_sharing=False, value_shape=[], initial_num_buckets=131072, max_load_factor=0.8, name=None):
1296 r"""Creates an empty hash table that uses tensors as the backing store.
1298 It uses "open addressing" with quadratic reprobing to resolve
1299 collisions.
1301 This op creates a mutable hash table, specifying the type of its keys and
1302 values. Each value must be a scalar. Data can be inserted into the table using
1303 the insert operations. It does not support the initialization operation.
1305 Args:
1306 empty_key: A `Tensor`.
1307 The key used to represent empty key buckets internally. Must not
1308 be used in insert or lookup operations.
1309 value_dtype: A `tf.DType`. Type of the table values.
1310 container: An optional `string`. Defaults to `""`.
1311 If non-empty, this table is placed in the given container.
1312 Otherwise, a default container is used.
1313 shared_name: An optional `string`. Defaults to `""`.
1314 If non-empty, this table is shared under the given name across
1315 multiple sessions.
1316 use_node_name_sharing: An optional `bool`. Defaults to `False`.
1317 value_shape: An optional `tf.TensorShape` or list of `ints`. Defaults to `[]`.
1318 The shape of each value.
1319 initial_num_buckets: An optional `int`. Defaults to `131072`.
1320 The initial number of hash table buckets. Must be a power
1321 to 2.
1322 max_load_factor: An optional `float`. Defaults to `0.8`.
1323 The maximum ratio between number of entries and number of
1324 buckets before growing the table. Must be between 0 and 1.
1325 name: A name for the operation (optional).
1327 Returns:
1328 A `Tensor` of type mutable `string`.
1329 """
1330 _ctx = _context._context or _context.context()
1331 tld = _ctx._thread_local_data
1332 if tld.is_eager:
1333 raise RuntimeError("mutable_dense_hash_table op does not support eager execution. Arg 'table_handle' is a ref.")
1334 # Add nodes to the TensorFlow graph.
1335 value_dtype = _execute.make_type(value_dtype, "value_dtype")
1336 if container is None:
1337 container = ""
1338 container = _execute.make_str(container, "container")
1339 if shared_name is None:
1340 shared_name = ""
1341 shared_name = _execute.make_str(shared_name, "shared_name")
1342 if use_node_name_sharing is None:
1343 use_node_name_sharing = False
1344 use_node_name_sharing = _execute.make_bool(use_node_name_sharing, "use_node_name_sharing")
1345 if value_shape is None:
1346 value_shape = []
1347 value_shape = _execute.make_shape(value_shape, "value_shape")
1348 if initial_num_buckets is None:
1349 initial_num_buckets = 131072
1350 initial_num_buckets = _execute.make_int(initial_num_buckets, "initial_num_buckets")
1351 if max_load_factor is None:
1352 max_load_factor = 0.8
1353 max_load_factor = _execute.make_float(max_load_factor, "max_load_factor")
1354 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1355 "MutableDenseHashTable", empty_key=empty_key, value_dtype=value_dtype,
1356 container=container, shared_name=shared_name,
1357 use_node_name_sharing=use_node_name_sharing,
1358 value_shape=value_shape,
1359 initial_num_buckets=initial_num_buckets,
1360 max_load_factor=max_load_factor, name=name)
1361 _result = _outputs[:]
1362 if _execute.must_record_gradient():
1363 _attrs = ("container", _op.get_attr("container"), "shared_name",
1364 _op.get_attr("shared_name"), "use_node_name_sharing",
1365 _op._get_attr_bool("use_node_name_sharing"), "key_dtype",
1366 _op._get_attr_type("key_dtype"), "value_dtype",
1367 _op._get_attr_type("value_dtype"), "value_shape",
1368 _op.get_attr("value_shape"), "initial_num_buckets",
1369 _op._get_attr_int("initial_num_buckets"), "max_load_factor",
1370 _op.get_attr("max_load_factor"))
1371 _inputs_flat = _op.inputs
1372 _execute.record_gradient(
1373 "MutableDenseHashTable", _inputs_flat, _attrs, _result)
1374 _result, = _result
1375 return _result
1377MutableDenseHashTable = tf_export("raw_ops.MutableDenseHashTable")(_ops.to_raw_op(mutable_dense_hash_table))
1380def mutable_dense_hash_table_eager_fallback(empty_key, value_dtype, container, shared_name, use_node_name_sharing, value_shape, initial_num_buckets, max_load_factor, name, ctx):
1381 raise RuntimeError("mutable_dense_hash_table op does not support eager execution. Arg 'table_handle' is a ref.")
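# --- Editorial example, not part of the machine-generated module ---
# The V1 op above is ref-based and graph-only.  In TF 2.x the same
# functionality is normally reached through tf.lookup.experimental.DenseHashTable,
# which wraps the resource-based MutableDenseHashTableV2 op defined next.
# A minimal sketch; the sentinel choices below are illustrative.
def _example_dense_hash_table_wrapper():
  import tensorflow as tf
  table = tf.lookup.experimental.DenseHashTable(
      key_dtype=tf.int64,
      value_dtype=tf.int64,
      default_value=-1,   # returned for missing keys
      empty_key=0,        # reserved sentinel; must never be inserted or looked up
      deleted_key=-1)     # reserved sentinel; must differ from empty_key
  table.insert(tf.constant([7, 8], dtype=tf.int64),
               tf.constant([70, 80], dtype=tf.int64))
  return table.lookup(tf.constant([7, 9], dtype=tf.int64))  # -> [70, -1]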
1383def mutable_dense_hash_table_v2(empty_key, deleted_key, value_dtype, container="", shared_name="", use_node_name_sharing=False, value_shape=[], initial_num_buckets=131072, max_load_factor=0.8, name=None):
1384 r"""Creates an empty hash table that uses tensors as the backing store.
1386 It uses "open addressing" with quadratic reprobing to resolve
1387 collisions.
1389 This op creates a mutable hash table, specifying the type of its keys and
1390 values. Each value must be a scalar. Data can be inserted into the table using
1391 the insert operations. It does not support the initialization operation.
1393 Args:
1394 empty_key: A `Tensor`.
1395 The key used to represent empty key buckets internally. Must not
1396 be used in insert or lookup operations.
1397 deleted_key: A `Tensor`. Must have the same type as `empty_key`.
1398 value_dtype: A `tf.DType`. Type of the table values.
1399 container: An optional `string`. Defaults to `""`.
1400 If non-empty, this table is placed in the given container.
1401 Otherwise, a default container is used.
1402 shared_name: An optional `string`. Defaults to `""`.
1403 If non-empty, this table is shared under the given name across
1404 multiple sessions.
1405 use_node_name_sharing: An optional `bool`. Defaults to `False`.
1406 value_shape: An optional `tf.TensorShape` or list of `ints`. Defaults to `[]`.
1407 The shape of each value.
1408 initial_num_buckets: An optional `int`. Defaults to `131072`.
1409 The initial number of hash table buckets. Must be a power
1410 of 2.
1411 max_load_factor: An optional `float`. Defaults to `0.8`.
1412 The maximum ratio between number of entries and number of
1413 buckets before growing the table. Must be between 0 and 1.
1414 name: A name for the operation (optional).
1416 Returns:
1417 A `Tensor` of type `resource`.
1418 """
1419 _ctx = _context._context or _context.context()
1420 tld = _ctx._thread_local_data
1421 if tld.is_eager:
1422 try:
1423 _result = pywrap_tfe.TFE_Py_FastPathExecute(
1424 _ctx, "MutableDenseHashTableV2", name, empty_key, deleted_key,
1425 "container", container, "shared_name", shared_name,
1426 "use_node_name_sharing", use_node_name_sharing, "value_dtype",
1427 value_dtype, "value_shape", value_shape, "initial_num_buckets",
1428 initial_num_buckets, "max_load_factor", max_load_factor)
1429 return _result
1430 except _core._NotOkStatusException as e:
1431 _ops.raise_from_not_ok_status(e, name)
1432 except _core._FallbackException:
1433 pass
1434 try:
1435 return mutable_dense_hash_table_v2_eager_fallback(
1436 empty_key, deleted_key, container=container,
1437 shared_name=shared_name,
1438 use_node_name_sharing=use_node_name_sharing,
1439 value_dtype=value_dtype, value_shape=value_shape,
1440 initial_num_buckets=initial_num_buckets,
1441 max_load_factor=max_load_factor, name=name, ctx=_ctx)
1442 except _core._SymbolicException:
1443 pass # Add nodes to the TensorFlow graph.
1444 # Add nodes to the TensorFlow graph.
1445 value_dtype = _execute.make_type(value_dtype, "value_dtype")
1446 if container is None:
1447 container = ""
1448 container = _execute.make_str(container, "container")
1449 if shared_name is None:
1450 shared_name = ""
1451 shared_name = _execute.make_str(shared_name, "shared_name")
1452 if use_node_name_sharing is None:
1453 use_node_name_sharing = False
1454 use_node_name_sharing = _execute.make_bool(use_node_name_sharing, "use_node_name_sharing")
1455 if value_shape is None:
1456 value_shape = []
1457 value_shape = _execute.make_shape(value_shape, "value_shape")
1458 if initial_num_buckets is None:
1459 initial_num_buckets = 131072
1460 initial_num_buckets = _execute.make_int(initial_num_buckets, "initial_num_buckets")
1461 if max_load_factor is None:
1462 max_load_factor = 0.8
1463 max_load_factor = _execute.make_float(max_load_factor, "max_load_factor")
1464 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1465 "MutableDenseHashTableV2", empty_key=empty_key,
1466 deleted_key=deleted_key,
1467 value_dtype=value_dtype,
1468 container=container,
1469 shared_name=shared_name,
1470 use_node_name_sharing=use_node_name_sharing,
1471 value_shape=value_shape,
1472 initial_num_buckets=initial_num_buckets,
1473 max_load_factor=max_load_factor, name=name)
1474 _result = _outputs[:]
1475 if _execute.must_record_gradient():
1476 _attrs = ("container", _op.get_attr("container"), "shared_name",
1477 _op.get_attr("shared_name"), "use_node_name_sharing",
1478 _op._get_attr_bool("use_node_name_sharing"), "key_dtype",
1479 _op._get_attr_type("key_dtype"), "value_dtype",
1480 _op._get_attr_type("value_dtype"), "value_shape",
1481 _op.get_attr("value_shape"), "initial_num_buckets",
1482 _op._get_attr_int("initial_num_buckets"), "max_load_factor",
1483 _op.get_attr("max_load_factor"))
1484 _inputs_flat = _op.inputs
1485 _execute.record_gradient(
1486 "MutableDenseHashTableV2", _inputs_flat, _attrs, _result)
1487 _result, = _result
1488 return _result
1490MutableDenseHashTableV2 = tf_export("raw_ops.MutableDenseHashTableV2")(_ops.to_raw_op(mutable_dense_hash_table_v2))
1493def mutable_dense_hash_table_v2_eager_fallback(empty_key, deleted_key, value_dtype, container, shared_name, use_node_name_sharing, value_shape, initial_num_buckets, max_load_factor, name, ctx):
1494 value_dtype = _execute.make_type(value_dtype, "value_dtype")
1495 if container is None:
1496 container = ""
1497 container = _execute.make_str(container, "container")
1498 if shared_name is None:
1499 shared_name = ""
1500 shared_name = _execute.make_str(shared_name, "shared_name")
1501 if use_node_name_sharing is None:
1502 use_node_name_sharing = False
1503 use_node_name_sharing = _execute.make_bool(use_node_name_sharing, "use_node_name_sharing")
1504 if value_shape is None:
1505 value_shape = []
1506 value_shape = _execute.make_shape(value_shape, "value_shape")
1507 if initial_num_buckets is None:
1508 initial_num_buckets = 131072
1509 initial_num_buckets = _execute.make_int(initial_num_buckets, "initial_num_buckets")
1510 if max_load_factor is None:
1511 max_load_factor = 0.8
1512 max_load_factor = _execute.make_float(max_load_factor, "max_load_factor")
1513 _attr_key_dtype, _inputs_key_dtype = _execute.args_to_matching_eager([empty_key, deleted_key], ctx, [])
1514 (empty_key, deleted_key) = _inputs_key_dtype
1515 _inputs_flat = [empty_key, deleted_key]
1516 _attrs = ("container", container, "shared_name", shared_name,
1517 "use_node_name_sharing", use_node_name_sharing, "key_dtype",
1518 _attr_key_dtype, "value_dtype", value_dtype, "value_shape", value_shape,
1519 "initial_num_buckets", initial_num_buckets, "max_load_factor",
1520 max_load_factor)
1521 _result = _execute.execute(b"MutableDenseHashTableV2", 1,
1522 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
1523 name=name)
1524 if _execute.must_record_gradient():
1525 _execute.record_gradient(
1526 "MutableDenseHashTableV2", _inputs_flat, _attrs, _result)
1527 _result, = _result
1528 return _result
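# --- Editorial example, not part of the machine-generated module ---
# A minimal sketch of driving MutableDenseHashTableV2 directly through
# tf.raw_ops under eager execution.  empty_key and deleted_key are reserved
# sentinels that must never appear as real keys, and initial_num_buckets
# (left at its default here) must be a power of 2.
def _example_mutable_dense_hash_table_v2():
  import tensorflow as tf
  handle = tf.raw_ops.MutableDenseHashTableV2(
      empty_key=tf.constant(-1, dtype=tf.int64),
      deleted_key=tf.constant(-2, dtype=tf.int64),
      value_dtype=tf.float32,
      value_shape=[])  # scalar values
  tf.raw_ops.LookupTableInsertV2(
      table_handle=handle,
      keys=tf.constant([1, 2], dtype=tf.int64),
      values=tf.constant([1.0, 2.0], dtype=tf.float32))
  return tf.raw_ops.LookupTableFindV2(
      table_handle=handle,
      keys=tf.constant([2, 3], dtype=tf.int64),
      default_value=tf.constant(0.0, dtype=tf.float32))  # -> [2.0, 0.0]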
1531def mutable_hash_table(key_dtype, value_dtype, container="", shared_name="", use_node_name_sharing=False, name=None):
1532 r"""Creates an empty hash table.
1534 This op creates a mutable hash table, specifying the type of its keys and
1535 values. Each value must be a scalar. Data can be inserted into the table using
1536 the insert operations. It does not support the initialization operation.
1538 Args:
1539 key_dtype: A `tf.DType`. Type of the table keys.
1540 value_dtype: A `tf.DType`. Type of the table values.
1541 container: An optional `string`. Defaults to `""`.
1542 If non-empty, this table is placed in the given container.
1543 Otherwise, a default container is used.
1544 shared_name: An optional `string`. Defaults to `""`.
1545 If non-empty, this table is shared under the given name across
1546 multiple sessions.
1547 use_node_name_sharing: An optional `bool`. Defaults to `False`.
1548 If true and shared_name is empty, the table is shared
1549 using the node name.
1550 name: A name for the operation (optional).
1552 Returns:
1553 A `Tensor` of type mutable `string`.
1554 """
1555 _ctx = _context._context or _context.context()
1556 tld = _ctx._thread_local_data
1557 if tld.is_eager:
1558 raise RuntimeError("mutable_hash_table op does not support eager execution. Arg 'table_handle' is a ref.")
1559 # Add nodes to the TensorFlow graph.
1560 key_dtype = _execute.make_type(key_dtype, "key_dtype")
1561 value_dtype = _execute.make_type(value_dtype, "value_dtype")
1562 if container is None:
1563 container = ""
1564 container = _execute.make_str(container, "container")
1565 if shared_name is None:
1566 shared_name = ""
1567 shared_name = _execute.make_str(shared_name, "shared_name")
1568 if use_node_name_sharing is None:
1569 use_node_name_sharing = False
1570 use_node_name_sharing = _execute.make_bool(use_node_name_sharing, "use_node_name_sharing")
1571 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1572 "MutableHashTable", key_dtype=key_dtype, value_dtype=value_dtype,
1573 container=container, shared_name=shared_name,
1574 use_node_name_sharing=use_node_name_sharing,
1575 name=name)
1576 _result = _outputs[:]
1577 if _execute.must_record_gradient():
1578 _attrs = ("container", _op.get_attr("container"), "shared_name",
1579 _op.get_attr("shared_name"), "use_node_name_sharing",
1580 _op._get_attr_bool("use_node_name_sharing"), "key_dtype",
1581 _op._get_attr_type("key_dtype"), "value_dtype",
1582 _op._get_attr_type("value_dtype"))
1583 _inputs_flat = _op.inputs
1584 _execute.record_gradient(
1585 "MutableHashTable", _inputs_flat, _attrs, _result)
1586 _result, = _result
1587 return _result
1589MutableHashTable = tf_export("raw_ops.MutableHashTable")(_ops.to_raw_op(mutable_hash_table))
1592def mutable_hash_table_eager_fallback(key_dtype, value_dtype, container, shared_name, use_node_name_sharing, name, ctx):
1593 raise RuntimeError("mutable_hash_table op does not support eager execution. Arg 'table_handle' is a ref.")
1595def mutable_hash_table_of_tensors(key_dtype, value_dtype, container="", shared_name="", use_node_name_sharing=False, value_shape=[], name=None):
1596 r"""Creates an empty hash table.
1598 This op creates a mutable hash table, specifying the type of its keys and
1599 values. Each value must be a vector. Data can be inserted into the table using
1600 the insert operations. It does not support the initialization operation.
1602 Args:
1603 key_dtype: A `tf.DType`. Type of the table keys.
1604 value_dtype: A `tf.DType`. Type of the table values.
1605 container: An optional `string`. Defaults to `""`.
1606 If non-empty, this table is placed in the given container.
1607 Otherwise, a default container is used.
1608 shared_name: An optional `string`. Defaults to `""`.
1609 If non-empty, this table is shared under the given name across
1610 multiple sessions.
1611 use_node_name_sharing: An optional `bool`. Defaults to `False`.
1612 value_shape: An optional `tf.TensorShape` or list of `ints`. Defaults to `[]`.
1613 name: A name for the operation (optional).
1615 Returns:
1616 A `Tensor` of type mutable `string`.
1617 """
1618 _ctx = _context._context or _context.context()
1619 tld = _ctx._thread_local_data
1620 if tld.is_eager:
1621 raise RuntimeError("mutable_hash_table_of_tensors op does not support eager execution. Arg 'table_handle' is a ref.")
1622 # Add nodes to the TensorFlow graph.
1623 key_dtype = _execute.make_type(key_dtype, "key_dtype")
1624 value_dtype = _execute.make_type(value_dtype, "value_dtype")
1625 if container is None:
1626 container = ""
1627 container = _execute.make_str(container, "container")
1628 if shared_name is None:
1629 shared_name = ""
1630 shared_name = _execute.make_str(shared_name, "shared_name")
1631 if use_node_name_sharing is None:
1632 use_node_name_sharing = False
1633 use_node_name_sharing = _execute.make_bool(use_node_name_sharing, "use_node_name_sharing")
1634 if value_shape is None:
1635 value_shape = []
1636 value_shape = _execute.make_shape(value_shape, "value_shape")
1637 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1638 "MutableHashTableOfTensors", key_dtype=key_dtype,
1639 value_dtype=value_dtype,
1640 container=container,
1641 shared_name=shared_name,
1642 use_node_name_sharing=use_node_name_sharing,
1643 value_shape=value_shape, name=name)
1644 _result = _outputs[:]
1645 if _execute.must_record_gradient():
1646 _attrs = ("container", _op.get_attr("container"), "shared_name",
1647 _op.get_attr("shared_name"), "use_node_name_sharing",
1648 _op._get_attr_bool("use_node_name_sharing"), "key_dtype",
1649 _op._get_attr_type("key_dtype"), "value_dtype",
1650 _op._get_attr_type("value_dtype"), "value_shape",
1651 _op.get_attr("value_shape"))
1652 _inputs_flat = _op.inputs
1653 _execute.record_gradient(
1654 "MutableHashTableOfTensors", _inputs_flat, _attrs, _result)
1655 _result, = _result
1656 return _result
1658MutableHashTableOfTensors = tf_export("raw_ops.MutableHashTableOfTensors")(_ops.to_raw_op(mutable_hash_table_of_tensors))
1661def mutable_hash_table_of_tensors_eager_fallback(key_dtype, value_dtype, container, shared_name, use_node_name_sharing, value_shape, name, ctx):
1662 raise RuntimeError("mutable_hash_table_of_tensors op does not support eager execution. Arg 'table_handle' is a ref.")
1664def mutable_hash_table_of_tensors_v2(key_dtype, value_dtype, container="", shared_name="", use_node_name_sharing=False, value_shape=[], name=None):
1665 r"""Creates an empty hash table.
1667 This op creates a mutable hash table, specifying the type of its keys and
1668 values. Each value must be a vector. Data can be inserted into the table using
1669 the insert operations. It does not support the initialization operation.
1671 Args:
1672 key_dtype: A `tf.DType`. Type of the table keys.
1673 value_dtype: A `tf.DType`. Type of the table values.
1674 container: An optional `string`. Defaults to `""`.
1675 If non-empty, this table is placed in the given container.
1676 Otherwise, a default container is used.
1677 shared_name: An optional `string`. Defaults to `""`.
1678 If non-empty, this table is shared under the given name across
1679 multiple sessions.
1680 use_node_name_sharing: An optional `bool`. Defaults to `False`.
1681 value_shape: An optional `tf.TensorShape` or list of `ints`. Defaults to `[]`.
1682 name: A name for the operation (optional).
1684 Returns:
1685 A `Tensor` of type `resource`.
1686 """
1687 _ctx = _context._context or _context.context()
1688 tld = _ctx._thread_local_data
1689 if tld.is_eager:
1690 try:
1691 _result = pywrap_tfe.TFE_Py_FastPathExecute(
1692 _ctx, "MutableHashTableOfTensorsV2", name, "container", container,
1693 "shared_name", shared_name, "use_node_name_sharing",
1694 use_node_name_sharing, "key_dtype", key_dtype, "value_dtype",
1695 value_dtype, "value_shape", value_shape)
1696 return _result
1697 except _core._NotOkStatusException as e:
1698 _ops.raise_from_not_ok_status(e, name)
1699 except _core._FallbackException:
1700 pass
1701 try:
1702 return mutable_hash_table_of_tensors_v2_eager_fallback(
1703 container=container, shared_name=shared_name,
1704 use_node_name_sharing=use_node_name_sharing, key_dtype=key_dtype,
1705 value_dtype=value_dtype, value_shape=value_shape, name=name,
1706 ctx=_ctx)
1707 except _core._SymbolicException:
1708 pass # Add nodes to the TensorFlow graph.
1709 # Add nodes to the TensorFlow graph.
1710 key_dtype = _execute.make_type(key_dtype, "key_dtype")
1711 value_dtype = _execute.make_type(value_dtype, "value_dtype")
1712 if container is None:
1713 container = ""
1714 container = _execute.make_str(container, "container")
1715 if shared_name is None:
1716 shared_name = ""
1717 shared_name = _execute.make_str(shared_name, "shared_name")
1718 if use_node_name_sharing is None:
1719 use_node_name_sharing = False
1720 use_node_name_sharing = _execute.make_bool(use_node_name_sharing, "use_node_name_sharing")
1721 if value_shape is None:
1722 value_shape = []
1723 value_shape = _execute.make_shape(value_shape, "value_shape")
1724 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1725 "MutableHashTableOfTensorsV2", key_dtype=key_dtype,
1726 value_dtype=value_dtype,
1727 container=container,
1728 shared_name=shared_name,
1729 use_node_name_sharing=use_node_name_sharing,
1730 value_shape=value_shape, name=name)
1731 _result = _outputs[:]
1732 if _execute.must_record_gradient():
1733 _attrs = ("container", _op.get_attr("container"), "shared_name",
1734 _op.get_attr("shared_name"), "use_node_name_sharing",
1735 _op._get_attr_bool("use_node_name_sharing"), "key_dtype",
1736 _op._get_attr_type("key_dtype"), "value_dtype",
1737 _op._get_attr_type("value_dtype"), "value_shape",
1738 _op.get_attr("value_shape"))
1739 _inputs_flat = _op.inputs
1740 _execute.record_gradient(
1741 "MutableHashTableOfTensorsV2", _inputs_flat, _attrs, _result)
1742 _result, = _result
1743 return _result
1745MutableHashTableOfTensorsV2 = tf_export("raw_ops.MutableHashTableOfTensorsV2")(_ops.to_raw_op(mutable_hash_table_of_tensors_v2))
1748def mutable_hash_table_of_tensors_v2_eager_fallback(key_dtype, value_dtype, container, shared_name, use_node_name_sharing, value_shape, name, ctx):
1749 key_dtype = _execute.make_type(key_dtype, "key_dtype")
1750 value_dtype = _execute.make_type(value_dtype, "value_dtype")
1751 if container is None:
1752 container = ""
1753 container = _execute.make_str(container, "container")
1754 if shared_name is None:
1755 shared_name = ""
1756 shared_name = _execute.make_str(shared_name, "shared_name")
1757 if use_node_name_sharing is None:
1758 use_node_name_sharing = False
1759 use_node_name_sharing = _execute.make_bool(use_node_name_sharing, "use_node_name_sharing")
1760 if value_shape is None:
1761 value_shape = []
1762 value_shape = _execute.make_shape(value_shape, "value_shape")
1763 _inputs_flat = []
1764 _attrs = ("container", container, "shared_name", shared_name,
1765 "use_node_name_sharing", use_node_name_sharing, "key_dtype", key_dtype,
1766 "value_dtype", value_dtype, "value_shape", value_shape)
1767 _result = _execute.execute(b"MutableHashTableOfTensorsV2", 1,
1768 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
1769 name=name)
1770 if _execute.must_record_gradient():
1771 _execute.record_gradient(
1772 "MutableHashTableOfTensorsV2", _inputs_flat, _attrs, _result)
1773 _result, = _result
1774 return _result
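# --- Editorial example, not part of the machine-generated module ---
# A minimal sketch of MutableHashTableOfTensorsV2, whose values are
# fixed-length vectors: value_shape fixes the per-key value shape, inserted
# values have shape [num_keys] + value_shape, and the find default must
# match value_shape.
def _example_mutable_hash_table_of_tensors_v2():
  import tensorflow as tf
  handle = tf.raw_ops.MutableHashTableOfTensorsV2(
      key_dtype=tf.string, value_dtype=tf.int64, value_shape=[2])
  tf.raw_ops.LookupTableInsertV2(
      table_handle=handle,
      keys=tf.constant(["a", "b"]),
      values=tf.constant([[1, 2], [3, 4]], dtype=tf.int64))
  return tf.raw_ops.LookupTableFindV2(
      table_handle=handle,
      keys=tf.constant(["b", "c"]),
      default_value=tf.constant([0, 0], dtype=tf.int64))  # -> [[3, 4], [0, 0]]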
1777def mutable_hash_table_v2(key_dtype, value_dtype, container="", shared_name="", use_node_name_sharing=False, name=None):
1778 r"""Creates an empty hash table.
1780 This op creates a mutable hash table, specifying the type of its keys and
1781 values. Each value must be a scalar. Data can be inserted into the table using
1782 the insert operations. It does not support the initialization operation.
1784 Args:
1785 key_dtype: A `tf.DType`. Type of the table keys.
1786 value_dtype: A `tf.DType`. Type of the table values.
1787 container: An optional `string`. Defaults to `""`.
1788 If non-empty, this table is placed in the given container.
1789 Otherwise, a default container is used.
1790 shared_name: An optional `string`. Defaults to `""`.
1791 If non-empty, this table is shared under the given name across
1792 multiple sessions.
1793 use_node_name_sharing: An optional `bool`. Defaults to `False`.
1794 If true and shared_name is empty, the table is shared
1795 using the node name.
1796 name: A name for the operation (optional).
1798 Returns:
1799 A `Tensor` of type `resource`.
1800 """
1801 _ctx = _context._context or _context.context()
1802 tld = _ctx._thread_local_data
1803 if tld.is_eager:
1804 try:
1805 _result = pywrap_tfe.TFE_Py_FastPathExecute(
1806 _ctx, "MutableHashTableV2", name, "container", container,
1807 "shared_name", shared_name, "use_node_name_sharing",
1808 use_node_name_sharing, "key_dtype", key_dtype, "value_dtype",
1809 value_dtype)
1810 return _result
1811 except _core._NotOkStatusException as e:
1812 _ops.raise_from_not_ok_status(e, name)
1813 except _core._FallbackException:
1814 pass
1815 try:
1816 return mutable_hash_table_v2_eager_fallback(
1817 container=container, shared_name=shared_name,
1818 use_node_name_sharing=use_node_name_sharing, key_dtype=key_dtype,
1819 value_dtype=value_dtype, name=name, ctx=_ctx)
1820 except _core._SymbolicException:
1821 pass # Add nodes to the TensorFlow graph.
1822 # Add nodes to the TensorFlow graph.
1823 key_dtype = _execute.make_type(key_dtype, "key_dtype")
1824 value_dtype = _execute.make_type(value_dtype, "value_dtype")
1825 if container is None:
1826 container = ""
1827 container = _execute.make_str(container, "container")
1828 if shared_name is None:
1829 shared_name = ""
1830 shared_name = _execute.make_str(shared_name, "shared_name")
1831 if use_node_name_sharing is None:
1832 use_node_name_sharing = False
1833 use_node_name_sharing = _execute.make_bool(use_node_name_sharing, "use_node_name_sharing")
1834 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1835 "MutableHashTableV2", key_dtype=key_dtype, value_dtype=value_dtype,
1836 container=container, shared_name=shared_name,
1837 use_node_name_sharing=use_node_name_sharing,
1838 name=name)
1839 _result = _outputs[:]
1840 if _execute.must_record_gradient():
1841 _attrs = ("container", _op.get_attr("container"), "shared_name",
1842 _op.get_attr("shared_name"), "use_node_name_sharing",
1843 _op._get_attr_bool("use_node_name_sharing"), "key_dtype",
1844 _op._get_attr_type("key_dtype"), "value_dtype",
1845 _op._get_attr_type("value_dtype"))
1846 _inputs_flat = _op.inputs
1847 _execute.record_gradient(
1848 "MutableHashTableV2", _inputs_flat, _attrs, _result)
1849 _result, = _result
1850 return _result
1852MutableHashTableV2 = tf_export("raw_ops.MutableHashTableV2")(_ops.to_raw_op(mutable_hash_table_v2))
1855def mutable_hash_table_v2_eager_fallback(key_dtype, value_dtype, container, shared_name, use_node_name_sharing, name, ctx):
1856 key_dtype = _execute.make_type(key_dtype, "key_dtype")
1857 value_dtype = _execute.make_type(value_dtype, "value_dtype")
1858 if container is None:
1859 container = ""
1860 container = _execute.make_str(container, "container")
1861 if shared_name is None:
1862 shared_name = ""
1863 shared_name = _execute.make_str(shared_name, "shared_name")
1864 if use_node_name_sharing is None:
1865 use_node_name_sharing = False
1866 use_node_name_sharing = _execute.make_bool(use_node_name_sharing, "use_node_name_sharing")
1867 _inputs_flat = []
1868 _attrs = ("container", container, "shared_name", shared_name,
1869 "use_node_name_sharing", use_node_name_sharing, "key_dtype", key_dtype,
1870 "value_dtype", value_dtype)
1871 _result = _execute.execute(b"MutableHashTableV2", 1, inputs=_inputs_flat,
1872 attrs=_attrs, ctx=ctx, name=name)
1873 if _execute.must_record_gradient():
1874 _execute.record_gradient(
1875 "MutableHashTableV2", _inputs_flat, _attrs, _result)
1876 _result, = _result
1877 return _result
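# --- Editorial example, not part of the machine-generated module ---
# A minimal end-to-end sketch for the scalar-valued MutableHashTableV2
# resource op, using the companion insert/find/size raw ops documented above.
def _example_mutable_hash_table_v2():
  import tensorflow as tf
  handle = tf.raw_ops.MutableHashTableV2(key_dtype=tf.string,
                                         value_dtype=tf.int64)
  tf.raw_ops.LookupTableInsertV2(
      table_handle=handle,
      keys=tf.constant(["apple", "pear"]),
      values=tf.constant([1, 2], dtype=tf.int64))
  found = tf.raw_ops.LookupTableFindV2(
      table_handle=handle,
      keys=tf.constant(["pear", "plum"]),
      default_value=tf.constant(-1, dtype=tf.int64))  # -> [2, -1]
  size = tf.raw_ops.LookupTableSizeV2(table_handle=handle)  # -> 2
  return found, size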