1"""Python wrappers around TensorFlow ops.
3This file is MACHINE GENERATED! Do not edit.
4"""
6import collections
8from tensorflow.python import pywrap_tfe as pywrap_tfe
9from tensorflow.python.eager import context as _context
10from tensorflow.python.eager import core as _core
11from tensorflow.python.eager import execute as _execute
12from tensorflow.python.framework import dtypes as _dtypes
13from tensorflow.security.fuzzing.py import annotation_types as _atypes
15from tensorflow.python.framework import op_def_registry as _op_def_registry
16from tensorflow.python.framework import ops as _ops
17from tensorflow.python.framework import op_def_library as _op_def_library
18from tensorflow.python.util.deprecation import deprecated_endpoints
19from tensorflow.python.util import dispatch as _dispatch
20from tensorflow.python.util.tf_export import tf_export
22from typing import TypeVar
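
# Each wrapper below follows the same three-tier dispatch used throughout this
# generated module: (1) an eager fast path through
# pywrap_tfe.TFE_Py_FastPathExecute, (2) an *_eager_fallback helper that
# converts inputs explicitly and calls _execute.execute, and (3) graph-mode
# construction through _op_def_library._apply_op_helper. The `_example_*`
# functions interspersed below are illustrative usage sketches, not part of
# the generated code.
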
def assert_cardinality_dataset(input_dataset, cardinality, output_types, output_shapes, name=None):
  r"""TODO: add doc.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    cardinality: A `Tensor` of type `int64`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "AssertCardinalityDataset", name, input_dataset, cardinality,
        "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return assert_cardinality_dataset_eager_fallback(
          input_dataset, cardinality, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'assert_cardinality_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'assert_cardinality_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "AssertCardinalityDataset", input_dataset=input_dataset,
                                    cardinality=cardinality,
                                    output_types=output_types,
                                    output_shapes=output_shapes, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "AssertCardinalityDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

AssertCardinalityDataset = tf_export("raw_ops.AssertCardinalityDataset")(_ops.to_raw_op(assert_cardinality_dataset))


def assert_cardinality_dataset_eager_fallback(input_dataset, cardinality, output_types, output_shapes, name, ctx):
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'assert_cardinality_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'assert_cardinality_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  cardinality = _ops.convert_to_tensor(cardinality, _dtypes.int64)
  _inputs_flat = [input_dataset, cardinality]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"AssertCardinalityDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
                             name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "AssertCardinalityDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
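
# A minimal usage sketch for the raw op above (assumes TF 2.x eager mode; the
# `_example_*` helper and the use of the private `_variant_tensor` attribute
# are illustrative only -- the supported entry point is
# tf.data.experimental.assert_cardinality):
def _example_assert_cardinality_dataset():
  import tensorflow as tf
  ds = tf.data.Dataset.range(10)
  # Assert the pipeline yields exactly 10 elements; the check fires during
  # iteration, not at construction time.
  variant = tf.raw_ops.AssertCardinalityDataset(
      input_dataset=ds._variant_tensor, cardinality=10,
      output_types=[tf.int64], output_shapes=[[]])
  checked = tf.data.experimental.from_variant(variant, ds.element_spec)
  return list(checked.as_numpy_iterator())
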
def assert_next_dataset(input_dataset, transformations, output_types, output_shapes, name=None):
  r"""A transformation that asserts which transformations happen next.

  This transformation checks whether the camel-case names (i.e. "FlatMap", not
  "flat_map") of the transformations following this transformation match the list
  of names in the `transformations` argument. If there is a mismatch, the
  transformation raises an exception.

  The check occurs when iterating over the contents of the dataset, which
  means that the check happens *after* any static optimizations are applied
  to the dataset graph.

  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the input dataset.
      `AssertNextDataset` passes through the outputs of its input dataset.
    transformations: A `Tensor` of type `string`.
      A `tf.string` vector `tf.Tensor` identifying the transformations that are
      expected to happen next.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "AssertNextDataset", name, input_dataset, transformations,
        "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return assert_next_dataset_eager_fallback(
          input_dataset, transformations, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'assert_next_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'assert_next_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "AssertNextDataset", input_dataset=input_dataset,
                             transformations=transformations,
                             output_types=output_types,
                             output_shapes=output_shapes, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "AssertNextDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

AssertNextDataset = tf_export("raw_ops.AssertNextDataset")(_ops.to_raw_op(assert_next_dataset))


def assert_next_dataset_eager_fallback(input_dataset, transformations, output_types, output_shapes, name, ctx):
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'assert_next_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'assert_next_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  transformations = _ops.convert_to_tensor(transformations, _dtypes.string)
  _inputs_flat = [input_dataset, transformations]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"AssertNextDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "AssertNextDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
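
# A usage sketch (same caveats as above): wrap a dataset, then declare which
# transformation is expected to come next. Note the names are the camel-case
# graph op names (e.g. "Map"), not the snake_case Python method names.
def _example_assert_next_dataset():
  import tensorflow as tf
  ds = tf.data.Dataset.range(4)
  variant = tf.raw_ops.AssertNextDataset(
      input_dataset=ds._variant_tensor, transformations=["Map"],
      output_types=[tf.int64], output_shapes=[[]])
  checked = tf.data.experimental.from_variant(variant, ds.element_spec)
  # Iterating fails unless a map transformation actually follows the assert.
  return checked.map(lambda x: x + 1)
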
def assert_prev_dataset(input_dataset, transformations, output_types, output_shapes, name=None):
  r"""A transformation that asserts which transformations happened previously.

  This transformation checks the names and, optionally, the attribute name-value
  pairs in the `transformations` argument against those of the transformations
  that preceded this transformation. If there is a mismatch, the transformation
  raises an exception.

  The check occurs when iterating over the contents of the dataset, which
  means that the check happens *after* any static optimizations are applied
  to the dataset graph.

  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the input dataset.
      `AssertPrevDataset` passes through the outputs of its input dataset.
    transformations: A `Tensor` of type `string`.
      A `tf.string` vector `tf.Tensor` identifying the transformations, with optional
      attribute name-value pairs, that are expected to have happened previously.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "AssertPrevDataset", name, input_dataset, transformations,
        "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return assert_prev_dataset_eager_fallback(
          input_dataset, transformations, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'assert_prev_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'assert_prev_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "AssertPrevDataset", input_dataset=input_dataset,
                             transformations=transformations,
                             output_types=output_types,
                             output_shapes=output_shapes, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "AssertPrevDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

AssertPrevDataset = tf_export("raw_ops.AssertPrevDataset")(_ops.to_raw_op(assert_prev_dataset))


def assert_prev_dataset_eager_fallback(input_dataset, transformations, output_types, output_shapes, name, ctx):
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'assert_prev_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'assert_prev_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  transformations = _ops.convert_to_tensor(transformations, _dtypes.string)
  _inputs_flat = [input_dataset, transformations]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"AssertPrevDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "AssertPrevDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
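
# A usage sketch (same caveats): AssertPrevDataset looks backwards instead of
# forwards, and entries may carry attribute name-value pairs. The exact name
# string used here ("RangeDataset") is an assumption about the graph op name.
def _example_assert_prev_dataset():
  import tensorflow as tf
  ds = tf.data.Dataset.range(4)
  variant = tf.raw_ops.AssertPrevDataset(
      input_dataset=ds._variant_tensor, transformations=["RangeDataset"],
      output_types=[tf.int64], output_shapes=[[]])
  return tf.data.experimental.from_variant(variant, ds.element_spec)
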
def auto_shard_dataset(input_dataset, num_workers, index, output_types, output_shapes, auto_shard_policy=0, num_replicas=0, name=None):
  r"""Creates a dataset that shards the input dataset.

  Creates a dataset that shards the input dataset by num_workers, returning a
  sharded dataset for the index-th worker. This attempts to automatically shard
  a dataset by examining the Dataset graph and inserting a shard op before the
  inputs to a reader Dataset (e.g. CSVDataset, TFRecordDataset).

  This dataset will throw a NotFound error if we cannot shard the dataset
  automatically.

  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the input dataset.
    num_workers: A `Tensor` of type `int64`.
      A scalar representing the number of workers to distribute this dataset across.
    index: A `Tensor` of type `int64`.
      A scalar representing the index of the current worker out of num_workers.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    auto_shard_policy: An optional `int`. Defaults to `0`.
    num_replicas: An optional `int`. Defaults to `0`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "AutoShardDataset", name, input_dataset, num_workers, index,
        "auto_shard_policy", auto_shard_policy, "output_types", output_types,
        "output_shapes", output_shapes, "num_replicas", num_replicas)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return auto_shard_dataset_eager_fallback(
          input_dataset, num_workers, index,
          auto_shard_policy=auto_shard_policy, output_types=output_types,
          output_shapes=output_shapes, num_replicas=num_replicas, name=name,
          ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'auto_shard_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'auto_shard_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if auto_shard_policy is None:
    auto_shard_policy = 0
  auto_shard_policy = _execute.make_int(auto_shard_policy, "auto_shard_policy")
  if num_replicas is None:
    num_replicas = 0
  num_replicas = _execute.make_int(num_replicas, "num_replicas")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "AutoShardDataset", input_dataset=input_dataset,
                            num_workers=num_workers, index=index,
                            output_types=output_types,
                            output_shapes=output_shapes,
                            auto_shard_policy=auto_shard_policy,
                            num_replicas=num_replicas, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("auto_shard_policy", _op._get_attr_int("auto_shard_policy"),
              "output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"), "num_replicas",
              _op._get_attr_int("num_replicas"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "AutoShardDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

AutoShardDataset = tf_export("raw_ops.AutoShardDataset")(_ops.to_raw_op(auto_shard_dataset))


def auto_shard_dataset_eager_fallback(input_dataset, num_workers, index, output_types, output_shapes, auto_shard_policy, num_replicas, name, ctx):
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'auto_shard_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'auto_shard_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if auto_shard_policy is None:
    auto_shard_policy = 0
  auto_shard_policy = _execute.make_int(auto_shard_policy, "auto_shard_policy")
  if num_replicas is None:
    num_replicas = 0
  num_replicas = _execute.make_int(num_replicas, "num_replicas")
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  num_workers = _ops.convert_to_tensor(num_workers, _dtypes.int64)
  index = _ops.convert_to_tensor(index, _dtypes.int64)
  _inputs_flat = [input_dataset, num_workers, index]
  _attrs = ("auto_shard_policy", auto_shard_policy, "output_types",
            output_types, "output_shapes", output_shapes, "num_replicas",
            num_replicas)
  _result = _execute.execute(b"AutoShardDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "AutoShardDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
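
# A usage sketch (same caveats): produce worker `index`'s shard out of
# `num_workers`. The default auto_shard_policy of 0 corresponds to AUTO in
# tf.data.experimental.AutoShardPolicy, which falls back to sharding by
# elements when no file-based reader dataset is found.
def _example_auto_shard_dataset(num_workers=2, index=0):
  import tensorflow as tf
  ds = tf.data.Dataset.range(8)
  variant = tf.raw_ops.AutoShardDataset(
      input_dataset=ds._variant_tensor, num_workers=num_workers, index=index,
      output_types=[tf.int64], output_shapes=[[]])
  return tf.data.experimental.from_variant(variant, ds.element_spec)
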
def bytes_produced_stats_dataset(input_dataset, tag, output_types, output_shapes, name=None):
  r"""Records the bytes size of each element of `input_dataset` in a StatsAggregator.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    tag: A `Tensor` of type `string`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "BytesProducedStatsDataset", name, input_dataset, tag,
        "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return bytes_produced_stats_dataset_eager_fallback(
          input_dataset, tag, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'bytes_produced_stats_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'bytes_produced_stats_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "BytesProducedStatsDataset", input_dataset=input_dataset, tag=tag,
                                     output_types=output_types,
                                     output_shapes=output_shapes, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "BytesProducedStatsDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

BytesProducedStatsDataset = tf_export("raw_ops.BytesProducedStatsDataset")(_ops.to_raw_op(bytes_produced_stats_dataset))


def bytes_produced_stats_dataset_eager_fallback(input_dataset, tag, output_types, output_shapes, name, ctx):
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'bytes_produced_stats_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'bytes_produced_stats_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  tag = _ops.convert_to_tensor(tag, _dtypes.string)
  _inputs_flat = [input_dataset, tag]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"BytesProducedStatsDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
                             name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "BytesProducedStatsDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
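
# A usage sketch (same caveats): record per-element byte sizes under `tag`.
# Without a StatsAggregator attached to the iterator, the recorded statistics
# are simply dropped, so in plain eager use this wrapper behaves as a
# pass-through.
def _example_bytes_produced_stats_dataset():
  import tensorflow as tf
  ds = tf.data.Dataset.range(8)
  variant = tf.raw_ops.BytesProducedStatsDataset(
      input_dataset=ds._variant_tensor, tag="bytes_produced",
      output_types=[tf.int64], output_shapes=[[]])
  return tf.data.experimental.from_variant(variant, ds.element_spec)
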
def csv_dataset(filenames, compression_type, buffer_size, header, field_delim, use_quote_delim, na_value, select_cols, record_defaults, output_shapes, name=None):
  r"""TODO: add doc.

  Args:
    filenames: A `Tensor` of type `string`.
    compression_type: A `Tensor` of type `string`.
    buffer_size: A `Tensor` of type `int64`.
    header: A `Tensor` of type `bool`.
    field_delim: A `Tensor` of type `string`.
    use_quote_delim: A `Tensor` of type `bool`.
    na_value: A `Tensor` of type `string`.
    select_cols: A `Tensor` of type `int64`.
    record_defaults: A list of `Tensor` objects with types from: `float32`, `float64`, `int32`, `int64`, `string`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "CSVDataset", name, filenames, compression_type, buffer_size,
        header, field_delim, use_quote_delim, na_value, select_cols,
        record_defaults, "output_shapes", output_shapes)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return csv_dataset_eager_fallback(
          filenames, compression_type, buffer_size, header, field_delim,
          use_quote_delim, na_value, select_cols, record_defaults,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'csv_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "CSVDataset", filenames=filenames, compression_type=compression_type,
                      buffer_size=buffer_size, header=header,
                      field_delim=field_delim,
                      use_quote_delim=use_quote_delim, na_value=na_value,
                      select_cols=select_cols,
                      record_defaults=record_defaults,
                      output_shapes=output_shapes, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "CSVDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

CSVDataset = tf_export("raw_ops.CSVDataset")(_ops.to_raw_op(csv_dataset))


def csv_dataset_eager_fallback(filenames, compression_type, buffer_size, header, field_delim, use_quote_delim, na_value, select_cols, record_defaults, output_shapes, name, ctx):
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'csv_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _attr_output_types, record_defaults = _execute.convert_to_mixed_eager_tensors(record_defaults, ctx)
  filenames = _ops.convert_to_tensor(filenames, _dtypes.string)
  compression_type = _ops.convert_to_tensor(compression_type, _dtypes.string)
  buffer_size = _ops.convert_to_tensor(buffer_size, _dtypes.int64)
  header = _ops.convert_to_tensor(header, _dtypes.bool)
  field_delim = _ops.convert_to_tensor(field_delim, _dtypes.string)
  use_quote_delim = _ops.convert_to_tensor(use_quote_delim, _dtypes.bool)
  na_value = _ops.convert_to_tensor(na_value, _dtypes.string)
  select_cols = _ops.convert_to_tensor(select_cols, _dtypes.int64)
  _inputs_flat = [filenames, compression_type, buffer_size, header, field_delim, use_quote_delim, na_value, select_cols] + list(record_defaults)
  _attrs = ("output_types", _attr_output_types, "output_shapes",
            output_shapes)
  _result = _execute.execute(b"CSVDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "CSVDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
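
# A usage sketch (same caveats; the file path and column layout are
# hypothetical): read a headerless two-column CSV of floats. Note that
# `record_defaults` doubles as the column type specification, which is why
# this op takes no explicit output_types argument.
def _example_csv_dataset(path="data.csv"):
  import tensorflow as tf
  variant = tf.raw_ops.CSVDataset(
      filenames=[path], compression_type="", buffer_size=4 * 1024 * 1024,
      header=False, field_delim=",", use_quote_delim=True, na_value="",
      select_cols=[],
      record_defaults=[tf.constant([0.0]), tf.constant([0.0])],
      output_shapes=[[], []])
  spec = (tf.TensorSpec([], tf.float32), tf.TensorSpec([], tf.float32))
  return tf.data.experimental.from_variant(variant, spec)
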
def csv_dataset_v2(filenames, compression_type, buffer_size, header, field_delim, use_quote_delim, na_value, select_cols, record_defaults, exclude_cols, output_shapes, name=None):
  r"""TODO: add doc.

  Args:
    filenames: A `Tensor` of type `string`.
    compression_type: A `Tensor` of type `string`.
    buffer_size: A `Tensor` of type `int64`.
    header: A `Tensor` of type `bool`.
    field_delim: A `Tensor` of type `string`.
    use_quote_delim: A `Tensor` of type `bool`.
    na_value: A `Tensor` of type `string`.
    select_cols: A `Tensor` of type `int64`.
    record_defaults: A list of `Tensor` objects with types from: `float32`, `float64`, `int32`, `int64`, `string`.
    exclude_cols: A `Tensor` of type `int64`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "CSVDatasetV2", name, filenames, compression_type, buffer_size,
        header, field_delim, use_quote_delim, na_value, select_cols,
        record_defaults, exclude_cols, "output_shapes", output_shapes)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return csv_dataset_v2_eager_fallback(
          filenames, compression_type, buffer_size, header, field_delim,
          use_quote_delim, na_value, select_cols, record_defaults,
          exclude_cols, output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'csv_dataset_v2' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "CSVDatasetV2", filenames=filenames,
                        compression_type=compression_type,
                        buffer_size=buffer_size, header=header,
                        field_delim=field_delim,
                        use_quote_delim=use_quote_delim, na_value=na_value,
                        select_cols=select_cols,
                        record_defaults=record_defaults,
                        exclude_cols=exclude_cols,
                        output_shapes=output_shapes, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "CSVDatasetV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

CSVDatasetV2 = tf_export("raw_ops.CSVDatasetV2")(_ops.to_raw_op(csv_dataset_v2))


def csv_dataset_v2_eager_fallback(filenames, compression_type, buffer_size, header, field_delim, use_quote_delim, na_value, select_cols, record_defaults, exclude_cols, output_shapes, name, ctx):
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'csv_dataset_v2' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _attr_output_types, record_defaults = _execute.convert_to_mixed_eager_tensors(record_defaults, ctx)
  filenames = _ops.convert_to_tensor(filenames, _dtypes.string)
  compression_type = _ops.convert_to_tensor(compression_type, _dtypes.string)
  buffer_size = _ops.convert_to_tensor(buffer_size, _dtypes.int64)
  header = _ops.convert_to_tensor(header, _dtypes.bool)
  field_delim = _ops.convert_to_tensor(field_delim, _dtypes.string)
  use_quote_delim = _ops.convert_to_tensor(use_quote_delim, _dtypes.bool)
  na_value = _ops.convert_to_tensor(na_value, _dtypes.string)
  select_cols = _ops.convert_to_tensor(select_cols, _dtypes.int64)
  exclude_cols = _ops.convert_to_tensor(exclude_cols, _dtypes.int64)
  _inputs_flat = [filenames, compression_type, buffer_size, header, field_delim, use_quote_delim, na_value, select_cols] + list(record_defaults) + [exclude_cols]
  _attrs = ("output_types", _attr_output_types, "output_shapes",
            output_shapes)
  _result = _execute.execute(b"CSVDatasetV2", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "CSVDatasetV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
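
# V2 only adds `exclude_cols`; a sketch of the changed argument relative to
# the CSVDataset example above (all other caveats unchanged, and the
# assumption here is that exclude_cols is used instead of select_cols):
def _example_csv_dataset_v2(path="data.csv"):
  import tensorflow as tf
  return tf.raw_ops.CSVDatasetV2(
      filenames=[path], compression_type="", buffer_size=4 * 1024 * 1024,
      header=False, field_delim=",", use_quote_delim=True, na_value="",
      select_cols=[], record_defaults=[tf.constant([0.0])],
      exclude_cols=[1],  # drop the second column rather than selecting columns
      output_shapes=[[]])
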
def choose_fastest_branch_dataset(input_dataset, ratio_numerator, ratio_denominator, other_arguments, num_elements_per_branch, branches, other_arguments_lengths, output_types, output_shapes, name=None):
  r"""TODO: add doc.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    ratio_numerator: A `Tensor` of type `int64`.
    ratio_denominator: A `Tensor` of type `int64`.
    other_arguments: A list of `Tensor` objects.
    num_elements_per_branch: An `int` that is `>= 1`.
    branches: A list of functions decorated with @Defun that has length `>= 1`.
    other_arguments_lengths: A list of `ints` that has length `>= 1`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "ChooseFastestBranchDataset", name, input_dataset,
        ratio_numerator, ratio_denominator, other_arguments,
        "num_elements_per_branch", num_elements_per_branch, "branches",
        branches, "other_arguments_lengths", other_arguments_lengths,
        "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return choose_fastest_branch_dataset_eager_fallback(
          input_dataset, ratio_numerator, ratio_denominator, other_arguments,
          num_elements_per_branch=num_elements_per_branch, branches=branches,
          other_arguments_lengths=other_arguments_lengths,
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  num_elements_per_branch = _execute.make_int(num_elements_per_branch, "num_elements_per_branch")
  if not isinstance(branches, (list, tuple)):
    raise TypeError(
        "Expected list for 'branches' argument to "
        "'choose_fastest_branch_dataset' Op, not %r." % branches)
  if not isinstance(other_arguments_lengths, (list, tuple)):
    raise TypeError(
        "Expected list for 'other_arguments_lengths' argument to "
        "'choose_fastest_branch_dataset' Op, not %r." % other_arguments_lengths)
  other_arguments_lengths = [_execute.make_int(_i, "other_arguments_lengths") for _i in other_arguments_lengths]
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'choose_fastest_branch_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'choose_fastest_branch_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "ChooseFastestBranchDataset", input_dataset=input_dataset,
                                      ratio_numerator=ratio_numerator,
                                      ratio_denominator=ratio_denominator,
                                      other_arguments=other_arguments,
                                      num_elements_per_branch=num_elements_per_branch,
                                      branches=branches,
                                      other_arguments_lengths=other_arguments_lengths,
                                      output_types=output_types,
                                      output_shapes=output_shapes, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("Targuments", _op.get_attr("Targuments"),
              "num_elements_per_branch",
              _op._get_attr_int("num_elements_per_branch"), "branches",
              _op.get_attr("branches"), "other_arguments_lengths",
              _op.get_attr("other_arguments_lengths"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "ChooseFastestBranchDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

ChooseFastestBranchDataset = tf_export("raw_ops.ChooseFastestBranchDataset")(_ops.to_raw_op(choose_fastest_branch_dataset))


def choose_fastest_branch_dataset_eager_fallback(input_dataset, ratio_numerator, ratio_denominator, other_arguments, num_elements_per_branch, branches, other_arguments_lengths, output_types, output_shapes, name, ctx):
  num_elements_per_branch = _execute.make_int(num_elements_per_branch, "num_elements_per_branch")
  if not isinstance(branches, (list, tuple)):
    raise TypeError(
        "Expected list for 'branches' argument to "
        "'choose_fastest_branch_dataset' Op, not %r." % branches)
  if not isinstance(other_arguments_lengths, (list, tuple)):
    raise TypeError(
        "Expected list for 'other_arguments_lengths' argument to "
        "'choose_fastest_branch_dataset' Op, not %r." % other_arguments_lengths)
  other_arguments_lengths = [_execute.make_int(_i, "other_arguments_lengths") for _i in other_arguments_lengths]
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'choose_fastest_branch_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'choose_fastest_branch_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  ratio_numerator = _ops.convert_to_tensor(ratio_numerator, _dtypes.int64)
  ratio_denominator = _ops.convert_to_tensor(ratio_denominator, _dtypes.int64)
  _inputs_flat = [input_dataset, ratio_numerator, ratio_denominator] + list(other_arguments)
  _attrs = ("Targuments", _attr_Targuments, "num_elements_per_branch",
            num_elements_per_branch, "branches", branches,
            "other_arguments_lengths", other_arguments_lengths,
            "output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"ChooseFastestBranchDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
                             name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "ChooseFastestBranchDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
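
# No runnable sketch for this op: `branches` must be serialized function
# attrs (historically `@Defun`-style functions), which are awkward to build
# by hand. Reading the arguments together, each branch appears to be
# benchmarked on `num_elements_per_branch` elements before the fastest branch
# serves the rest of the pipeline, with ratio_numerator/ratio_denominator
# relating input elements consumed to output elements produced. This op
# appears to back internal tf.data optimizations rather than a public API.
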
def choose_fastest_dataset(input_datasets, num_experiments, output_types, output_shapes, name=None):
  r"""TODO: add doc.

  Args:
    input_datasets: A list of at least 2 `Tensor` objects with type `variant`.
    num_experiments: An `int`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "ChooseFastestDataset", name, input_datasets, "num_experiments",
        num_experiments, "output_types", output_types, "output_shapes",
        output_shapes)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return choose_fastest_dataset_eager_fallback(
          input_datasets, num_experiments=num_experiments,
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(input_datasets, (list, tuple)):
    raise TypeError(
        "Expected list for 'input_datasets' argument to "
        "'choose_fastest_dataset' Op, not %r." % input_datasets)
  _attr_N = len(input_datasets)
  num_experiments = _execute.make_int(num_experiments, "num_experiments")
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'choose_fastest_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'choose_fastest_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "ChooseFastestDataset", input_datasets=input_datasets,
                                num_experiments=num_experiments,
                                output_types=output_types,
                                output_shapes=output_shapes, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("N", _op._get_attr_int("N"), "num_experiments",
              _op._get_attr_int("num_experiments"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "ChooseFastestDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

ChooseFastestDataset = tf_export("raw_ops.ChooseFastestDataset")(_ops.to_raw_op(choose_fastest_dataset))


def choose_fastest_dataset_eager_fallback(input_datasets, num_experiments, output_types, output_shapes, name, ctx):
  if not isinstance(input_datasets, (list, tuple)):
    raise TypeError(
        "Expected list for 'input_datasets' argument to "
        "'choose_fastest_dataset' Op, not %r." % input_datasets)
  _attr_N = len(input_datasets)
  num_experiments = _execute.make_int(num_experiments, "num_experiments")
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'choose_fastest_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'choose_fastest_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  input_datasets = _ops.convert_n_to_tensor(input_datasets, _dtypes.variant)
  _inputs_flat = list(input_datasets)
  _attrs = ("N", _attr_N, "num_experiments", num_experiments, "output_types",
            output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"ChooseFastestDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "ChooseFastestDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
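
# A usage sketch (same caveats): race two pipelines that are assumed to
# produce identical elements; after `num_experiments` timed trials the faster
# input keeps serving the data.
def _example_choose_fastest_dataset():
  import tensorflow as tf
  a = tf.data.Dataset.range(100).map(lambda x: x * 2)
  b = tf.data.Dataset.range(100).map(lambda x: x + x)
  variant = tf.raw_ops.ChooseFastestDataset(
      input_datasets=[a._variant_tensor, b._variant_tensor],
      num_experiments=10, output_types=[tf.int64], output_shapes=[[]])
  return tf.data.experimental.from_variant(variant, a.element_spec)
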
def compress_element(components, name=None):
  r"""Compresses a dataset element.

  Args:
    components: A list of `Tensor` objects.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "CompressElement", name, components)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return compress_element_eager_fallback(
          components, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "CompressElement", components=components, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("input_types", _op.get_attr("input_types"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "CompressElement", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

CompressElement = tf_export("raw_ops.CompressElement")(_ops.to_raw_op(compress_element))


def compress_element_eager_fallback(components, name, ctx):
  _attr_input_types, components = _execute.convert_to_mixed_eager_tensors(components, ctx)
  _inputs_flat = list(components)
  _attrs = ("input_types", _attr_input_types)
  _result = _execute.execute(b"CompressElement", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "CompressElement", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
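
# A usage sketch (same caveats): pack an arbitrary structure of tensors into
# a single scalar variant. A matching UncompressElement raw op, generated
# elsewhere in this module, restores the components.
def _example_compress_element():
  import tensorflow as tf
  return tf.raw_ops.CompressElement(
      components=[tf.constant([1, 2, 3]), tf.constant("payload")])
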
def compute_batch_size(input_dataset, name=None):
  r"""Computes the static batch size of a dataset sans partial batches.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `int64`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "ComputeBatchSize", name, input_dataset)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return compute_batch_size_eager_fallback(
          input_dataset, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "ComputeBatchSize", input_dataset=input_dataset, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ()
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "ComputeBatchSize", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

ComputeBatchSize = tf_export("raw_ops.ComputeBatchSize")(_ops.to_raw_op(compute_batch_size))


def compute_batch_size_eager_fallback(input_dataset, name, ctx):
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  _inputs_flat = [input_dataset]
  _attrs = None
  _result = _execute.execute(b"ComputeBatchSize", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "ComputeBatchSize", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
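
# A usage sketch (same caveats): recover the static batch size from the
# dataset graph. drop_remainder=True keeps the batch dimension static, so
# this should return an int64 tensor holding 4.
def _example_compute_batch_size():
  import tensorflow as tf
  ds = tf.data.Dataset.range(10).batch(4, drop_remainder=True)
  return tf.raw_ops.ComputeBatchSize(input_dataset=ds._variant_tensor)
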
def data_service_dataset(dataset_id, processing_mode, address, protocol, job_name, max_outstanding_requests, iteration_counter, output_types, output_shapes, task_refresh_interval_hint_ms=-1, data_transfer_protocol="", target_workers="AUTO", cross_trainer_cache_options="", name=None):
  r"""Creates a dataset that reads data from the tf.data service.

  Args:
    dataset_id: A `Tensor` of type `int64`.
    processing_mode: A `Tensor` of type `string`.
    address: A `Tensor` of type `string`.
    protocol: A `Tensor` of type `string`.
    job_name: A `Tensor` of type `string`.
    max_outstanding_requests: A `Tensor` of type `int64`.
    iteration_counter: A `Tensor` of type `resource`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    task_refresh_interval_hint_ms: An optional `int`. Defaults to `-1`.
    data_transfer_protocol: An optional `string`. Defaults to `""`.
    target_workers: An optional `string`. Defaults to `"AUTO"`.
    cross_trainer_cache_options: An optional `string`. Defaults to `""`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "DataServiceDataset", name, dataset_id, processing_mode,
        address, protocol, job_name, max_outstanding_requests,
        iteration_counter, "task_refresh_interval_hint_ms",
        task_refresh_interval_hint_ms, "output_types", output_types,
        "output_shapes", output_shapes, "data_transfer_protocol",
        data_transfer_protocol, "target_workers", target_workers,
        "cross_trainer_cache_options", cross_trainer_cache_options)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return data_service_dataset_eager_fallback(
          dataset_id, processing_mode, address, protocol, job_name,
          max_outstanding_requests, iteration_counter,
          task_refresh_interval_hint_ms=task_refresh_interval_hint_ms,
          output_types=output_types, output_shapes=output_shapes,
          data_transfer_protocol=data_transfer_protocol,
          target_workers=target_workers,
          cross_trainer_cache_options=cross_trainer_cache_options, name=name,
          ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'data_service_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'data_service_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if task_refresh_interval_hint_ms is None:
    task_refresh_interval_hint_ms = -1
  task_refresh_interval_hint_ms = _execute.make_int(task_refresh_interval_hint_ms, "task_refresh_interval_hint_ms")
  if data_transfer_protocol is None:
    data_transfer_protocol = ""
  data_transfer_protocol = _execute.make_str(data_transfer_protocol, "data_transfer_protocol")
  if target_workers is None:
    target_workers = "AUTO"
  target_workers = _execute.make_str(target_workers, "target_workers")
  if cross_trainer_cache_options is None:
    cross_trainer_cache_options = ""
  cross_trainer_cache_options = _execute.make_str(cross_trainer_cache_options, "cross_trainer_cache_options")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "DataServiceDataset", dataset_id=dataset_id,
                              processing_mode=processing_mode,
                              address=address, protocol=protocol,
                              job_name=job_name,
                              max_outstanding_requests=max_outstanding_requests,
                              iteration_counter=iteration_counter,
                              output_types=output_types,
                              output_shapes=output_shapes,
                              task_refresh_interval_hint_ms=task_refresh_interval_hint_ms,
                              data_transfer_protocol=data_transfer_protocol,
                              target_workers=target_workers,
                              cross_trainer_cache_options=cross_trainer_cache_options,
                              name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("task_refresh_interval_hint_ms",
              _op._get_attr_int("task_refresh_interval_hint_ms"),
              "output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"), "data_transfer_protocol",
              _op.get_attr("data_transfer_protocol"), "target_workers",
              _op.get_attr("target_workers"), "cross_trainer_cache_options",
              _op.get_attr("cross_trainer_cache_options"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "DataServiceDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

DataServiceDataset = tf_export("raw_ops.DataServiceDataset")(_ops.to_raw_op(data_service_dataset))


def data_service_dataset_eager_fallback(dataset_id, processing_mode, address, protocol, job_name, max_outstanding_requests, iteration_counter, output_types, output_shapes, task_refresh_interval_hint_ms, data_transfer_protocol, target_workers, cross_trainer_cache_options, name, ctx):
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'data_service_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'data_service_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if task_refresh_interval_hint_ms is None:
    task_refresh_interval_hint_ms = -1
  task_refresh_interval_hint_ms = _execute.make_int(task_refresh_interval_hint_ms, "task_refresh_interval_hint_ms")
  if data_transfer_protocol is None:
    data_transfer_protocol = ""
  data_transfer_protocol = _execute.make_str(data_transfer_protocol, "data_transfer_protocol")
  if target_workers is None:
    target_workers = "AUTO"
  target_workers = _execute.make_str(target_workers, "target_workers")
  if cross_trainer_cache_options is None:
    cross_trainer_cache_options = ""
  cross_trainer_cache_options = _execute.make_str(cross_trainer_cache_options, "cross_trainer_cache_options")
  dataset_id = _ops.convert_to_tensor(dataset_id, _dtypes.int64)
  processing_mode = _ops.convert_to_tensor(processing_mode, _dtypes.string)
  address = _ops.convert_to_tensor(address, _dtypes.string)
  protocol = _ops.convert_to_tensor(protocol, _dtypes.string)
  job_name = _ops.convert_to_tensor(job_name, _dtypes.string)
  max_outstanding_requests = _ops.convert_to_tensor(max_outstanding_requests, _dtypes.int64)
  iteration_counter = _ops.convert_to_tensor(iteration_counter, _dtypes.resource)
  _inputs_flat = [dataset_id, processing_mode, address, protocol, job_name, max_outstanding_requests, iteration_counter]
  _attrs = ("task_refresh_interval_hint_ms", task_refresh_interval_hint_ms,
            "output_types", output_types, "output_shapes", output_shapes,
            "data_transfer_protocol", data_transfer_protocol,
            "target_workers", target_workers, "cross_trainer_cache_options",
            cross_trainer_cache_options)
  _result = _execute.execute(b"DataServiceDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "DataServiceDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
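
# This raw op underlies the tf.data service integration; driving it directly
# requires a registered dataset id and an iteration-counter resource, so the
# sketch below uses the public entry point instead (assumes TF 2.x with an
# in-process dispatcher and worker):
def _example_data_service():
  import tensorflow as tf
  dispatcher = tf.data.experimental.service.DispatchServer()
  worker = tf.data.experimental.service.WorkerServer(
      tf.data.experimental.service.WorkerConfig(
          dispatcher_address=dispatcher.target.split("://")[1]))
  ds = tf.data.Dataset.range(5).apply(
      tf.data.experimental.service.distribute(
          processing_mode="parallel_epochs", service=dispatcher.target))
  # Keep the servers alive while iterating.
  return list(ds.as_numpy_iterator()), dispatcher, worker
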
1185def data_service_dataset_v2(dataset_id, processing_mode, address, protocol, job_name, consumer_index, num_consumers, max_outstanding_requests, iteration_counter, output_types, output_shapes, task_refresh_interval_hint_ms=-1, data_transfer_protocol="", target_workers="AUTO", cross_trainer_cache_options="", name=None):
1186 r"""Creates a dataset that reads data from the tf.data service.
1188 Args:
1189 dataset_id: A `Tensor` of type `int64`.
1190 processing_mode: A `Tensor` of type `string`.
1191 address: A `Tensor` of type `string`.
1192 protocol: A `Tensor` of type `string`.
1193 job_name: A `Tensor` of type `string`.
1194 consumer_index: A `Tensor` of type `int64`.
1195 num_consumers: A `Tensor` of type `int64`.
1196 max_outstanding_requests: A `Tensor` of type `int64`.
1197 iteration_counter: A `Tensor` of type `resource`.
1198 output_types: A list of `tf.DTypes` that has length `>= 1`.
1199 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
1200 task_refresh_interval_hint_ms: An optional `int`. Defaults to `-1`.
1201 data_transfer_protocol: An optional `string`. Defaults to `""`.
1202 target_workers: An optional `string`. Defaults to `"AUTO"`.
1203 cross_trainer_cache_options: An optional `string`. Defaults to `""`.
1204 name: A name for the operation (optional).
1206 Returns:
1207 A `Tensor` of type `variant`.
1208 """
1209 _ctx = _context._context or _context.context()
1210 tld = _ctx._thread_local_data
1211 if tld.is_eager:
1212 try:
1213 _result = pywrap_tfe.TFE_Py_FastPathExecute(
1214 _ctx, "DataServiceDatasetV2", name, dataset_id, processing_mode,
1215 address, protocol, job_name, consumer_index, num_consumers,
1216 max_outstanding_requests, iteration_counter,
1217 "task_refresh_interval_hint_ms", task_refresh_interval_hint_ms,
1218 "output_types", output_types, "output_shapes", output_shapes,
1219 "data_transfer_protocol", data_transfer_protocol, "target_workers",
1220 target_workers, "cross_trainer_cache_options",
1221 cross_trainer_cache_options)
1222 return _result
1223 except _core._NotOkStatusException as e:
1224 _ops.raise_from_not_ok_status(e, name)
1225 except _core._FallbackException:
1226 pass
1227 try:
1228 return data_service_dataset_v2_eager_fallback(
1229 dataset_id, processing_mode, address, protocol, job_name,
1230 consumer_index, num_consumers, max_outstanding_requests,
1231 iteration_counter,
1232 task_refresh_interval_hint_ms=task_refresh_interval_hint_ms,
1233 output_types=output_types, output_shapes=output_shapes,
1234 data_transfer_protocol=data_transfer_protocol,
1235 target_workers=target_workers,
1236 cross_trainer_cache_options=cross_trainer_cache_options, name=name,
1237 ctx=_ctx)
1238 except _core._SymbolicException:
1239 pass # Add nodes to the TensorFlow graph.
1241 if not isinstance(output_types, (list, tuple)):
1242 raise TypeError(
1243 "Expected list for 'output_types' argument to "
1244 "'data_service_dataset_v2' Op, not %r." % output_types)
1245 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
1246 if not isinstance(output_shapes, (list, tuple)):
1247 raise TypeError(
1248 "Expected list for 'output_shapes' argument to "
1249 "'data_service_dataset_v2' Op, not %r." % output_shapes)
1250 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
1251 if task_refresh_interval_hint_ms is None:
1252 task_refresh_interval_hint_ms = -1
1253 task_refresh_interval_hint_ms = _execute.make_int(task_refresh_interval_hint_ms, "task_refresh_interval_hint_ms")
1254 if data_transfer_protocol is None:
1255 data_transfer_protocol = ""
1256 data_transfer_protocol = _execute.make_str(data_transfer_protocol, "data_transfer_protocol")
1257 if target_workers is None:
1258 target_workers = "AUTO"
1259 target_workers = _execute.make_str(target_workers, "target_workers")
1260 if cross_trainer_cache_options is None:
1261 cross_trainer_cache_options = ""
1262 cross_trainer_cache_options = _execute.make_str(cross_trainer_cache_options, "cross_trainer_cache_options")
1263 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1264 "DataServiceDatasetV2", dataset_id=dataset_id,
1265 processing_mode=processing_mode,
1266 address=address, protocol=protocol,
1267 job_name=job_name,
1268 consumer_index=consumer_index,
1269 num_consumers=num_consumers,
1270 max_outstanding_requests=max_outstanding_requests,
1271 iteration_counter=iteration_counter,
1272 output_types=output_types,
1273 output_shapes=output_shapes,
1274 task_refresh_interval_hint_ms=task_refresh_interval_hint_ms,
1275 data_transfer_protocol=data_transfer_protocol,
1276 target_workers=target_workers,
1277 cross_trainer_cache_options=cross_trainer_cache_options,
1278 name=name)
1279 _result = _outputs[:]
1280 if _execute.must_record_gradient():
1281 _attrs = ("task_refresh_interval_hint_ms",
1282 _op._get_attr_int("task_refresh_interval_hint_ms"),
1283 "output_types", _op.get_attr("output_types"), "output_shapes",
1284 _op.get_attr("output_shapes"), "data_transfer_protocol",
1285 _op.get_attr("data_transfer_protocol"), "target_workers",
1286 _op.get_attr("target_workers"), "cross_trainer_cache_options",
1287 _op.get_attr("cross_trainer_cache_options"))
1288 _inputs_flat = _op.inputs
1289 _execute.record_gradient(
1290 "DataServiceDatasetV2", _inputs_flat, _attrs, _result)
1291 _result, = _result
1292 return _result
1294DataServiceDatasetV2 = tf_export("raw_ops.DataServiceDatasetV2")(_ops.to_raw_op(data_service_dataset_v2))
1297def data_service_dataset_v2_eager_fallback(dataset_id, processing_mode, address, protocol, job_name, consumer_index, num_consumers, max_outstanding_requests, iteration_counter, output_types, output_shapes, task_refresh_interval_hint_ms, data_transfer_protocol, target_workers, cross_trainer_cache_options, name, ctx):
1298 if not isinstance(output_types, (list, tuple)):
1299 raise TypeError(
1300 "Expected list for 'output_types' argument to "
1301 "'data_service_dataset_v2' Op, not %r." % output_types)
1302 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
1303 if not isinstance(output_shapes, (list, tuple)):
1304 raise TypeError(
1305 "Expected list for 'output_shapes' argument to "
1306 "'data_service_dataset_v2' Op, not %r." % output_shapes)
1307 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
1308 if task_refresh_interval_hint_ms is None:
1309 task_refresh_interval_hint_ms = -1
1310 task_refresh_interval_hint_ms = _execute.make_int(task_refresh_interval_hint_ms, "task_refresh_interval_hint_ms")
1311 if data_transfer_protocol is None:
1312 data_transfer_protocol = ""
1313 data_transfer_protocol = _execute.make_str(data_transfer_protocol, "data_transfer_protocol")
1314 if target_workers is None:
1315 target_workers = "AUTO"
1316 target_workers = _execute.make_str(target_workers, "target_workers")
1317 if cross_trainer_cache_options is None:
1318 cross_trainer_cache_options = ""
1319 cross_trainer_cache_options = _execute.make_str(cross_trainer_cache_options, "cross_trainer_cache_options")
1320 dataset_id = _ops.convert_to_tensor(dataset_id, _dtypes.int64)
1321 processing_mode = _ops.convert_to_tensor(processing_mode, _dtypes.string)
1322 address = _ops.convert_to_tensor(address, _dtypes.string)
1323 protocol = _ops.convert_to_tensor(protocol, _dtypes.string)
1324 job_name = _ops.convert_to_tensor(job_name, _dtypes.string)
1325 consumer_index = _ops.convert_to_tensor(consumer_index, _dtypes.int64)
1326 num_consumers = _ops.convert_to_tensor(num_consumers, _dtypes.int64)
1327 max_outstanding_requests = _ops.convert_to_tensor(max_outstanding_requests, _dtypes.int64)
1328 iteration_counter = _ops.convert_to_tensor(iteration_counter, _dtypes.resource)
1329 _inputs_flat = [dataset_id, processing_mode, address, protocol, job_name, consumer_index, num_consumers, max_outstanding_requests, iteration_counter]
1330 _attrs = ("task_refresh_interval_hint_ms", task_refresh_interval_hint_ms,
1331 "output_types", output_types, "output_shapes", output_shapes,
1332 "data_transfer_protocol", data_transfer_protocol, "target_workers",
1333 target_workers, "cross_trainer_cache_options", cross_trainer_cache_options)
1334 _result = _execute.execute(b"DataServiceDatasetV2", 1, inputs=_inputs_flat,
1335 attrs=_attrs, ctx=ctx, name=name)
1336 if _execute.must_record_gradient():
1337 _execute.record_gradient(
1338 "DataServiceDatasetV2", _inputs_flat, _attrs, _result)
1339 _result, = _result
1340 return _result
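# A minimal, hedged sketch of invoking DataServiceDatasetV2 in eager mode.
# The dispatcher address, job name, element spec, and the -1 sentinels are
# illustrative assumptions; real programs normally go through
# tf.data.experimental.service rather than this raw op.
def _example_data_service_dataset_v2():  # hypothetical helper, illustration only
  import tensorflow as tf  # local import; this module is part of TensorFlow
  counter = tf.raw_ops.DummyIterationCounter()  # resource input (defined below)
  return tf.raw_ops.DataServiceDatasetV2(
      dataset_id=0,                        # id returned by RegisterDataset
      processing_mode="parallel_epochs",   # tf.data service processing mode
      address="localhost:5050",            # dispatcher address (assumed)
      protocol="grpc",
      job_name="example_job",
      consumer_index=-1,                   # -1: no round-robin reads (assumed)
      num_consumers=-1,
      max_outstanding_requests=-1,         # -1 lets the runtime choose
      iteration_counter=counter,
      output_types=[tf.int64],             # element spec of the registered
      output_shapes=[tf.TensorShape([])])  # dataset, assumed scalar int64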
1343def data_service_dataset_v3(dataset_id, processing_mode, address, protocol, job_name, consumer_index, num_consumers, max_outstanding_requests, iteration_counter, output_types, output_shapes, uncompress_fn, task_refresh_interval_hint_ms=-1, data_transfer_protocol="", target_workers="AUTO", uncompress=False, cross_trainer_cache_options="", name=None):
1344 r"""Creates a dataset that reads data from the tf.data service.
1346 Args:
1347 dataset_id: A `Tensor` of type `int64`.
1348 processing_mode: A `Tensor` of type `string`.
1349 address: A `Tensor` of type `string`.
1350 protocol: A `Tensor` of type `string`.
1351 job_name: A `Tensor` of type `string`.
1352 consumer_index: A `Tensor` of type `int64`.
1353 num_consumers: A `Tensor` of type `int64`.
1354 max_outstanding_requests: A `Tensor` of type `int64`.
1355 iteration_counter: A `Tensor` of type `resource`.
1356 output_types: A list of `tf.DTypes` that has length `>= 1`.
1357 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
1358 uncompress_fn: A function decorated with @Defun.
1359 task_refresh_interval_hint_ms: An optional `int`. Defaults to `-1`.
1360 data_transfer_protocol: An optional `string`. Defaults to `""`.
1361 target_workers: An optional `string`. Defaults to `"AUTO"`.
1362 uncompress: An optional `bool`. Defaults to `False`.
1363 cross_trainer_cache_options: An optional `string`. Defaults to `""`.
1364 name: A name for the operation (optional).
1366 Returns:
1367 A `Tensor` of type `variant`.
1368 """
1369 _ctx = _context._context or _context.context()
1370 tld = _ctx._thread_local_data
1371 if tld.is_eager:
1372 try:
1373 _result = pywrap_tfe.TFE_Py_FastPathExecute(
1374 _ctx, "DataServiceDatasetV3", name, dataset_id, processing_mode,
1375 address, protocol, job_name, consumer_index, num_consumers,
1376 max_outstanding_requests, iteration_counter,
1377 "task_refresh_interval_hint_ms", task_refresh_interval_hint_ms,
1378 "output_types", output_types, "output_shapes", output_shapes,
1379 "data_transfer_protocol", data_transfer_protocol, "target_workers",
1380 target_workers, "uncompress", uncompress, "uncompress_fn",
1381 uncompress_fn, "cross_trainer_cache_options",
1382 cross_trainer_cache_options)
1383 return _result
1384 except _core._NotOkStatusException as e:
1385 _ops.raise_from_not_ok_status(e, name)
1386 except _core._FallbackException:
1387 pass
1388 try:
1389 return data_service_dataset_v3_eager_fallback(
1390 dataset_id, processing_mode, address, protocol, job_name,
1391 consumer_index, num_consumers, max_outstanding_requests,
1392 iteration_counter,
1393 task_refresh_interval_hint_ms=task_refresh_interval_hint_ms,
1394 output_types=output_types, output_shapes=output_shapes,
1395 data_transfer_protocol=data_transfer_protocol,
1396 target_workers=target_workers, uncompress=uncompress,
1397 uncompress_fn=uncompress_fn,
1398 cross_trainer_cache_options=cross_trainer_cache_options, name=name,
1399 ctx=_ctx)
1400 except _core._SymbolicException:
1401 pass # Add nodes to the TensorFlow graph.
1403 if not isinstance(output_types, (list, tuple)):
1404 raise TypeError(
1405 "Expected list for 'output_types' argument to "
1406 "'data_service_dataset_v3' Op, not %r." % output_types)
1407 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
1408 if not isinstance(output_shapes, (list, tuple)):
1409 raise TypeError(
1410 "Expected list for 'output_shapes' argument to "
1411 "'data_service_dataset_v3' Op, not %r." % output_shapes)
1412 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
1413 if task_refresh_interval_hint_ms is None:
1414 task_refresh_interval_hint_ms = -1
1415 task_refresh_interval_hint_ms = _execute.make_int(task_refresh_interval_hint_ms, "task_refresh_interval_hint_ms")
1416 if data_transfer_protocol is None:
1417 data_transfer_protocol = ""
1418 data_transfer_protocol = _execute.make_str(data_transfer_protocol, "data_transfer_protocol")
1419 if target_workers is None:
1420 target_workers = "AUTO"
1421 target_workers = _execute.make_str(target_workers, "target_workers")
1422 if uncompress is None:
1423 uncompress = False
1424 uncompress = _execute.make_bool(uncompress, "uncompress")
1425 if cross_trainer_cache_options is None:
1426 cross_trainer_cache_options = ""
1427 cross_trainer_cache_options = _execute.make_str(cross_trainer_cache_options, "cross_trainer_cache_options")
1428 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1429 "DataServiceDatasetV3", dataset_id=dataset_id,
1430 processing_mode=processing_mode,
1431 address=address, protocol=protocol,
1432 job_name=job_name,
1433 consumer_index=consumer_index,
1434 num_consumers=num_consumers,
1435 max_outstanding_requests=max_outstanding_requests,
1436 iteration_counter=iteration_counter,
1437 output_types=output_types,
1438 output_shapes=output_shapes,
1439 uncompress_fn=uncompress_fn,
1440 task_refresh_interval_hint_ms=task_refresh_interval_hint_ms,
1441 data_transfer_protocol=data_transfer_protocol,
1442 target_workers=target_workers,
1443 uncompress=uncompress,
1444 cross_trainer_cache_options=cross_trainer_cache_options,
1445 name=name)
1446 _result = _outputs[:]
1447 if _execute.must_record_gradient():
1448 _attrs = ("task_refresh_interval_hint_ms",
1449 _op._get_attr_int("task_refresh_interval_hint_ms"),
1450 "output_types", _op.get_attr("output_types"), "output_shapes",
1451 _op.get_attr("output_shapes"), "data_transfer_protocol",
1452 _op.get_attr("data_transfer_protocol"), "target_workers",
1453 _op.get_attr("target_workers"), "uncompress",
1454 _op._get_attr_bool("uncompress"), "uncompress_fn",
1455 _op.get_attr("uncompress_fn"), "cross_trainer_cache_options",
1456 _op.get_attr("cross_trainer_cache_options"))
1457 _inputs_flat = _op.inputs
1458 _execute.record_gradient(
1459 "DataServiceDatasetV3", _inputs_flat, _attrs, _result)
1460 _result, = _result
1461 return _result
1463DataServiceDatasetV3 = tf_export("raw_ops.DataServiceDatasetV3")(_ops.to_raw_op(data_service_dataset_v3))
1466def data_service_dataset_v3_eager_fallback(dataset_id, processing_mode, address, protocol, job_name, consumer_index, num_consumers, max_outstanding_requests, iteration_counter, output_types, output_shapes, uncompress_fn, task_refresh_interval_hint_ms, data_transfer_protocol, target_workers, uncompress, cross_trainer_cache_options, name, ctx):
1467 if not isinstance(output_types, (list, tuple)):
1468 raise TypeError(
1469 "Expected list for 'output_types' argument to "
1470 "'data_service_dataset_v3' Op, not %r." % output_types)
1471 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
1472 if not isinstance(output_shapes, (list, tuple)):
1473 raise TypeError(
1474 "Expected list for 'output_shapes' argument to "
1475 "'data_service_dataset_v3' Op, not %r." % output_shapes)
1476 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
1477 if task_refresh_interval_hint_ms is None:
1478 task_refresh_interval_hint_ms = -1
1479 task_refresh_interval_hint_ms = _execute.make_int(task_refresh_interval_hint_ms, "task_refresh_interval_hint_ms")
1480 if data_transfer_protocol is None:
1481 data_transfer_protocol = ""
1482 data_transfer_protocol = _execute.make_str(data_transfer_protocol, "data_transfer_protocol")
1483 if target_workers is None:
1484 target_workers = "AUTO"
1485 target_workers = _execute.make_str(target_workers, "target_workers")
1486 if uncompress is None:
1487 uncompress = False
1488 uncompress = _execute.make_bool(uncompress, "uncompress")
1489 if cross_trainer_cache_options is None:
1490 cross_trainer_cache_options = ""
1491 cross_trainer_cache_options = _execute.make_str(cross_trainer_cache_options, "cross_trainer_cache_options")
1492 dataset_id = _ops.convert_to_tensor(dataset_id, _dtypes.int64)
1493 processing_mode = _ops.convert_to_tensor(processing_mode, _dtypes.string)
1494 address = _ops.convert_to_tensor(address, _dtypes.string)
1495 protocol = _ops.convert_to_tensor(protocol, _dtypes.string)
1496 job_name = _ops.convert_to_tensor(job_name, _dtypes.string)
1497 consumer_index = _ops.convert_to_tensor(consumer_index, _dtypes.int64)
1498 num_consumers = _ops.convert_to_tensor(num_consumers, _dtypes.int64)
1499 max_outstanding_requests = _ops.convert_to_tensor(max_outstanding_requests, _dtypes.int64)
1500 iteration_counter = _ops.convert_to_tensor(iteration_counter, _dtypes.resource)
1501 _inputs_flat = [dataset_id, processing_mode, address, protocol, job_name, consumer_index, num_consumers, max_outstanding_requests, iteration_counter]
1502 _attrs = ("task_refresh_interval_hint_ms", task_refresh_interval_hint_ms,
1503 "output_types", output_types, "output_shapes", output_shapes,
1504 "data_transfer_protocol", data_transfer_protocol, "target_workers",
1505 target_workers, "uncompress", uncompress, "uncompress_fn", uncompress_fn,
1506 "cross_trainer_cache_options", cross_trainer_cache_options)
1507 _result = _execute.execute(b"DataServiceDatasetV3", 1, inputs=_inputs_flat,
1508 attrs=_attrs, ctx=ctx, name=name)
1509 if _execute.must_record_gradient():
1510 _execute.record_gradient(
1511 "DataServiceDatasetV3", _inputs_flat, _attrs, _result)
1512 _result, = _result
1513 return _result
1516def data_service_dataset_v4(dataset_id, processing_mode, address, protocol, job_name, consumer_index, num_consumers, max_outstanding_requests, iteration_counter, output_types, output_shapes, uncompress_fn, task_refresh_interval_hint_ms=-1, data_transfer_protocol="", target_workers="AUTO", uncompress=False, cross_trainer_cache_options="", name=None):
1517 r"""Creates a dataset that reads data from the tf.data service.
1519 Args:
1520 dataset_id: A `Tensor` of type `string`.
1521 processing_mode: A `Tensor` of type `string`.
1522 address: A `Tensor` of type `string`.
1523 protocol: A `Tensor` of type `string`.
1524 job_name: A `Tensor` of type `string`.
1525 consumer_index: A `Tensor` of type `int64`.
1526 num_consumers: A `Tensor` of type `int64`.
1527 max_outstanding_requests: A `Tensor` of type `int64`.
1528 iteration_counter: A `Tensor` of type `resource`.
1529 output_types: A list of `tf.DTypes` that has length `>= 1`.
1530 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
1531 uncompress_fn: A function decorated with @Defun.
1532 task_refresh_interval_hint_ms: An optional `int`. Defaults to `-1`.
1533 data_transfer_protocol: An optional `string`. Defaults to `""`.
1534 target_workers: An optional `string`. Defaults to `"AUTO"`.
1535 uncompress: An optional `bool`. Defaults to `False`.
1536 cross_trainer_cache_options: An optional `string`. Defaults to `""`.
1537 name: A name for the operation (optional).
1539 Returns:
1540 A `Tensor` of type `variant`.
1541 """
1542 _ctx = _context._context or _context.context()
1543 tld = _ctx._thread_local_data
1544 if tld.is_eager:
1545 try:
1546 _result = pywrap_tfe.TFE_Py_FastPathExecute(
1547 _ctx, "DataServiceDatasetV4", name, dataset_id, processing_mode,
1548 address, protocol, job_name, consumer_index, num_consumers,
1549 max_outstanding_requests, iteration_counter,
1550 "task_refresh_interval_hint_ms", task_refresh_interval_hint_ms,
1551 "output_types", output_types, "output_shapes", output_shapes,
1552 "data_transfer_protocol", data_transfer_protocol, "target_workers",
1553 target_workers, "uncompress", uncompress, "uncompress_fn",
1554 uncompress_fn, "cross_trainer_cache_options",
1555 cross_trainer_cache_options)
1556 return _result
1557 except _core._NotOkStatusException as e:
1558 _ops.raise_from_not_ok_status(e, name)
1559 except _core._FallbackException:
1560 pass
1561 try:
1562 return data_service_dataset_v4_eager_fallback(
1563 dataset_id, processing_mode, address, protocol, job_name,
1564 consumer_index, num_consumers, max_outstanding_requests,
1565 iteration_counter,
1566 task_refresh_interval_hint_ms=task_refresh_interval_hint_ms,
1567 output_types=output_types, output_shapes=output_shapes,
1568 data_transfer_protocol=data_transfer_protocol,
1569 target_workers=target_workers, uncompress=uncompress,
1570 uncompress_fn=uncompress_fn,
1571 cross_trainer_cache_options=cross_trainer_cache_options, name=name,
1572 ctx=_ctx)
1573 except _core._SymbolicException:
1574 pass # Add nodes to the TensorFlow graph.
1576 if not isinstance(output_types, (list, tuple)):
1577 raise TypeError(
1578 "Expected list for 'output_types' argument to "
1579 "'data_service_dataset_v4' Op, not %r." % output_types)
1580 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
1581 if not isinstance(output_shapes, (list, tuple)):
1582 raise TypeError(
1583 "Expected list for 'output_shapes' argument to "
1584 "'data_service_dataset_v4' Op, not %r." % output_shapes)
1585 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
1586 if task_refresh_interval_hint_ms is None:
1587 task_refresh_interval_hint_ms = -1
1588 task_refresh_interval_hint_ms = _execute.make_int(task_refresh_interval_hint_ms, "task_refresh_interval_hint_ms")
1589 if data_transfer_protocol is None:
1590 data_transfer_protocol = ""
1591 data_transfer_protocol = _execute.make_str(data_transfer_protocol, "data_transfer_protocol")
1592 if target_workers is None:
1593 target_workers = "AUTO"
1594 target_workers = _execute.make_str(target_workers, "target_workers")
1595 if uncompress is None:
1596 uncompress = False
1597 uncompress = _execute.make_bool(uncompress, "uncompress")
1598 if cross_trainer_cache_options is None:
1599 cross_trainer_cache_options = ""
1600 cross_trainer_cache_options = _execute.make_str(cross_trainer_cache_options, "cross_trainer_cache_options")
1601 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1602 "DataServiceDatasetV4", dataset_id=dataset_id,
1603 processing_mode=processing_mode,
1604 address=address, protocol=protocol,
1605 job_name=job_name,
1606 consumer_index=consumer_index,
1607 num_consumers=num_consumers,
1608 max_outstanding_requests=max_outstanding_requests,
1609 iteration_counter=iteration_counter,
1610 output_types=output_types,
1611 output_shapes=output_shapes,
1612 uncompress_fn=uncompress_fn,
1613 task_refresh_interval_hint_ms=task_refresh_interval_hint_ms,
1614 data_transfer_protocol=data_transfer_protocol,
1615 target_workers=target_workers,
1616 uncompress=uncompress,
1617 cross_trainer_cache_options=cross_trainer_cache_options,
1618 name=name)
1619 _result = _outputs[:]
1620 if _execute.must_record_gradient():
1621 _attrs = ("task_refresh_interval_hint_ms",
1622 _op._get_attr_int("task_refresh_interval_hint_ms"),
1623 "output_types", _op.get_attr("output_types"), "output_shapes",
1624 _op.get_attr("output_shapes"), "data_transfer_protocol",
1625 _op.get_attr("data_transfer_protocol"), "target_workers",
1626 _op.get_attr("target_workers"), "uncompress",
1627 _op._get_attr_bool("uncompress"), "uncompress_fn",
1628 _op.get_attr("uncompress_fn"), "cross_trainer_cache_options",
1629 _op.get_attr("cross_trainer_cache_options"))
1630 _inputs_flat = _op.inputs
1631 _execute.record_gradient(
1632 "DataServiceDatasetV4", _inputs_flat, _attrs, _result)
1633 _result, = _result
1634 return _result
1636DataServiceDatasetV4 = tf_export("raw_ops.DataServiceDatasetV4")(_ops.to_raw_op(data_service_dataset_v4))
1639def data_service_dataset_v4_eager_fallback(dataset_id, processing_mode, address, protocol, job_name, consumer_index, num_consumers, max_outstanding_requests, iteration_counter, output_types, output_shapes, uncompress_fn, task_refresh_interval_hint_ms, data_transfer_protocol, target_workers, uncompress, cross_trainer_cache_options, name, ctx):
1640 if not isinstance(output_types, (list, tuple)):
1641 raise TypeError(
1642 "Expected list for 'output_types' argument to "
1643 "'data_service_dataset_v4' Op, not %r." % output_types)
1644 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
1645 if not isinstance(output_shapes, (list, tuple)):
1646 raise TypeError(
1647 "Expected list for 'output_shapes' argument to "
1648 "'data_service_dataset_v4' Op, not %r." % output_shapes)
1649 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
1650 if task_refresh_interval_hint_ms is None:
1651 task_refresh_interval_hint_ms = -1
1652 task_refresh_interval_hint_ms = _execute.make_int(task_refresh_interval_hint_ms, "task_refresh_interval_hint_ms")
1653 if data_transfer_protocol is None:
1654 data_transfer_protocol = ""
1655 data_transfer_protocol = _execute.make_str(data_transfer_protocol, "data_transfer_protocol")
1656 if target_workers is None:
1657 target_workers = "AUTO"
1658 target_workers = _execute.make_str(target_workers, "target_workers")
1659 if uncompress is None:
1660 uncompress = False
1661 uncompress = _execute.make_bool(uncompress, "uncompress")
1662 if cross_trainer_cache_options is None:
1663 cross_trainer_cache_options = ""
1664 cross_trainer_cache_options = _execute.make_str(cross_trainer_cache_options, "cross_trainer_cache_options")
1665 dataset_id = _ops.convert_to_tensor(dataset_id, _dtypes.string)
1666 processing_mode = _ops.convert_to_tensor(processing_mode, _dtypes.string)
1667 address = _ops.convert_to_tensor(address, _dtypes.string)
1668 protocol = _ops.convert_to_tensor(protocol, _dtypes.string)
1669 job_name = _ops.convert_to_tensor(job_name, _dtypes.string)
1670 consumer_index = _ops.convert_to_tensor(consumer_index, _dtypes.int64)
1671 num_consumers = _ops.convert_to_tensor(num_consumers, _dtypes.int64)
1672 max_outstanding_requests = _ops.convert_to_tensor(max_outstanding_requests, _dtypes.int64)
1673 iteration_counter = _ops.convert_to_tensor(iteration_counter, _dtypes.resource)
1674 _inputs_flat = [dataset_id, processing_mode, address, protocol, job_name, consumer_index, num_consumers, max_outstanding_requests, iteration_counter]
1675 _attrs = ("task_refresh_interval_hint_ms", task_refresh_interval_hint_ms,
1676 "output_types", output_types, "output_shapes", output_shapes,
1677 "data_transfer_protocol", data_transfer_protocol, "target_workers",
1678 target_workers, "uncompress", uncompress, "uncompress_fn", uncompress_fn,
1679 "cross_trainer_cache_options", cross_trainer_cache_options)
1680 _result = _execute.execute(b"DataServiceDatasetV4", 1, inputs=_inputs_flat,
1681 attrs=_attrs, ctx=ctx, name=name)
1682 if _execute.must_record_gradient():
1683 _execute.record_gradient(
1684 "DataServiceDatasetV4", _inputs_flat, _attrs, _result)
1685 _result, = _result
1686 return _result
1689def dataset_from_graph(graph_def, name=None):
1690 r"""Creates a dataset from the given `graph_def`.
1694 Args:
1695 graph_def: A `Tensor` of type `string`.
1696 The graph representation of the dataset (as serialized GraphDef).
1697 name: A name for the operation (optional).
1699 Returns:
1700 A `Tensor` of type `variant`.
1701 """
1702 _ctx = _context._context or _context.context()
1703 tld = _ctx._thread_local_data
1704 if tld.is_eager:
1705 try:
1706 _result = pywrap_tfe.TFE_Py_FastPathExecute(
1707 _ctx, "DatasetFromGraph", name, graph_def)
1708 return _result
1709 except _core._NotOkStatusException as e:
1710 _ops.raise_from_not_ok_status(e, name)
1711 except _core._FallbackException:
1712 pass
1713 try:
1714 return dataset_from_graph_eager_fallback(
1715 graph_def, name=name, ctx=_ctx)
1716 except _core._SymbolicException:
1717 pass # Add nodes to the TensorFlow graph.
1719 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1720 "DatasetFromGraph", graph_def=graph_def, name=name)
1721 _result = _outputs[:]
1722 if _execute.must_record_gradient():
1723 _attrs = ()
1724 _inputs_flat = _op.inputs
1725 _execute.record_gradient(
1726 "DatasetFromGraph", _inputs_flat, _attrs, _result)
1727 _result, = _result
1728 return _result
1730DatasetFromGraph = tf_export("raw_ops.DatasetFromGraph")(_ops.to_raw_op(dataset_from_graph))
1733def dataset_from_graph_eager_fallback(graph_def, name, ctx):
1734 graph_def = _ops.convert_to_tensor(graph_def, _dtypes.string)
1735 _inputs_flat = [graph_def]
1736 _attrs = None
1737 _result = _execute.execute(b"DatasetFromGraph", 1, inputs=_inputs_flat,
1738 attrs=_attrs, ctx=ctx, name=name)
1739 if _execute.must_record_gradient():
1740 _execute.record_gradient(
1741 "DatasetFromGraph", _inputs_flat, _attrs, _result)
1742 _result, = _result
1743 return _result
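# A minimal, hedged sketch: round-trips a dataset through its serialized
# GraphDef. The DatasetToGraphV2 raw op and the private `_variant_tensor`
# handle are assumptions used here only for illustration.
def _example_dataset_from_graph():  # hypothetical helper, illustration only
  import tensorflow as tf
  ds = tf.data.Dataset.range(3)
  graph_def = tf.raw_ops.DatasetToGraphV2(input_dataset=ds._variant_tensor)
  return tf.raw_ops.DatasetFromGraph(graph_def=graph_def)  # variant handle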
1746def dataset_to_tf_record(input_dataset, filename, compression_type, name=None):
1747 r"""Writes the given dataset to the given file using the TFRecord format.
1749 Args:
1750 input_dataset: A `Tensor` of type `variant`.
1751 A variant tensor representing the dataset to write.
1752 filename: A `Tensor` of type `string`.
1753 A scalar string tensor representing the filename to use.
1754 compression_type: A `Tensor` of type `string`.
1755 A scalar string tensor containing either (i) the empty string (no
1756 compression), (ii) "ZLIB", or (iii) "GZIP".
1757 name: A name for the operation (optional).
1759 Returns:
1760 The created Operation.
1761 """
1762 _ctx = _context._context or _context.context()
1763 tld = _ctx._thread_local_data
1764 if tld.is_eager:
1765 try:
1766 _result = pywrap_tfe.TFE_Py_FastPathExecute(
1767 _ctx, "DatasetToTFRecord", name, input_dataset, filename,
1768 compression_type)
1769 return _result
1770 except _core._NotOkStatusException as e:
1771 _ops.raise_from_not_ok_status(e, name)
1772 except _core._FallbackException:
1773 pass
1774 try:
1775 return dataset_to_tf_record_eager_fallback(
1776 input_dataset, filename, compression_type, name=name, ctx=_ctx)
1777 except _core._SymbolicException:
1778 pass # Add nodes to the TensorFlow graph.
1780 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1781 "DatasetToTFRecord", input_dataset=input_dataset, filename=filename,
1782 compression_type=compression_type, name=name)
1783 return _op
1784DatasetToTFRecord = tf_export("raw_ops.DatasetToTFRecord")(_ops.to_raw_op(dataset_to_tf_record))
1787def dataset_to_tf_record_eager_fallback(input_dataset, filename, compression_type, name, ctx):
1788 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
1789 filename = _ops.convert_to_tensor(filename, _dtypes.string)
1790 compression_type = _ops.convert_to_tensor(compression_type, _dtypes.string)
1791 _inputs_flat = [input_dataset, filename, compression_type]
1792 _attrs = None
1793 _result = _execute.execute(b"DatasetToTFRecord", 0, inputs=_inputs_flat,
1794 attrs=_attrs, ctx=ctx, name=name)
1795 _result = None
1796 return _result
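# A minimal, hedged sketch of writing a dataset of scalar strings to a
# TFRecord file; the path is illustrative and `_variant_tensor` is a private
# handle assumed for the sketch.
def _example_dataset_to_tf_record():  # hypothetical helper, illustration only
  import tensorflow as tf
  ds = tf.data.Dataset.from_tensor_slices([b"a", b"b"])  # scalar string elements
  tf.raw_ops.DatasetToTFRecord(
      input_dataset=ds._variant_tensor,
      filename="/tmp/example.tfrecord",
      compression_type="")  # "" (none), "ZLIB", or "GZIP"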
1799def dense_to_sparse_batch_dataset(input_dataset, batch_size, row_shape, output_types, output_shapes, name=None):
1800 r"""Creates a dataset that batches input elements into a SparseTensor.
1802 Args:
1803 input_dataset: A `Tensor` of type `variant`.
1804 A handle to an input dataset. Must have a single component.
1805 batch_size: A `Tensor` of type `int64`.
1806 A scalar representing the number of elements to accumulate in a
1807 batch.
1808 row_shape: A `Tensor` of type `int64`.
1809 A vector representing the dense shape of each row in the produced
1810 SparseTensor. The shape may be partially specified, using `-1` to indicate
1811 that a particular dimension should use the maximum size of all batch elements.
1812 output_types: A list of `tf.DTypes` that has length `>= 1`.
1813 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
1814 name: A name for the operation (optional).
1816 Returns:
1817 A `Tensor` of type `variant`.
1818 """
1819 _ctx = _context._context or _context.context()
1820 tld = _ctx._thread_local_data
1821 if tld.is_eager:
1822 try:
1823 _result = pywrap_tfe.TFE_Py_FastPathExecute(
1824 _ctx, "DenseToSparseBatchDataset", name, input_dataset, batch_size,
1825 row_shape, "output_types", output_types, "output_shapes",
1826 output_shapes)
1827 return _result
1828 except _core._NotOkStatusException as e:
1829 _ops.raise_from_not_ok_status(e, name)
1830 except _core._FallbackException:
1831 pass
1832 try:
1833 return dense_to_sparse_batch_dataset_eager_fallback(
1834 input_dataset, batch_size, row_shape, output_types=output_types,
1835 output_shapes=output_shapes, name=name, ctx=_ctx)
1836 except _core._SymbolicException:
1837 pass # Add nodes to the TensorFlow graph.
1839 if not isinstance(output_types, (list, tuple)):
1840 raise TypeError(
1841 "Expected list for 'output_types' argument to "
1842 "'dense_to_sparse_batch_dataset' Op, not %r." % output_types)
1843 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
1844 if not isinstance(output_shapes, (list, tuple)):
1845 raise TypeError(
1846 "Expected list for 'output_shapes' argument to "
1847 "'dense_to_sparse_batch_dataset' Op, not %r." % output_shapes)
1848 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
1849 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1850 "DenseToSparseBatchDataset", input_dataset=input_dataset,
1851 batch_size=batch_size,
1852 row_shape=row_shape,
1853 output_types=output_types,
1854 output_shapes=output_shapes, name=name)
1855 _result = _outputs[:]
1856 if _execute.must_record_gradient():
1857 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
1858 _op.get_attr("output_shapes"))
1859 _inputs_flat = _op.inputs
1860 _execute.record_gradient(
1861 "DenseToSparseBatchDataset", _inputs_flat, _attrs, _result)
1862 _result, = _result
1863 return _result
1865DenseToSparseBatchDataset = tf_export("raw_ops.DenseToSparseBatchDataset")(_ops.to_raw_op(dense_to_sparse_batch_dataset))
1868def dense_to_sparse_batch_dataset_eager_fallback(input_dataset, batch_size, row_shape, output_types, output_shapes, name, ctx):
1869 if not isinstance(output_types, (list, tuple)):
1870 raise TypeError(
1871 "Expected list for 'output_types' argument to "
1872 "'dense_to_sparse_batch_dataset' Op, not %r." % output_types)
1873 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
1874 if not isinstance(output_shapes, (list, tuple)):
1875 raise TypeError(
1876 "Expected list for 'output_shapes' argument to "
1877 "'dense_to_sparse_batch_dataset' Op, not %r." % output_shapes)
1878 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
1879 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
1880 batch_size = _ops.convert_to_tensor(batch_size, _dtypes.int64)
1881 row_shape = _ops.convert_to_tensor(row_shape, _dtypes.int64)
1882 _inputs_flat = [input_dataset, batch_size, row_shape]
1883 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
1884 _result = _execute.execute(b"DenseToSparseBatchDataset", 1,
1885 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
1886 name=name)
1887 if _execute.must_record_gradient():
1888 _execute.record_gradient(
1889 "DenseToSparseBatchDataset", _inputs_flat, _attrs, _result)
1890 _result, = _result
1891 return _result
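# A minimal, hedged sketch using the public wrapper around this op,
# tf.data.experimental.dense_to_sparse_batch, which fills in the op's
# output_types/output_shapes (they must describe the runtime's flat
# SparseTensor encoding); the values below are illustrative.
def _example_dense_to_sparse_batch():  # hypothetical helper, illustration only
  import tensorflow as tf
  ds = tf.data.Dataset.from_tensor_slices(tf.constant([[1, 0], [2, 3]]))
  # row_shape=[4] pads each row to dense shape [4]; -1 would use the batch max.
  return ds.apply(
      tf.data.experimental.dense_to_sparse_batch(batch_size=2, row_shape=[4]))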
1894def directed_interleave_dataset(selector_input_dataset, data_input_datasets, output_types, output_shapes, stop_on_empty_dataset=False, name=None):
1895 r"""A substitute for `InterleaveDataset` on a fixed list of `N` datasets.
1897 Args:
1898 selector_input_dataset: A `Tensor` of type `variant`.
1899 A dataset of scalar `DT_INT64` elements that determines which of the
1900 `N` data inputs should produce the next output element.
1901 data_input_datasets: A list of at least 1 `Tensor` objects with type `variant`.
1902 `N` datasets with the same type that will be interleaved according to
1903 the values of `selector_input_dataset`.
1904 output_types: A list of `tf.DTypes` that has length `>= 1`.
1905 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
1906 stop_on_empty_dataset: An optional `bool`. Defaults to `False`.
1907 name: A name for the operation (optional).
1909 Returns:
1910 A `Tensor` of type `variant`.
1911 """
1912 _ctx = _context._context or _context.context()
1913 tld = _ctx._thread_local_data
1914 if tld.is_eager:
1915 try:
1916 _result = pywrap_tfe.TFE_Py_FastPathExecute(
1917 _ctx, "DirectedInterleaveDataset", name, selector_input_dataset,
1918 data_input_datasets, "output_types", output_types, "output_shapes",
1919 output_shapes, "stop_on_empty_dataset", stop_on_empty_dataset)
1920 return _result
1921 except _core._NotOkStatusException as e:
1922 _ops.raise_from_not_ok_status(e, name)
1923 except _core._FallbackException:
1924 pass
1925 try:
1926 return directed_interleave_dataset_eager_fallback(
1927 selector_input_dataset, data_input_datasets,
1928 output_types=output_types, output_shapes=output_shapes,
1929 stop_on_empty_dataset=stop_on_empty_dataset, name=name, ctx=_ctx)
1930 except _core._SymbolicException:
1931 pass # Add nodes to the TensorFlow graph.
1933 if not isinstance(data_input_datasets, (list, tuple)):
1934 raise TypeError(
1935 "Expected list for 'data_input_datasets' argument to "
1936 "'directed_interleave_dataset' Op, not %r." % data_input_datasets)
1937 _attr_N = len(data_input_datasets)
1938 if not isinstance(output_types, (list, tuple)):
1939 raise TypeError(
1940 "Expected list for 'output_types' argument to "
1941 "'directed_interleave_dataset' Op, not %r." % output_types)
1942 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
1943 if not isinstance(output_shapes, (list, tuple)):
1944 raise TypeError(
1945 "Expected list for 'output_shapes' argument to "
1946 "'directed_interleave_dataset' Op, not %r." % output_shapes)
1947 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
1948 if stop_on_empty_dataset is None:
1949 stop_on_empty_dataset = False
1950 stop_on_empty_dataset = _execute.make_bool(stop_on_empty_dataset, "stop_on_empty_dataset")
1951 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1952 "DirectedInterleaveDataset", selector_input_dataset=selector_input_dataset,
1953 data_input_datasets=data_input_datasets,
1954 output_types=output_types,
1955 output_shapes=output_shapes,
1956 stop_on_empty_dataset=stop_on_empty_dataset,
1957 name=name)
1958 _result = _outputs[:]
1959 if _execute.must_record_gradient():
1960 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
1961 _op.get_attr("output_shapes"), "N", _op._get_attr_int("N"),
1962 "stop_on_empty_dataset",
1963 _op._get_attr_bool("stop_on_empty_dataset"))
1964 _inputs_flat = _op.inputs
1965 _execute.record_gradient(
1966 "DirectedInterleaveDataset", _inputs_flat, _attrs, _result)
1967 _result, = _result
1968 return _result
1970DirectedInterleaveDataset = tf_export("raw_ops.DirectedInterleaveDataset")(_ops.to_raw_op(directed_interleave_dataset))
1973def directed_interleave_dataset_eager_fallback(selector_input_dataset, data_input_datasets, output_types, output_shapes, stop_on_empty_dataset, name, ctx):
1974 if not isinstance(data_input_datasets, (list, tuple)):
1975 raise TypeError(
1976 "Expected list for 'data_input_datasets' argument to "
1977 "'directed_interleave_dataset' Op, not %r." % data_input_datasets)
1978 _attr_N = len(data_input_datasets)
1979 if not isinstance(output_types, (list, tuple)):
1980 raise TypeError(
1981 "Expected list for 'output_types' argument to "
1982 "'directed_interleave_dataset' Op, not %r." % output_types)
1983 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
1984 if not isinstance(output_shapes, (list, tuple)):
1985 raise TypeError(
1986 "Expected list for 'output_shapes' argument to "
1987 "'directed_interleave_dataset' Op, not %r." % output_shapes)
1988 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
1989 if stop_on_empty_dataset is None:
1990 stop_on_empty_dataset = False
1991 stop_on_empty_dataset = _execute.make_bool(stop_on_empty_dataset, "stop_on_empty_dataset")
1992 selector_input_dataset = _ops.convert_to_tensor(selector_input_dataset, _dtypes.variant)
1993 data_input_datasets = _ops.convert_n_to_tensor(data_input_datasets, _dtypes.variant)
1994 _inputs_flat = [selector_input_dataset] + list(data_input_datasets)
1995 _attrs = ("output_types", output_types, "output_shapes", output_shapes, "N",
1996 _attr_N, "stop_on_empty_dataset", stop_on_empty_dataset)
1997 _result = _execute.execute(b"DirectedInterleaveDataset", 1,
1998 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
1999 name=name)
2000 if _execute.must_record_gradient():
2001 _execute.record_gradient(
2002 "DirectedInterleaveDataset", _inputs_flat, _attrs, _result)
2003 _result, = _result
2004 return _result
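# A minimal, hedged sketch: interleaves two datasets according to a selector
# dataset of scalar int64 indices (the op used by
# tf.data.Dataset.sample_from_datasets); all values are illustrative.
def _example_directed_interleave():  # hypothetical helper, illustration only
  import tensorflow as tf
  selector = tf.data.Dataset.from_tensor_slices(
      tf.constant([0, 1, 0, 1], tf.int64))
  a = tf.data.Dataset.from_tensor_slices(tf.constant([1, 2], tf.int64))
  b = tf.data.Dataset.from_tensor_slices(tf.constant([10, 20], tf.int64))
  return tf.raw_ops.DirectedInterleaveDataset(
      selector_input_dataset=selector._variant_tensor,
      data_input_datasets=[a._variant_tensor, b._variant_tensor],
      output_types=[tf.int64],
      output_shapes=[tf.TensorShape([])],
      stop_on_empty_dataset=False)  # False: skip exhausted inputs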
2007def distributed_save(dataset, directory, address, metadata="", name=None):
2008 r"""TODO: add doc.
2010 Args:
2011 dataset: A `Tensor` of type `variant`.
2012 directory: A `Tensor` of type `string`.
2013 address: A `Tensor` of type `string`.
2014 metadata: An optional `string`. Defaults to `""`.
2015 name: A name for the operation (optional).
2017 Returns:
2018 The created Operation.
2019 """
2020 _ctx = _context._context or _context.context()
2021 tld = _ctx._thread_local_data
2022 if tld.is_eager:
2023 try:
2024 _result = pywrap_tfe.TFE_Py_FastPathExecute(
2025 _ctx, "DistributedSave", name, dataset, directory, address,
2026 "metadata", metadata)
2027 return _result
2028 except _core._NotOkStatusException as e:
2029 _ops.raise_from_not_ok_status(e, name)
2030 except _core._FallbackException:
2031 pass
2032 try:
2033 return distributed_save_eager_fallback(
2034 dataset, directory, address, metadata=metadata, name=name, ctx=_ctx)
2035 except _core._SymbolicException:
2036 pass # Add nodes to the TensorFlow graph.
2038 if metadata is None:
2039 metadata = ""
2040 metadata = _execute.make_str(metadata, "metadata")
2041 _, _, _op, _outputs = _op_def_library._apply_op_helper(
2042 "DistributedSave", dataset=dataset, directory=directory,
2043 address=address, metadata=metadata, name=name)
2044 return _op
2045DistributedSave = tf_export("raw_ops.DistributedSave")(_ops.to_raw_op(distributed_save))
2048def distributed_save_eager_fallback(dataset, directory, address, metadata, name, ctx):
2049 if metadata is None:
2050 metadata = ""
2051 metadata = _execute.make_str(metadata, "metadata")
2052 dataset = _ops.convert_to_tensor(dataset, _dtypes.variant)
2053 directory = _ops.convert_to_tensor(directory, _dtypes.string)
2054 address = _ops.convert_to_tensor(address, _dtypes.string)
2055 _inputs_flat = [dataset, directory, address]
2056 _attrs = ("metadata", metadata)
2057 _result = _execute.execute(b"DistributedSave", 0, inputs=_inputs_flat,
2058 attrs=_attrs, ctx=ctx, name=name)
2059 _result = None
2060 return _result
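# A minimal, hedged sketch: asks a tf.data service dispatcher to start saving
# a dataset; the directory and dispatcher address are illustrative.
def _example_distributed_save():  # hypothetical helper, illustration only
  import tensorflow as tf
  ds = tf.data.Dataset.range(10)
  tf.raw_ops.DistributedSave(
      dataset=ds._variant_tensor,
      directory="/tmp/snapshot",   # target directory (assumed writable)
      address="localhost:5050",    # dispatcher address (assumed)
      metadata="")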
2063def dummy_iteration_counter(name=None):
2064 r"""TODO: add doc.
2066 Args:
2067 name: A name for the operation (optional).
2069 Returns:
2070 A `Tensor` of type `resource`.
2071 """
2072 _ctx = _context._context or _context.context()
2073 tld = _ctx._thread_local_data
2074 if tld.is_eager:
2075 try:
2076 _result = pywrap_tfe.TFE_Py_FastPathExecute(
2077 _ctx, "DummyIterationCounter", name)
2078 return _result
2079 except _core._NotOkStatusException as e:
2080 _ops.raise_from_not_ok_status(e, name)
2081 except _core._FallbackException:
2082 pass
2083 try:
2084 return dummy_iteration_counter_eager_fallback(
2085 name=name, ctx=_ctx)
2086 except _core._SymbolicException:
2087 pass # Add nodes to the TensorFlow graph.
2089 _, _, _op, _outputs = _op_def_library._apply_op_helper(
2090 "DummyIterationCounter", name=name)
2091 _result = _outputs[:]
2092 if _execute.must_record_gradient():
2093 _attrs = ()
2094 _inputs_flat = _op.inputs
2095 _execute.record_gradient(
2096 "DummyIterationCounter", _inputs_flat, _attrs, _result)
2097 _result, = _result
2098 return _result
2100DummyIterationCounter = tf_export("raw_ops.DummyIterationCounter")(_ops.to_raw_op(dummy_iteration_counter))
2103def dummy_iteration_counter_eager_fallback(name, ctx):
2104 _inputs_flat = []
2105 _attrs = None
2106 _result = _execute.execute(b"DummyIterationCounter", 1, inputs=_inputs_flat,
2107 attrs=_attrs, ctx=ctx, name=name)
2108 if _execute.must_record_gradient():
2109 _execute.record_gradient(
2110 "DummyIterationCounter", _inputs_flat, _attrs, _result)
2111 _result, = _result
2112 return _result
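# A minimal, hedged sketch: the op just materializes a placeholder resource
# handle, consumed as the `iteration_counter` input of the
# DataServiceDataset* ops above.
def _example_dummy_iteration_counter():  # hypothetical helper, illustration only
  import tensorflow as tf
  return tf.raw_ops.DummyIterationCounter()  # scalar `resource` tensor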
2115def experimental_assert_next_dataset(input_dataset, transformations, output_types, output_shapes, name=None):
2116 r"""TODO: add doc.
2118 Args:
2119 input_dataset: A `Tensor` of type `variant`.
2120 transformations: A `Tensor` of type `string`.
2121 output_types: A list of `tf.DTypes` that has length `>= 1`.
2122 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
2123 name: A name for the operation (optional).
2125 Returns:
2126 A `Tensor` of type `variant`.
2127 """
2128 _ctx = _context._context or _context.context()
2129 tld = _ctx._thread_local_data
2130 if tld.is_eager:
2131 try:
2132 _result = pywrap_tfe.TFE_Py_FastPathExecute(
2133 _ctx, "ExperimentalAssertNextDataset", name, input_dataset,
2134 transformations, "output_types", output_types, "output_shapes",
2135 output_shapes)
2136 return _result
2137 except _core._NotOkStatusException as e:
2138 _ops.raise_from_not_ok_status(e, name)
2139 except _core._FallbackException:
2140 pass
2141 try:
2142 return experimental_assert_next_dataset_eager_fallback(
2143 input_dataset, transformations, output_types=output_types,
2144 output_shapes=output_shapes, name=name, ctx=_ctx)
2145 except _core._SymbolicException:
2146 pass # Add nodes to the TensorFlow graph.
2148 if not isinstance(output_types, (list, tuple)):
2149 raise TypeError(
2150 "Expected list for 'output_types' argument to "
2151 "'experimental_assert_next_dataset' Op, not %r." % output_types)
2152 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
2153 if not isinstance(output_shapes, (list, tuple)):
2154 raise TypeError(
2155 "Expected list for 'output_shapes' argument to "
2156 "'experimental_assert_next_dataset' Op, not %r." % output_shapes)
2157 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
2158 _, _, _op, _outputs = _op_def_library._apply_op_helper(
2159 "ExperimentalAssertNextDataset", input_dataset=input_dataset,
2160 transformations=transformations,
2161 output_types=output_types,
2162 output_shapes=output_shapes,
2163 name=name)
2164 _result = _outputs[:]
2165 if _execute.must_record_gradient():
2166 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
2167 _op.get_attr("output_shapes"))
2168 _inputs_flat = _op.inputs
2169 _execute.record_gradient(
2170 "ExperimentalAssertNextDataset", _inputs_flat, _attrs, _result)
2171 _result, = _result
2172 return _result
2174ExperimentalAssertNextDataset = tf_export("raw_ops.ExperimentalAssertNextDataset")(_ops.to_raw_op(experimental_assert_next_dataset))
2177def experimental_assert_next_dataset_eager_fallback(input_dataset, transformations, output_types, output_shapes, name, ctx):
2178 if not isinstance(output_types, (list, tuple)):
2179 raise TypeError(
2180 "Expected list for 'output_types' argument to "
2181 "'experimental_assert_next_dataset' Op, not %r." % output_types)
2182 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
2183 if not isinstance(output_shapes, (list, tuple)):
2184 raise TypeError(
2185 "Expected list for 'output_shapes' argument to "
2186 "'experimental_assert_next_dataset' Op, not %r." % output_shapes)
2187 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
2188 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
2189 transformations = _ops.convert_to_tensor(transformations, _dtypes.string)
2190 _inputs_flat = [input_dataset, transformations]
2191 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
2192 _result = _execute.execute(b"ExperimentalAssertNextDataset", 1,
2193 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
2194 name=name)
2195 if _execute.must_record_gradient():
2196 _execute.record_gradient(
2197 "ExperimentalAssertNextDataset", _inputs_flat, _attrs, _result)
2198 _result, = _result
2199 return _result
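# A minimal, hedged sketch: AssertNext wraps a dataset so that iteration fails
# unless the named transformations are the ones applied next in the pipeline
# (mainly used in tf.data optimization tests); the name here is illustrative.
def _example_assert_next_dataset():  # hypothetical helper, illustration only
  import tensorflow as tf
  ds = tf.data.Dataset.range(4)
  v = tf.raw_ops.ExperimentalAssertNextDataset(
      input_dataset=ds._variant_tensor,
      transformations=["Map"],  # transformation asserted to come next
      output_types=[tf.int64],
      output_shapes=[tf.TensorShape([])])
  return v  # a MapDataset built on `v` would satisfy the assertion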
2202def experimental_auto_shard_dataset(input_dataset, num_workers, index, output_types, output_shapes, auto_shard_policy=0, name=None):
2203 r"""Creates a dataset that shards the input dataset.
2205 Creates a dataset that shards the input dataset by num_workers, returning a
2206 sharded dataset for the index-th worker. This attempts to automatically shard
2207 a dataset by examining the Dataset graph and inserting a shard op before the
2208 inputs to a reader Dataset (e.g. CSVDataset, TFRecordDataset).
2210 This dataset raises a NotFound error if the input dataset cannot be sharded
2211 automatically.
2213 Args:
2214 input_dataset: A `Tensor` of type `variant`.
2215 A variant tensor representing the input dataset.
2216 num_workers: A `Tensor` of type `int64`.
2217 A scalar representing the number of workers to distribute this dataset across.
2218 index: A `Tensor` of type `int64`.
2219 A scalar representing the index of the current worker out of num_workers.
2220 output_types: A list of `tf.DTypes` that has length `>= 1`.
2221 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
2222 auto_shard_policy: An optional `int`. Defaults to `0`.
2223 name: A name for the operation (optional).
2225 Returns:
2226 A `Tensor` of type `variant`.
2227 """
2228 _ctx = _context._context or _context.context()
2229 tld = _ctx._thread_local_data
2230 if tld.is_eager:
2231 try:
2232 _result = pywrap_tfe.TFE_Py_FastPathExecute(
2233 _ctx, "ExperimentalAutoShardDataset", name, input_dataset,
2234 num_workers, index, "auto_shard_policy", auto_shard_policy,
2235 "output_types", output_types, "output_shapes", output_shapes)
2236 return _result
2237 except _core._NotOkStatusException as e:
2238 _ops.raise_from_not_ok_status(e, name)
2239 except _core._FallbackException:
2240 pass
2241 try:
2242 return experimental_auto_shard_dataset_eager_fallback(
2243 input_dataset, num_workers, index,
2244 auto_shard_policy=auto_shard_policy, output_types=output_types,
2245 output_shapes=output_shapes, name=name, ctx=_ctx)
2246 except _core._SymbolicException:
2247 pass # Add nodes to the TensorFlow graph.
2249 if not isinstance(output_types, (list, tuple)):
2250 raise TypeError(
2251 "Expected list for 'output_types' argument to "
2252 "'experimental_auto_shard_dataset' Op, not %r." % output_types)
2253 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
2254 if not isinstance(output_shapes, (list, tuple)):
2255 raise TypeError(
2256 "Expected list for 'output_shapes' argument to "
2257 "'experimental_auto_shard_dataset' Op, not %r." % output_shapes)
2258 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
2259 if auto_shard_policy is None:
2260 auto_shard_policy = 0
2261 auto_shard_policy = _execute.make_int(auto_shard_policy, "auto_shard_policy")
2262 _, _, _op, _outputs = _op_def_library._apply_op_helper(
2263 "ExperimentalAutoShardDataset", input_dataset=input_dataset,
2264 num_workers=num_workers, index=index,
2265 output_types=output_types,
2266 output_shapes=output_shapes,
2267 auto_shard_policy=auto_shard_policy,
2268 name=name)
2269 _result = _outputs[:]
2270 if _execute.must_record_gradient():
2271 _attrs = ("auto_shard_policy", _op._get_attr_int("auto_shard_policy"),
2272 "output_types", _op.get_attr("output_types"), "output_shapes",
2273 _op.get_attr("output_shapes"))
2274 _inputs_flat = _op.inputs
2275 _execute.record_gradient(
2276 "ExperimentalAutoShardDataset", _inputs_flat, _attrs, _result)
2277 _result, = _result
2278 return _result
2280ExperimentalAutoShardDataset = tf_export("raw_ops.ExperimentalAutoShardDataset")(_ops.to_raw_op(experimental_auto_shard_dataset))
2283def experimental_auto_shard_dataset_eager_fallback(input_dataset, num_workers, index, output_types, output_shapes, auto_shard_policy, name, ctx):
2284 if not isinstance(output_types, (list, tuple)):
2285 raise TypeError(
2286 "Expected list for 'output_types' argument to "
2287 "'experimental_auto_shard_dataset' Op, not %r." % output_types)
2288 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
2289 if not isinstance(output_shapes, (list, tuple)):
2290 raise TypeError(
2291 "Expected list for 'output_shapes' argument to "
2292 "'experimental_auto_shard_dataset' Op, not %r." % output_shapes)
2293 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
2294 if auto_shard_policy is None:
2295 auto_shard_policy = 0
2296 auto_shard_policy = _execute.make_int(auto_shard_policy, "auto_shard_policy")
2297 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
2298 num_workers = _ops.convert_to_tensor(num_workers, _dtypes.int64)
2299 index = _ops.convert_to_tensor(index, _dtypes.int64)
2300 _inputs_flat = [input_dataset, num_workers, index]
2301 _attrs = ("auto_shard_policy", auto_shard_policy, "output_types",
2302 output_types, "output_shapes", output_shapes)
2303 _result = _execute.execute(b"ExperimentalAutoShardDataset", 1,
2304 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
2305 name=name)
2306 if _execute.must_record_gradient():
2307 _execute.record_gradient(
2308 "ExperimentalAutoShardDataset", _inputs_flat, _attrs, _result)
2309 _result, = _result
2310 return _result
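# A minimal, hedged sketch: shards a dataset across two workers and keeps
# worker 0's shard. Policy values mirror tf.data.experimental.AutoShardPolicy
# (0=AUTO, 1=FILE, 2=DATA); the range dataset here is illustrative, so AUTO
# is assumed to fall back to DATA sharding.
def _example_auto_shard():  # hypothetical helper, illustration only
  import tensorflow as tf
  ds = tf.data.Dataset.range(8)
  return tf.raw_ops.ExperimentalAutoShardDataset(
      input_dataset=ds._variant_tensor,
      num_workers=2,
      index=0,
      output_types=[tf.int64],
      output_shapes=[tf.TensorShape([])],
      auto_shard_policy=0)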
2313def experimental_bytes_produced_stats_dataset(input_dataset, tag, output_types, output_shapes, name=None):
2314 r"""Records the bytes size of each element of `input_dataset` in a StatsAggregator.
2316 Args:
2317 input_dataset: A `Tensor` of type `variant`.
2318 tag: A `Tensor` of type `string`.
2319 output_types: A list of `tf.DTypes` that has length `>= 1`.
2320 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
2321 name: A name for the operation (optional).
2323 Returns:
2324 A `Tensor` of type `variant`.
2325 """
2326 _ctx = _context._context or _context.context()
2327 tld = _ctx._thread_local_data
2328 if tld.is_eager:
2329 try:
2330 _result = pywrap_tfe.TFE_Py_FastPathExecute(
2331 _ctx, "ExperimentalBytesProducedStatsDataset", name, input_dataset,
2332 tag, "output_types", output_types, "output_shapes", output_shapes)
2333 return _result
2334 except _core._NotOkStatusException as e:
2335 _ops.raise_from_not_ok_status(e, name)
2336 except _core._FallbackException:
2337 pass
2338 try:
2339 return experimental_bytes_produced_stats_dataset_eager_fallback(
2340 input_dataset, tag, output_types=output_types,
2341 output_shapes=output_shapes, name=name, ctx=_ctx)
2342 except _core._SymbolicException:
2343 pass # Add nodes to the TensorFlow graph.
2345 if not isinstance(output_types, (list, tuple)):
2346 raise TypeError(
2347 "Expected list for 'output_types' argument to "
2348 "'experimental_bytes_produced_stats_dataset' Op, not %r." % output_types)
2349 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
2350 if not isinstance(output_shapes, (list, tuple)):
2351 raise TypeError(
2352 "Expected list for 'output_shapes' argument to "
2353 "'experimental_bytes_produced_stats_dataset' Op, not %r." % output_shapes)
2354 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
2355 _, _, _op, _outputs = _op_def_library._apply_op_helper(
2356 "ExperimentalBytesProducedStatsDataset", input_dataset=input_dataset,
2357 tag=tag,
2358 output_types=output_types,
2359 output_shapes=output_shapes,
2360 name=name)
2361 _result = _outputs[:]
2362 if _execute.must_record_gradient():
2363 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
2364 _op.get_attr("output_shapes"))
2365 _inputs_flat = _op.inputs
2366 _execute.record_gradient(
2367 "ExperimentalBytesProducedStatsDataset", _inputs_flat, _attrs, _result)
2368 _result, = _result
2369 return _result
2371ExperimentalBytesProducedStatsDataset = tf_export("raw_ops.ExperimentalBytesProducedStatsDataset")(_ops.to_raw_op(experimental_bytes_produced_stats_dataset))
2374def experimental_bytes_produced_stats_dataset_eager_fallback(input_dataset, tag, output_types, output_shapes, name, ctx):
2375 if not isinstance(output_types, (list, tuple)):
2376 raise TypeError(
2377 "Expected list for 'output_types' argument to "
2378 "'experimental_bytes_produced_stats_dataset' Op, not %r." % output_types)
2379 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
2380 if not isinstance(output_shapes, (list, tuple)):
2381 raise TypeError(
2382 "Expected list for 'output_shapes' argument to "
2383 "'experimental_bytes_produced_stats_dataset' Op, not %r." % output_shapes)
2384 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
2385 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
2386 tag = _ops.convert_to_tensor(tag, _dtypes.string)
2387 _inputs_flat = [input_dataset, tag]
2388 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
2389 _result = _execute.execute(b"ExperimentalBytesProducedStatsDataset", 1,
2390 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
2391 name=name)
2392 if _execute.must_record_gradient():
2393 _execute.record_gradient(
2394 "ExperimentalBytesProducedStatsDataset", _inputs_flat, _attrs, _result)
2395 _result, = _result
2396 return _result
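# A minimal, hedged sketch: records each element's byte size under `tag`; the
# statistics only surface when a StatsAggregator is associated with the
# iterator, which this sketch assumes.
def _example_bytes_produced_stats():  # hypothetical helper, illustration only
  import tensorflow as tf
  ds = tf.data.Dataset.range(4)
  return tf.raw_ops.ExperimentalBytesProducedStatsDataset(
      input_dataset=ds._variant_tensor,
      tag="bytes_produced",  # key under which byte counts are recorded
      output_types=[tf.int64],
      output_shapes=[tf.TensorShape([])])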
2399def experimental_csv_dataset(filenames, compression_type, buffer_size, header, field_delim, use_quote_delim, na_value, select_cols, record_defaults, output_shapes, name=None):
2400 r"""TODO: add doc.
2402 Args:
2403 filenames: A `Tensor` of type `string`.
2404 compression_type: A `Tensor` of type `string`.
2405 buffer_size: A `Tensor` of type `int64`.
2406 header: A `Tensor` of type `bool`.
2407 field_delim: A `Tensor` of type `string`.
2408 use_quote_delim: A `Tensor` of type `bool`.
2409 na_value: A `Tensor` of type `string`.
2410 select_cols: A `Tensor` of type `int64`.
2411 record_defaults: A list of `Tensor` objects with types from: `float32`, `float64`, `int32`, `int64`, `string`.
2412 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
2413 name: A name for the operation (optional).
2415 Returns:
2416 A `Tensor` of type `variant`.
2417 """
2418 _ctx = _context._context or _context.context()
2419 tld = _ctx._thread_local_data
2420 if tld.is_eager:
2421 try:
2422 _result = pywrap_tfe.TFE_Py_FastPathExecute(
2423 _ctx, "ExperimentalCSVDataset", name, filenames, compression_type,
2424 buffer_size, header, field_delim, use_quote_delim, na_value,
2425 select_cols, record_defaults, "output_shapes", output_shapes)
2426 return _result
2427 except _core._NotOkStatusException as e:
2428 _ops.raise_from_not_ok_status(e, name)
2429 except _core._FallbackException:
2430 pass
2431 try:
2432 return experimental_csv_dataset_eager_fallback(
2433 filenames, compression_type, buffer_size, header, field_delim,
2434 use_quote_delim, na_value, select_cols, record_defaults,
2435 output_shapes=output_shapes, name=name, ctx=_ctx)
2436 except _core._SymbolicException:
2437 pass # Add nodes to the TensorFlow graph.
2438 # Add nodes to the TensorFlow graph.
2439 if not isinstance(output_shapes, (list, tuple)):
2440 raise TypeError(
2441 "Expected list for 'output_shapes' argument to "
2442 "'experimental_csv_dataset' Op, not %r." % output_shapes)
2443 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
2444 _, _, _op, _outputs = _op_def_library._apply_op_helper(
2445 "ExperimentalCSVDataset", filenames=filenames,
2446 compression_type=compression_type,
2447 buffer_size=buffer_size, header=header,
2448 field_delim=field_delim,
2449 use_quote_delim=use_quote_delim,
2450 na_value=na_value, select_cols=select_cols,
2451 record_defaults=record_defaults,
2452 output_shapes=output_shapes, name=name)
2453 _result = _outputs[:]
2454 if _execute.must_record_gradient():
2455 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
2456 _op.get_attr("output_shapes"))
2457 _inputs_flat = _op.inputs
2458 _execute.record_gradient(
2459 "ExperimentalCSVDataset", _inputs_flat, _attrs, _result)
2460 _result, = _result
2461 return _result
2463ExperimentalCSVDataset = tf_export("raw_ops.ExperimentalCSVDataset")(_ops.to_raw_op(experimental_csv_dataset))
2466def experimental_csv_dataset_eager_fallback(filenames, compression_type, buffer_size, header, field_delim, use_quote_delim, na_value, select_cols, record_defaults, output_shapes, name, ctx):
2467 if not isinstance(output_shapes, (list, tuple)):
2468 raise TypeError(
2469 "Expected list for 'output_shapes' argument to "
2470 "'experimental_csv_dataset' Op, not %r." % output_shapes)
2471 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
2472 _attr_output_types, record_defaults = _execute.convert_to_mixed_eager_tensors(record_defaults, ctx)
2473 filenames = _ops.convert_to_tensor(filenames, _dtypes.string)
2474 compression_type = _ops.convert_to_tensor(compression_type, _dtypes.string)
2475 buffer_size = _ops.convert_to_tensor(buffer_size, _dtypes.int64)
2476 header = _ops.convert_to_tensor(header, _dtypes.bool)
2477 field_delim = _ops.convert_to_tensor(field_delim, _dtypes.string)
2478 use_quote_delim = _ops.convert_to_tensor(use_quote_delim, _dtypes.bool)
2479 na_value = _ops.convert_to_tensor(na_value, _dtypes.string)
2480 select_cols = _ops.convert_to_tensor(select_cols, _dtypes.int64)
2481 _inputs_flat = [filenames, compression_type, buffer_size, header, field_delim, use_quote_delim, na_value, select_cols] + list(record_defaults)
2482 _attrs = ("output_types", _attr_output_types, "output_shapes",
2483 output_shapes)
2484 _result = _execute.execute(b"ExperimentalCSVDataset", 1,
2485 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
2486 name=name)
2487 if _execute.must_record_gradient():
2488 _execute.record_gradient(
2489 "ExperimentalCSVDataset", _inputs_flat, _attrs, _result)
2490 _result, = _result
2491 return _result
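# Editor's note: a minimal sketch (not machine generated) of building the
# ExperimentalCSVDataset variant. The file path is hypothetical;
# `record_defaults` fixes the per-column dtypes (here one float column and one
# string column), and an empty `select_cols` means "use all columns".
def _example_csv_dataset():
  import tensorflow as tf
  return experimental_csv_dataset(
      filenames=tf.constant(["/tmp/example.csv"]),  # hypothetical path
      compression_type=tf.constant(""),             # "", "ZLIB", or "GZIP"
      buffer_size=tf.constant(4 * 1024 * 1024, tf.int64),
      header=tf.constant(True),
      field_delim=tf.constant(","),
      use_quote_delim=tf.constant(True),
      na_value=tf.constant(""),
      select_cols=tf.constant([], tf.int64),
      record_defaults=[tf.constant([0.0]), tf.constant([""])],
      output_shapes=[[], []])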
2494def experimental_choose_fastest_dataset(input_datasets, num_experiments, output_types, output_shapes, name=None):
2495 r"""TODO: add doc.
2497 Args:
2498 input_datasets: A list of at least 2 `Tensor` objects with type `variant`.
2499 num_experiments: An `int`.
2500 output_types: A list of `tf.DTypes` that has length `>= 1`.
2501 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
2502 name: A name for the operation (optional).
2504 Returns:
2505 A `Tensor` of type `variant`.
2506 """
2507 _ctx = _context._context or _context.context()
2508 tld = _ctx._thread_local_data
2509 if tld.is_eager:
2510 try:
2511 _result = pywrap_tfe.TFE_Py_FastPathExecute(
2512 _ctx, "ExperimentalChooseFastestDataset", name, input_datasets,
2513 "num_experiments", num_experiments, "output_types", output_types,
2514 "output_shapes", output_shapes)
2515 return _result
2516 except _core._NotOkStatusException as e:
2517 _ops.raise_from_not_ok_status(e, name)
2518 except _core._FallbackException:
2519 pass
2520 try:
2521 return experimental_choose_fastest_dataset_eager_fallback(
2522 input_datasets, num_experiments=num_experiments,
2523 output_types=output_types, output_shapes=output_shapes, name=name,
2524 ctx=_ctx)
2525 except _core._SymbolicException:
2526 pass # Add nodes to the TensorFlow graph.
2527 # Add nodes to the TensorFlow graph.
2528 if not isinstance(input_datasets, (list, tuple)):
2529 raise TypeError(
2530 "Expected list for 'input_datasets' argument to "
2531 "'experimental_choose_fastest_dataset' Op, not %r." % input_datasets)
2532 _attr_N = len(input_datasets)
2533 num_experiments = _execute.make_int(num_experiments, "num_experiments")
2534 if not isinstance(output_types, (list, tuple)):
2535 raise TypeError(
2536 "Expected list for 'output_types' argument to "
2537 "'experimental_choose_fastest_dataset' Op, not %r." % output_types)
2538 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
2539 if not isinstance(output_shapes, (list, tuple)):
2540 raise TypeError(
2541 "Expected list for 'output_shapes' argument to "
2542 "'experimental_choose_fastest_dataset' Op, not %r." % output_shapes)
2543 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
2544 _, _, _op, _outputs = _op_def_library._apply_op_helper(
2545 "ExperimentalChooseFastestDataset", input_datasets=input_datasets,
2546 num_experiments=num_experiments,
2547 output_types=output_types,
2548 output_shapes=output_shapes,
2549 name=name)
2550 _result = _outputs[:]
2551 if _execute.must_record_gradient():
2552 _attrs = ("N", _op._get_attr_int("N"), "num_experiments",
2553 _op._get_attr_int("num_experiments"), "output_types",
2554 _op.get_attr("output_types"), "output_shapes",
2555 _op.get_attr("output_shapes"))
2556 _inputs_flat = _op.inputs
2557 _execute.record_gradient(
2558 "ExperimentalChooseFastestDataset", _inputs_flat, _attrs, _result)
2559 _result, = _result
2560 return _result
2562ExperimentalChooseFastestDataset = tf_export("raw_ops.ExperimentalChooseFastestDataset")(_ops.to_raw_op(experimental_choose_fastest_dataset))
2565def experimental_choose_fastest_dataset_eager_fallback(input_datasets, num_experiments, output_types, output_shapes, name, ctx):
2566 if not isinstance(input_datasets, (list, tuple)):
2567 raise TypeError(
2568 "Expected list for 'input_datasets' argument to "
2569 "'experimental_choose_fastest_dataset' Op, not %r." % input_datasets)
2570 _attr_N = len(input_datasets)
2571 num_experiments = _execute.make_int(num_experiments, "num_experiments")
2572 if not isinstance(output_types, (list, tuple)):
2573 raise TypeError(
2574 "Expected list for 'output_types' argument to "
2575 "'experimental_choose_fastest_dataset' Op, not %r." % output_types)
2576 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
2577 if not isinstance(output_shapes, (list, tuple)):
2578 raise TypeError(
2579 "Expected list for 'output_shapes' argument to "
2580 "'experimental_choose_fastest_dataset' Op, not %r." % output_shapes)
2581 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
2582 input_datasets = _ops.convert_n_to_tensor(input_datasets, _dtypes.variant)
2583 _inputs_flat = list(input_datasets)
2584 _attrs = ("N", _attr_N, "num_experiments", num_experiments, "output_types",
2585 output_types, "output_shapes", output_shapes)
2586 _result = _execute.execute(b"ExperimentalChooseFastestDataset", 1,
2587 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
2588 name=name)
2589 if _execute.must_record_gradient():
2590 _execute.record_gradient(
2591 "ExperimentalChooseFastestDataset", _inputs_flat, _attrs, _result)
2592 _result, = _result
2593 return _result
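# Editor's note: an illustrative sketch (not machine generated). The candidate
# datasets must produce identical elements; after `num_experiments` trial
# iterations, the fastest pipeline serves the remaining elements.
# `_variant_tensor` is a private attribute, used here only for illustration.
def _example_choose_fastest_dataset():
  import tensorflow as tf
  a = tf.data.Dataset.range(100).batch(10)
  b = tf.data.Dataset.range(100).batch(10)  # same elements, separate pipeline
  return experimental_choose_fastest_dataset(
      [a._variant_tensor, b._variant_tensor],
      num_experiments=4,
      output_types=[tf.int64],
      output_shapes=[[None]])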
2596def experimental_dataset_cardinality(input_dataset, name=None):
2597 r"""Returns the cardinality of `input_dataset`.
2599 Returns the cardinality of `input_dataset`.
2601 Args:
2602 input_dataset: A `Tensor` of type `variant`.
2603 A variant tensor representing the dataset to return cardinality for.
2604 name: A name for the operation (optional).
2606 Returns:
2607 A `Tensor` of type `int64`.
2608 """
2609 _ctx = _context._context or _context.context()
2610 tld = _ctx._thread_local_data
2611 if tld.is_eager:
2612 try:
2613 _result = pywrap_tfe.TFE_Py_FastPathExecute(
2614 _ctx, "ExperimentalDatasetCardinality", name, input_dataset)
2615 return _result
2616 except _core._NotOkStatusException as e:
2617 _ops.raise_from_not_ok_status(e, name)
2618 except _core._FallbackException:
2619 pass
2620 try:
2621 return experimental_dataset_cardinality_eager_fallback(
2622 input_dataset, name=name, ctx=_ctx)
2623 except _core._SymbolicException:
2624 pass # Add nodes to the TensorFlow graph.
2625 # Add nodes to the TensorFlow graph.
2626 _, _, _op, _outputs = _op_def_library._apply_op_helper(
2627 "ExperimentalDatasetCardinality", input_dataset=input_dataset,
2628 name=name)
2629 _result = _outputs[:]
2630 if _execute.must_record_gradient():
2631 _attrs = ()
2632 _inputs_flat = _op.inputs
2633 _execute.record_gradient(
2634 "ExperimentalDatasetCardinality", _inputs_flat, _attrs, _result)
2635 _result, = _result
2636 return _result
2638ExperimentalDatasetCardinality = tf_export("raw_ops.ExperimentalDatasetCardinality")(_ops.to_raw_op(experimental_dataset_cardinality))
2641def experimental_dataset_cardinality_eager_fallback(input_dataset, name, ctx):
2642 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
2643 _inputs_flat = [input_dataset]
2644 _attrs = None
2645 _result = _execute.execute(b"ExperimentalDatasetCardinality", 1,
2646 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
2647 name=name)
2648 if _execute.must_record_gradient():
2649 _execute.record_gradient(
2650 "ExperimentalDatasetCardinality", _inputs_flat, _attrs, _result)
2651 _result, = _result
2652 return _result
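# Editor's note: a small sketch (not machine generated) of querying a
# dataset's cardinality through the wrapper above, assuming eager execution;
# `_variant_tensor` is a private attribute, used here only for illustration.
def _example_dataset_cardinality():
  import tensorflow as tf
  ds = tf.data.Dataset.range(5)
  cardinality = experimental_dataset_cardinality(ds._variant_tensor)
  return int(cardinality)  # 5; infinite/unknown pipelines yield sentinel values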
2655def experimental_dataset_to_tf_record(input_dataset, filename, compression_type, name=None):
2656 r"""Writes the given dataset to the given file using the TFRecord format.
2658 Args:
2659 input_dataset: A `Tensor` of type `variant`.
2660 A variant tensor representing the dataset to write.
2661 filename: A `Tensor` of type `string`.
2662 A scalar string tensor representing the filename to use.
2663 compression_type: A `Tensor` of type `string`.
2664 A scalar string tensor containing either (i) the empty string (no
2665 compression), (ii) "ZLIB", or (iii) "GZIP".
2666 name: A name for the operation (optional).
2668 Returns:
2669 The created Operation.
2670 """
2671 _ctx = _context._context or _context.context()
2672 tld = _ctx._thread_local_data
2673 if tld.is_eager:
2674 try:
2675 _result = pywrap_tfe.TFE_Py_FastPathExecute(
2676 _ctx, "ExperimentalDatasetToTFRecord", name, input_dataset, filename,
2677 compression_type)
2678 return _result
2679 except _core._NotOkStatusException as e:
2680 _ops.raise_from_not_ok_status(e, name)
2681 except _core._FallbackException:
2682 pass
2683 try:
2684 return experimental_dataset_to_tf_record_eager_fallback(
2685 input_dataset, filename, compression_type, name=name, ctx=_ctx)
2686 except _core._SymbolicException:
2687 pass # Add nodes to the TensorFlow graph.
2688 # Add nodes to the TensorFlow graph.
2689 _, _, _op, _outputs = _op_def_library._apply_op_helper(
2690 "ExperimentalDatasetToTFRecord", input_dataset=input_dataset,
2691 filename=filename,
2692 compression_type=compression_type,
2693 name=name)
2694 return _op
2695ExperimentalDatasetToTFRecord = tf_export("raw_ops.ExperimentalDatasetToTFRecord")(_ops.to_raw_op(experimental_dataset_to_tf_record))
2698def experimental_dataset_to_tf_record_eager_fallback(input_dataset, filename, compression_type, name, ctx):
2699 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
2700 filename = _ops.convert_to_tensor(filename, _dtypes.string)
2701 compression_type = _ops.convert_to_tensor(compression_type, _dtypes.string)
2702 _inputs_flat = [input_dataset, filename, compression_type]
2703 _attrs = None
2704 _result = _execute.execute(b"ExperimentalDatasetToTFRecord", 0,
2705 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
2706 name=name)
2707 _result = None
2708 return _result
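# Editor's note: an illustrative sketch (not machine generated). The dataset
# must produce scalar string elements; the output path is hypothetical and
# `_variant_tensor` is a private attribute, used here only for illustration.
def _example_dataset_to_tf_record():
  import tensorflow as tf
  ds = tf.data.Dataset.from_tensor_slices([b"rec1", b"rec2", b"rec3"])
  experimental_dataset_to_tf_record(
      ds._variant_tensor,
      filename=tf.constant("/tmp/example.tfrecord"),  # hypothetical path
      compression_type=tf.constant("GZIP"))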
2711def experimental_dense_to_sparse_batch_dataset(input_dataset, batch_size, row_shape, output_types, output_shapes, name=None):
2712 r"""Creates a dataset that batches input elements into a SparseTensor.
2714 Args:
2715 input_dataset: A `Tensor` of type `variant`.
2716 A handle to an input dataset. Must have a single component.
2717 batch_size: A `Tensor` of type `int64`.
2718 A scalar representing the number of elements to accumulate in a
2719 batch.
2720 row_shape: A `Tensor` of type `int64`.
2721 A vector representing the dense shape of each row in the produced
2722 SparseTensor. The shape may be partially specified, using `-1` to indicate
2723 that a particular dimension should use the maximum size of all batch elements.
2724 output_types: A list of `tf.DTypes` that has length `>= 1`.
2725 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
2726 name: A name for the operation (optional).
2728 Returns:
2729 A `Tensor` of type `variant`.
2730 """
2731 _ctx = _context._context or _context.context()
2732 tld = _ctx._thread_local_data
2733 if tld.is_eager:
2734 try:
2735 _result = pywrap_tfe.TFE_Py_FastPathExecute(
2736 _ctx, "ExperimentalDenseToSparseBatchDataset", name, input_dataset,
2737 batch_size, row_shape, "output_types", output_types, "output_shapes",
2738 output_shapes)
2739 return _result
2740 except _core._NotOkStatusException as e:
2741 _ops.raise_from_not_ok_status(e, name)
2742 except _core._FallbackException:
2743 pass
2744 try:
2745 return experimental_dense_to_sparse_batch_dataset_eager_fallback(
2746 input_dataset, batch_size, row_shape, output_types=output_types,
2747 output_shapes=output_shapes, name=name, ctx=_ctx)
2748 except _core._SymbolicException:
2749 pass # Add nodes to the TensorFlow graph.
2750 # Add nodes to the TensorFlow graph.
2751 if not isinstance(output_types, (list, tuple)):
2752 raise TypeError(
2753 "Expected list for 'output_types' argument to "
2754 "'experimental_dense_to_sparse_batch_dataset' Op, not %r." % output_types)
2755 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
2756 if not isinstance(output_shapes, (list, tuple)):
2757 raise TypeError(
2758 "Expected list for 'output_shapes' argument to "
2759 "'experimental_dense_to_sparse_batch_dataset' Op, not %r." % output_shapes)
2760 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
2761 _, _, _op, _outputs = _op_def_library._apply_op_helper(
2762 "ExperimentalDenseToSparseBatchDataset", input_dataset=input_dataset,
2763 batch_size=batch_size,
2764 row_shape=row_shape,
2765 output_types=output_types,
2766 output_shapes=output_shapes,
2767 name=name)
2768 _result = _outputs[:]
2769 if _execute.must_record_gradient():
2770 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
2771 _op.get_attr("output_shapes"))
2772 _inputs_flat = _op.inputs
2773 _execute.record_gradient(
2774 "ExperimentalDenseToSparseBatchDataset", _inputs_flat, _attrs, _result)
2775 _result, = _result
2776 return _result
2778ExperimentalDenseToSparseBatchDataset = tf_export("raw_ops.ExperimentalDenseToSparseBatchDataset")(_ops.to_raw_op(experimental_dense_to_sparse_batch_dataset))
2781def experimental_dense_to_sparse_batch_dataset_eager_fallback(input_dataset, batch_size, row_shape, output_types, output_shapes, name, ctx):
2782 if not isinstance(output_types, (list, tuple)):
2783 raise TypeError(
2784 "Expected list for 'output_types' argument to "
2785 "'experimental_dense_to_sparse_batch_dataset' Op, not %r." % output_types)
2786 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
2787 if not isinstance(output_shapes, (list, tuple)):
2788 raise TypeError(
2789 "Expected list for 'output_shapes' argument to "
2790 "'experimental_dense_to_sparse_batch_dataset' Op, not %r." % output_shapes)
2791 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
2792 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
2793 batch_size = _ops.convert_to_tensor(batch_size, _dtypes.int64)
2794 row_shape = _ops.convert_to_tensor(row_shape, _dtypes.int64)
2795 _inputs_flat = [input_dataset, batch_size, row_shape]
2796 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
2797 _result = _execute.execute(b"ExperimentalDenseToSparseBatchDataset", 1,
2798 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
2799 name=name)
2800 if _execute.must_record_gradient():
2801 _execute.record_gradient(
2802 "ExperimentalDenseToSparseBatchDataset", _inputs_flat, _attrs, _result)
2803 _result, = _result
2804 return _result
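# Editor's note: rather than driving the raw op directly, this sketch (not
# machine generated) uses `tf.data.experimental.dense_to_sparse_batch`, the
# high-level transformation built on this op family.
def _example_dense_to_sparse_batch():
  import tensorflow as tf
  # Variable-length rows: [0], [1, 1], [2, 2, 2], [3, 3, 3, 3].
  ds = tf.data.Dataset.range(4).map(lambda x: tf.fill([x + 1], x))
  # `-1` in `row_shape` pads each batch to the length of its longest row.
  return ds.apply(
      tf.data.experimental.dense_to_sparse_batch(batch_size=2, row_shape=[-1]))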
2807def experimental_directed_interleave_dataset(selector_input_dataset, data_input_datasets, output_types, output_shapes, name=None):
2808 r"""A substitute for `InterleaveDataset` on a fixed list of `N` datasets.
2810 Args:
2811 selector_input_dataset: A `Tensor` of type `variant`.
2812 A dataset of scalar `DT_INT64` elements that determines which of the
2813 `N` data inputs should produce the next output element.
2814 data_input_datasets: A list of at least 1 `Tensor` objects with type `variant`.
2815 `N` datasets with the same type that will be interleaved according to
2816 the values of `selector_input_dataset`.
2817 output_types: A list of `tf.DTypes` that has length `>= 1`.
2818 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
2819 name: A name for the operation (optional).
2821 Returns:
2822 A `Tensor` of type `variant`.
2823 """
2824 _ctx = _context._context or _context.context()
2825 tld = _ctx._thread_local_data
2826 if tld.is_eager:
2827 try:
2828 _result = pywrap_tfe.TFE_Py_FastPathExecute(
2829 _ctx, "ExperimentalDirectedInterleaveDataset", name,
2830 selector_input_dataset, data_input_datasets, "output_types",
2831 output_types, "output_shapes", output_shapes)
2832 return _result
2833 except _core._NotOkStatusException as e:
2834 _ops.raise_from_not_ok_status(e, name)
2835 except _core._FallbackException:
2836 pass
2837 try:
2838 return experimental_directed_interleave_dataset_eager_fallback(
2839 selector_input_dataset, data_input_datasets,
2840 output_types=output_types, output_shapes=output_shapes, name=name,
2841 ctx=_ctx)
2842 except _core._SymbolicException:
2843 pass # Add nodes to the TensorFlow graph.
2844 # Add nodes to the TensorFlow graph.
2845 if not isinstance(data_input_datasets, (list, tuple)):
2846 raise TypeError(
2847 "Expected list for 'data_input_datasets' argument to "
2848 "'experimental_directed_interleave_dataset' Op, not %r." % data_input_datasets)
2849 _attr_N = len(data_input_datasets)
2850 if not isinstance(output_types, (list, tuple)):
2851 raise TypeError(
2852 "Expected list for 'output_types' argument to "
2853 "'experimental_directed_interleave_dataset' Op, not %r." % output_types)
2854 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
2855 if not isinstance(output_shapes, (list, tuple)):
2856 raise TypeError(
2857 "Expected list for 'output_shapes' argument to "
2858 "'experimental_directed_interleave_dataset' Op, not %r." % output_shapes)
2859 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
2860 _, _, _op, _outputs = _op_def_library._apply_op_helper(
2861 "ExperimentalDirectedInterleaveDataset", selector_input_dataset=selector_input_dataset,
2862 data_input_datasets=data_input_datasets,
2863 output_types=output_types,
2864 output_shapes=output_shapes,
2865 name=name)
2866 _result = _outputs[:]
2867 if _execute.must_record_gradient():
2868 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
2869 _op.get_attr("output_shapes"), "N", _op._get_attr_int("N"))
2870 _inputs_flat = _op.inputs
2871 _execute.record_gradient(
2872 "ExperimentalDirectedInterleaveDataset", _inputs_flat, _attrs, _result)
2873 _result, = _result
2874 return _result
2876ExperimentalDirectedInterleaveDataset = tf_export("raw_ops.ExperimentalDirectedInterleaveDataset")(_ops.to_raw_op(experimental_directed_interleave_dataset))
2879def experimental_directed_interleave_dataset_eager_fallback(selector_input_dataset, data_input_datasets, output_types, output_shapes, name, ctx):
2880 if not isinstance(data_input_datasets, (list, tuple)):
2881 raise TypeError(
2882 "Expected list for 'data_input_datasets' argument to "
2883 "'experimental_directed_interleave_dataset' Op, not %r." % data_input_datasets)
2884 _attr_N = len(data_input_datasets)
2885 if not isinstance(output_types, (list, tuple)):
2886 raise TypeError(
2887 "Expected list for 'output_types' argument to "
2888 "'experimental_directed_interleave_dataset' Op, not %r." % output_types)
2889 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
2890 if not isinstance(output_shapes, (list, tuple)):
2891 raise TypeError(
2892 "Expected list for 'output_shapes' argument to "
2893 "'experimental_directed_interleave_dataset' Op, not %r." % output_shapes)
2894 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
2895 selector_input_dataset = _ops.convert_to_tensor(selector_input_dataset, _dtypes.variant)
2896 data_input_datasets = _ops.convert_n_to_tensor(data_input_datasets, _dtypes.variant)
2897 _inputs_flat = [selector_input_dataset] + list(data_input_datasets)
2898 _attrs = ("output_types", output_types, "output_shapes", output_shapes, "N",
2899 _attr_N)
2900 _result = _execute.execute(b"ExperimentalDirectedInterleaveDataset", 1,
2901 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
2902 name=name)
2903 if _execute.must_record_gradient():
2904 _execute.record_gradient(
2905 "ExperimentalDirectedInterleaveDataset", _inputs_flat, _attrs, _result)
2906 _result, = _result
2907 return _result
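# Editor's note: a sketch (not machine generated) using the public
# `tf.data.Dataset.choose_from_datasets` API, which exposes the
# directed-interleave behavior described above. The selector dataset decides
# which input supplies each output element.
def _example_directed_interleave():
  import tensorflow as tf
  letters = tf.data.Dataset.from_tensor_slices([b"a", b"b"]).repeat()
  numbers = tf.data.Dataset.from_tensor_slices([b"1", b"2"]).repeat()
  selector = tf.data.Dataset.from_tensor_slices(
      tf.constant([0, 1, 0, 1], tf.int64))
  # Yields b"a", b"1", b"b", b"2".
  return tf.data.Dataset.choose_from_datasets([letters, numbers], selector)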
2910def experimental_group_by_reducer_dataset(input_dataset, key_func_other_arguments, init_func_other_arguments, reduce_func_other_arguments, finalize_func_other_arguments, key_func, init_func, reduce_func, finalize_func, output_types, output_shapes, name=None):
2911 r"""Creates a dataset that computes a group-by on `input_dataset`.
2913 Creates a dataset that computes a group-by on `input_dataset`.
2915 Args:
2916 input_dataset: A `Tensor` of type `variant`.
2917 A variant tensor representing the input dataset.
2918 key_func_other_arguments: A list of `Tensor` objects.
2919 A list of tensors, typically values that were captured when
2920 building a closure for `key_func`.
2921 init_func_other_arguments: A list of `Tensor` objects.
2922 A list of tensors, typically values that were captured when
2923 building a closure for `init_func`.
2924 reduce_func_other_arguments: A list of `Tensor` objects.
2925 A list of tensors, typically values that were captured when
2926 building a closure for `reduce_func`.
2927 finalize_func_other_arguments: A list of `Tensor` objects.
2928 A list of tensors, typically values that were captured when
2929 building a closure for `finalize_func`.
2930 key_func: A function decorated with @Defun.
2931 A function mapping an element of `input_dataset`, concatenated
2932 with `key_func_other_arguments` to a scalar value of type DT_INT64.
2933 init_func: A function decorated with @Defun.
2934 A function mapping a key of type DT_INT64, concatenated with
2935 `init_func_other_arguments` to the initial reducer state.
2936 reduce_func: A function decorated with @Defun.
2937 A function mapping the current reducer state and an element of `input_dataset`,
2938 concatenated with `reduce_func_other_arguments` to a new reducer state.
2939 finalize_func: A function decorated with @Defun.
2940 A function mapping the final reducer state to an output element.
2941 output_types: A list of `tf.DTypes` that has length `>= 1`.
2942 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
2943 name: A name for the operation (optional).
2945 Returns:
2946 A `Tensor` of type `variant`.
2947 """
2948 _ctx = _context._context or _context.context()
2949 tld = _ctx._thread_local_data
2950 if tld.is_eager:
2951 try:
2952 _result = pywrap_tfe.TFE_Py_FastPathExecute(
2953 _ctx, "ExperimentalGroupByReducerDataset", name, input_dataset,
2954 key_func_other_arguments, init_func_other_arguments,
2955 reduce_func_other_arguments, finalize_func_other_arguments,
2956 "key_func", key_func, "init_func", init_func, "reduce_func",
2957 reduce_func, "finalize_func", finalize_func, "output_types",
2958 output_types, "output_shapes", output_shapes)
2959 return _result
2960 except _core._NotOkStatusException as e:
2961 _ops.raise_from_not_ok_status(e, name)
2962 except _core._FallbackException:
2963 pass
2964 try:
2965 return experimental_group_by_reducer_dataset_eager_fallback(
2966 input_dataset, key_func_other_arguments, init_func_other_arguments,
2967 reduce_func_other_arguments, finalize_func_other_arguments,
2968 key_func=key_func, init_func=init_func, reduce_func=reduce_func,
2969 finalize_func=finalize_func, output_types=output_types,
2970 output_shapes=output_shapes, name=name, ctx=_ctx)
2971 except _core._SymbolicException:
2972 pass # Add nodes to the TensorFlow graph.
2973 # Add nodes to the TensorFlow graph.
2974 if not isinstance(output_types, (list, tuple)):
2975 raise TypeError(
2976 "Expected list for 'output_types' argument to "
2977 "'experimental_group_by_reducer_dataset' Op, not %r." % output_types)
2978 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
2979 if not isinstance(output_shapes, (list, tuple)):
2980 raise TypeError(
2981 "Expected list for 'output_shapes' argument to "
2982 "'experimental_group_by_reducer_dataset' Op, not %r." % output_shapes)
2983 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
2984 _, _, _op, _outputs = _op_def_library._apply_op_helper(
2985 "ExperimentalGroupByReducerDataset", input_dataset=input_dataset,
2986 key_func_other_arguments=key_func_other_arguments,
2987 init_func_other_arguments=init_func_other_arguments,
2988 reduce_func_other_arguments=reduce_func_other_arguments,
2989 finalize_func_other_arguments=finalize_func_other_arguments,
2990 key_func=key_func,
2991 init_func=init_func,
2992 reduce_func=reduce_func,
2993 finalize_func=finalize_func,
2994 output_types=output_types,
2995 output_shapes=output_shapes,
2996 name=name)
2997 _result = _outputs[:]
2998 if _execute.must_record_gradient():
2999 _attrs = ("key_func", _op.get_attr("key_func"), "init_func",
3000 _op.get_attr("init_func"), "reduce_func",
3001 _op.get_attr("reduce_func"), "finalize_func",
3002 _op.get_attr("finalize_func"), "Tkey_func_other_arguments",
3003 _op.get_attr("Tkey_func_other_arguments"),
3004 "Tinit_func_other_arguments",
3005 _op.get_attr("Tinit_func_other_arguments"),
3006 "Treduce_func_other_arguments",
3007 _op.get_attr("Treduce_func_other_arguments"),
3008 "Tfinalize_func_other_arguments",
3009 _op.get_attr("Tfinalize_func_other_arguments"), "output_types",
3010 _op.get_attr("output_types"), "output_shapes",
3011 _op.get_attr("output_shapes"))
3012 _inputs_flat = _op.inputs
3013 _execute.record_gradient(
3014 "ExperimentalGroupByReducerDataset", _inputs_flat, _attrs, _result)
3015 _result, = _result
3016 return _result
3018ExperimentalGroupByReducerDataset = tf_export("raw_ops.ExperimentalGroupByReducerDataset")(_ops.to_raw_op(experimental_group_by_reducer_dataset))
3021def experimental_group_by_reducer_dataset_eager_fallback(input_dataset, key_func_other_arguments, init_func_other_arguments, reduce_func_other_arguments, finalize_func_other_arguments, key_func, init_func, reduce_func, finalize_func, output_types, output_shapes, name, ctx):
3022 if not isinstance(output_types, (list, tuple)):
3023 raise TypeError(
3024 "Expected list for 'output_types' argument to "
3025 "'experimental_group_by_reducer_dataset' Op, not %r." % output_types)
3026 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
3027 if not isinstance(output_shapes, (list, tuple)):
3028 raise TypeError(
3029 "Expected list for 'output_shapes' argument to "
3030 "'experimental_group_by_reducer_dataset' Op, not %r." % output_shapes)
3031 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
3032 _attr_Tkey_func_other_arguments, key_func_other_arguments = _execute.convert_to_mixed_eager_tensors(key_func_other_arguments, ctx)
3033 _attr_Tinit_func_other_arguments, init_func_other_arguments = _execute.convert_to_mixed_eager_tensors(init_func_other_arguments, ctx)
3034 _attr_Treduce_func_other_arguments, reduce_func_other_arguments = _execute.convert_to_mixed_eager_tensors(reduce_func_other_arguments, ctx)
3035 _attr_Tfinalize_func_other_arguments, finalize_func_other_arguments = _execute.convert_to_mixed_eager_tensors(finalize_func_other_arguments, ctx)
3036 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
3037 _inputs_flat = [input_dataset] + list(key_func_other_arguments) + list(init_func_other_arguments) + list(reduce_func_other_arguments) + list(finalize_func_other_arguments)
3038 _attrs = ("key_func", key_func, "init_func", init_func, "reduce_func",
3039 reduce_func, "finalize_func", finalize_func, "Tkey_func_other_arguments",
3040 _attr_Tkey_func_other_arguments, "Tinit_func_other_arguments",
3041 _attr_Tinit_func_other_arguments, "Treduce_func_other_arguments",
3042 _attr_Treduce_func_other_arguments, "Tfinalize_func_other_arguments",
3043 _attr_Tfinalize_func_other_arguments, "output_types", output_types,
3044 "output_shapes", output_shapes)
3045 _result = _execute.execute(b"ExperimentalGroupByReducerDataset", 1,
3046 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
3047 name=name)
3048 if _execute.must_record_gradient():
3049 _execute.record_gradient(
3050 "ExperimentalGroupByReducerDataset", _inputs_flat, _attrs, _result)
3051 _result, = _result
3052 return _result
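# Editor's note: the raw op requires concrete function attrs, so this sketch
# (not machine generated) uses the high-level
# `tf.data.experimental.group_by_reducer` wrapper instead. It sums the
# elements within each parity class.
def _example_group_by_reducer():
  import tensorflow as tf
  reducer = tf.data.experimental.Reducer(
      init_func=lambda key: tf.constant(0, tf.int64),
      reduce_func=lambda state, value: state + value,
      finalize_func=lambda state: state)
  ds = tf.data.Dataset.range(10).apply(
      tf.data.experimental.group_by_reducer(
          key_func=lambda x: x % 2, reducer=reducer))
  return ds  # two elements: 20 (sum of evens) and 25 (sum of odds)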
3055def experimental_group_by_window_dataset(input_dataset, key_func_other_arguments, reduce_func_other_arguments, window_size_func_other_arguments, key_func, reduce_func, window_size_func, output_types, output_shapes, name=None):
3056 r"""Creates a dataset that computes a windowed group-by on `input_dataset`.
3058 Note: keys must currently be scalars of type `DT_INT64`; support for other key types is an open TODO.
3060 Args:
3061 input_dataset: A `Tensor` of type `variant`.
3062 key_func_other_arguments: A list of `Tensor` objects.
3063 reduce_func_other_arguments: A list of `Tensor` objects.
3064 window_size_func_other_arguments: A list of `Tensor` objects.
3065 key_func: A function decorated with @Defun.
3066 A function mapping an element of `input_dataset`, concatenated
3067 with `key_func_other_arguments` to a scalar value of type DT_INT64.
3068 reduce_func: A function decorated with @Defun.
3069 window_size_func: A function decorated with @Defun.
3070 output_types: A list of `tf.DTypes` that has length `>= 1`.
3071 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
3072 name: A name for the operation (optional).
3074 Returns:
3075 A `Tensor` of type `variant`.
3076 """
3077 _ctx = _context._context or _context.context()
3078 tld = _ctx._thread_local_data
3079 if tld.is_eager:
3080 try:
3081 _result = pywrap_tfe.TFE_Py_FastPathExecute(
3082 _ctx, "ExperimentalGroupByWindowDataset", name, input_dataset,
3083 key_func_other_arguments, reduce_func_other_arguments,
3084 window_size_func_other_arguments, "key_func", key_func, "reduce_func",
3085 reduce_func, "window_size_func", window_size_func, "output_types",
3086 output_types, "output_shapes", output_shapes)
3087 return _result
3088 except _core._NotOkStatusException as e:
3089 _ops.raise_from_not_ok_status(e, name)
3090 except _core._FallbackException:
3091 pass
3092 try:
3093 return experimental_group_by_window_dataset_eager_fallback(
3094 input_dataset, key_func_other_arguments,
3095 reduce_func_other_arguments, window_size_func_other_arguments,
3096 key_func=key_func, reduce_func=reduce_func,
3097 window_size_func=window_size_func, output_types=output_types,
3098 output_shapes=output_shapes, name=name, ctx=_ctx)
3099 except _core._SymbolicException:
3100 pass # Add nodes to the TensorFlow graph.
3101 # Add nodes to the TensorFlow graph.
3102 if not isinstance(output_types, (list, tuple)):
3103 raise TypeError(
3104 "Expected list for 'output_types' argument to "
3105 "'experimental_group_by_window_dataset' Op, not %r." % output_types)
3106 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
3107 if not isinstance(output_shapes, (list, tuple)):
3108 raise TypeError(
3109 "Expected list for 'output_shapes' argument to "
3110 "'experimental_group_by_window_dataset' Op, not %r." % output_shapes)
3111 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
3112 _, _, _op, _outputs = _op_def_library._apply_op_helper(
3113 "ExperimentalGroupByWindowDataset", input_dataset=input_dataset,
3114 key_func_other_arguments=key_func_other_arguments,
3115 reduce_func_other_arguments=reduce_func_other_arguments,
3116 window_size_func_other_arguments=window_size_func_other_arguments,
3117 key_func=key_func,
3118 reduce_func=reduce_func,
3119 window_size_func=window_size_func,
3120 output_types=output_types,
3121 output_shapes=output_shapes,
3122 name=name)
3123 _result = _outputs[:]
3124 if _execute.must_record_gradient():
3125 _attrs = ("key_func", _op.get_attr("key_func"), "reduce_func",
3126 _op.get_attr("reduce_func"), "window_size_func",
3127 _op.get_attr("window_size_func"), "Tkey_func_other_arguments",
3128 _op.get_attr("Tkey_func_other_arguments"),
3129 "Treduce_func_other_arguments",
3130 _op.get_attr("Treduce_func_other_arguments"),
3131 "Twindow_size_func_other_arguments",
3132 _op.get_attr("Twindow_size_func_other_arguments"),
3133 "output_types", _op.get_attr("output_types"), "output_shapes",
3134 _op.get_attr("output_shapes"))
3135 _inputs_flat = _op.inputs
3136 _execute.record_gradient(
3137 "ExperimentalGroupByWindowDataset", _inputs_flat, _attrs, _result)
3138 _result, = _result
3139 return _result
3141ExperimentalGroupByWindowDataset = tf_export("raw_ops.ExperimentalGroupByWindowDataset")(_ops.to_raw_op(experimental_group_by_window_dataset))
3144def experimental_group_by_window_dataset_eager_fallback(input_dataset, key_func_other_arguments, reduce_func_other_arguments, window_size_func_other_arguments, key_func, reduce_func, window_size_func, output_types, output_shapes, name, ctx):
3145 if not isinstance(output_types, (list, tuple)):
3146 raise TypeError(
3147 "Expected list for 'output_types' argument to "
3148 "'experimental_group_by_window_dataset' Op, not %r." % output_types)
3149 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
3150 if not isinstance(output_shapes, (list, tuple)):
3151 raise TypeError(
3152 "Expected list for 'output_shapes' argument to "
3153 "'experimental_group_by_window_dataset' Op, not %r." % output_shapes)
3154 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
3155 _attr_Tkey_func_other_arguments, key_func_other_arguments = _execute.convert_to_mixed_eager_tensors(key_func_other_arguments, ctx)
3156 _attr_Treduce_func_other_arguments, reduce_func_other_arguments = _execute.convert_to_mixed_eager_tensors(reduce_func_other_arguments, ctx)
3157 _attr_Twindow_size_func_other_arguments, window_size_func_other_arguments = _execute.convert_to_mixed_eager_tensors(window_size_func_other_arguments, ctx)
3158 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
3159 _inputs_flat = [input_dataset] + list(key_func_other_arguments) + list(reduce_func_other_arguments) + list(window_size_func_other_arguments)
3160 _attrs = ("key_func", key_func, "reduce_func", reduce_func,
3161 "window_size_func", window_size_func, "Tkey_func_other_arguments",
3162 _attr_Tkey_func_other_arguments, "Treduce_func_other_arguments",
3163 _attr_Treduce_func_other_arguments, "Twindow_size_func_other_arguments",
3164 _attr_Twindow_size_func_other_arguments, "output_types", output_types,
3165 "output_shapes", output_shapes)
3166 _result = _execute.execute(b"ExperimentalGroupByWindowDataset", 1,
3167 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
3168 name=name)
3169 if _execute.must_record_gradient():
3170 _execute.record_gradient(
3171 "ExperimentalGroupByWindowDataset", _inputs_flat, _attrs, _result)
3172 _result, = _result
3173 return _result
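# Editor's note: a sketch (not machine generated) using the public
# `Dataset.group_by_window` method, which lowers to the windowed group-by op
# family above. Elements are grouped by parity and batched per group; partial
# windows are flushed at the end of the input.
def _example_group_by_window():
  import tensorflow as tf
  ds = tf.data.Dataset.range(10).group_by_window(
      key_func=lambda x: x % 2,
      reduce_func=lambda key, window: window.batch(2),
      window_size=2)
  return ds  # [0, 2], [1, 3], [4, 6], [5, 7], [8], [9]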
3176def experimental_ignore_errors_dataset(input_dataset, output_types, output_shapes, log_warning=False, name=None):
3177 r"""Creates a dataset that contains the elements of `input_dataset` ignoring errors.
3179 Args:
3180 input_dataset: A `Tensor` of type `variant`.
3181 output_types: A list of `tf.DTypes` that has length `>= 1`.
3182 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
3183 log_warning: An optional `bool`. Defaults to `False`.
3184 name: A name for the operation (optional).
3186 Returns:
3187 A `Tensor` of type `variant`.
3188 """
3189 _ctx = _context._context or _context.context()
3190 tld = _ctx._thread_local_data
3191 if tld.is_eager:
3192 try:
3193 _result = pywrap_tfe.TFE_Py_FastPathExecute(
3194 _ctx, "ExperimentalIgnoreErrorsDataset", name, input_dataset,
3195 "output_types", output_types, "output_shapes", output_shapes,
3196 "log_warning", log_warning)
3197 return _result
3198 except _core._NotOkStatusException as e:
3199 _ops.raise_from_not_ok_status(e, name)
3200 except _core._FallbackException:
3201 pass
3202 try:
3203 return experimental_ignore_errors_dataset_eager_fallback(
3204 input_dataset, output_types=output_types,
3205 output_shapes=output_shapes, log_warning=log_warning, name=name,
3206 ctx=_ctx)
3207 except _core._SymbolicException:
3208 pass # Add nodes to the TensorFlow graph.
3209 # Add nodes to the TensorFlow graph.
3210 if not isinstance(output_types, (list, tuple)):
3211 raise TypeError(
3212 "Expected list for 'output_types' argument to "
3213 "'experimental_ignore_errors_dataset' Op, not %r." % output_types)
3214 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
3215 if not isinstance(output_shapes, (list, tuple)):
3216 raise TypeError(
3217 "Expected list for 'output_shapes' argument to "
3218 "'experimental_ignore_errors_dataset' Op, not %r." % output_shapes)
3219 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
3220 if log_warning is None:
3221 log_warning = False
3222 log_warning = _execute.make_bool(log_warning, "log_warning")
3223 _, _, _op, _outputs = _op_def_library._apply_op_helper(
3224 "ExperimentalIgnoreErrorsDataset", input_dataset=input_dataset,
3225 output_types=output_types,
3226 output_shapes=output_shapes,
3227 log_warning=log_warning, name=name)
3228 _result = _outputs[:]
3229 if _execute.must_record_gradient():
3230 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
3231 _op.get_attr("output_shapes"), "log_warning",
3232 _op._get_attr_bool("log_warning"))
3233 _inputs_flat = _op.inputs
3234 _execute.record_gradient(
3235 "ExperimentalIgnoreErrorsDataset", _inputs_flat, _attrs, _result)
3236 _result, = _result
3237 return _result
3239ExperimentalIgnoreErrorsDataset = tf_export("raw_ops.ExperimentalIgnoreErrorsDataset")(_ops.to_raw_op(experimental_ignore_errors_dataset))
3242def experimental_ignore_errors_dataset_eager_fallback(input_dataset, output_types, output_shapes, log_warning, name, ctx):
3243 if not isinstance(output_types, (list, tuple)):
3244 raise TypeError(
3245 "Expected list for 'output_types' argument to "
3246 "'experimental_ignore_errors_dataset' Op, not %r." % output_types)
3247 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
3248 if not isinstance(output_shapes, (list, tuple)):
3249 raise TypeError(
3250 "Expected list for 'output_shapes' argument to "
3251 "'experimental_ignore_errors_dataset' Op, not %r." % output_shapes)
3252 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
3253 if log_warning is None:
3254 log_warning = False
3255 log_warning = _execute.make_bool(log_warning, "log_warning")
3256 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
3257 _inputs_flat = [input_dataset]
3258 _attrs = ("output_types", output_types, "output_shapes", output_shapes,
3259 "log_warning", log_warning)
3260 _result = _execute.execute(b"ExperimentalIgnoreErrorsDataset", 1,
3261 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
3262 name=name)
3263 if _execute.must_record_gradient():
3264 _execute.record_gradient(
3265 "ExperimentalIgnoreErrorsDataset", _inputs_flat, _attrs, _result)
3266 _result, = _result
3267 return _result
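# Editor's note: a sketch (not machine generated) using the high-level
# `tf.data.experimental.ignore_errors` transformation, which lowers to the op
# above. The failing element (1/0 -> inf trips check_numerics) is dropped.
def _example_ignore_errors():
  import tensorflow as tf
  ds = tf.data.Dataset.from_tensor_slices([1., 2., 0., 4.])
  ds = ds.map(lambda x: tf.debugging.check_numerics(1. / x, "error"))
  return ds.apply(tf.data.experimental.ignore_errors())  # 1.0, 0.5, 0.25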
3270def experimental_iterator_get_device(resource, name=None):
3271 r"""Returns the name of the device on which `resource` has been placed.
3273 Args:
3274 resource: A `Tensor` of type `resource`.
3275 name: A name for the operation (optional).
3277 Returns:
3278 A `Tensor` of type `string`.
3279 """
3280 _ctx = _context._context or _context.context()
3281 tld = _ctx._thread_local_data
3282 if tld.is_eager:
3283 try:
3284 _result = pywrap_tfe.TFE_Py_FastPathExecute(
3285 _ctx, "ExperimentalIteratorGetDevice", name, resource)
3286 return _result
3287 except _core._NotOkStatusException as e:
3288 _ops.raise_from_not_ok_status(e, name)
3289 except _core._FallbackException:
3290 pass
3291 try:
3292 return experimental_iterator_get_device_eager_fallback(
3293 resource, name=name, ctx=_ctx)
3294 except _core._SymbolicException:
3295 pass # Add nodes to the TensorFlow graph.
3296 # Add nodes to the TensorFlow graph.
3297 _, _, _op, _outputs = _op_def_library._apply_op_helper(
3298 "ExperimentalIteratorGetDevice", resource=resource, name=name)
3299 _result = _outputs[:]
3300 if _execute.must_record_gradient():
3301 _attrs = ()
3302 _inputs_flat = _op.inputs
3303 _execute.record_gradient(
3304 "ExperimentalIteratorGetDevice", _inputs_flat, _attrs, _result)
3305 _result, = _result
3306 return _result
3308ExperimentalIteratorGetDevice = tf_export("raw_ops.ExperimentalIteratorGetDevice")(_ops.to_raw_op(experimental_iterator_get_device))
3311def experimental_iterator_get_device_eager_fallback(resource, name, ctx):
3312 resource = _ops.convert_to_tensor(resource, _dtypes.resource)
3313 _inputs_flat = [resource]
3314 _attrs = None
3315 _result = _execute.execute(b"ExperimentalIteratorGetDevice", 1,
3316 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
3317 name=name)
3318 if _execute.must_record_gradient():
3319 _execute.record_gradient(
3320 "ExperimentalIteratorGetDevice", _inputs_flat, _attrs, _result)
3321 _result, = _result
3322 return _result
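# Editor's note: an illustrative sketch (not machine generated), assuming
# eager execution; `_iterator_resource` is a private attribute of the eager
# iterator, used here only to obtain the resource handle the op expects.
def _example_iterator_get_device():
  import tensorflow as tf
  it = iter(tf.data.Dataset.range(3))
  return experimental_iterator_get_device(
      it._iterator_resource)  # e.g. b"/job:localhost/.../device:CPU:0"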
3325def experimental_lmdb_dataset(filenames, output_types, output_shapes, name=None):
3326 r"""TODO: add doc.
3328 Args:
3329 filenames: A `Tensor` of type `string`.
3330 output_types: A list of `tf.DTypes` that has length `>= 1`.
3331 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
3332 name: A name for the operation (optional).
3334 Returns:
3335 A `Tensor` of type `variant`.
3336 """
3337 _ctx = _context._context or _context.context()
3338 tld = _ctx._thread_local_data
3339 if tld.is_eager:
3340 try:
3341 _result = pywrap_tfe.TFE_Py_FastPathExecute(
3342 _ctx, "ExperimentalLMDBDataset", name, filenames, "output_types",
3343 output_types, "output_shapes", output_shapes)
3344 return _result
3345 except _core._NotOkStatusException as e:
3346 _ops.raise_from_not_ok_status(e, name)
3347 except _core._FallbackException:
3348 pass
3349 try:
3350 return experimental_lmdb_dataset_eager_fallback(
3351 filenames, output_types=output_types, output_shapes=output_shapes,
3352 name=name, ctx=_ctx)
3353 except _core._SymbolicException:
3354 pass # Add nodes to the TensorFlow graph.
3355 # Add nodes to the TensorFlow graph.
3356 if not isinstance(output_types, (list, tuple)):
3357 raise TypeError(
3358 "Expected list for 'output_types' argument to "
3359 "'experimental_lmdb_dataset' Op, not %r." % output_types)
3360 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
3361 if not isinstance(output_shapes, (list, tuple)):
3362 raise TypeError(
3363 "Expected list for 'output_shapes' argument to "
3364 "'experimental_lmdb_dataset' Op, not %r." % output_shapes)
3365 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
3366 _, _, _op, _outputs = _op_def_library._apply_op_helper(
3367 "ExperimentalLMDBDataset", filenames=filenames,
3368 output_types=output_types,
3369 output_shapes=output_shapes, name=name)
3370 _result = _outputs[:]
3371 if _execute.must_record_gradient():
3372 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
3373 _op.get_attr("output_shapes"))
3374 _inputs_flat = _op.inputs
3375 _execute.record_gradient(
3376 "ExperimentalLMDBDataset", _inputs_flat, _attrs, _result)
3377 _result, = _result
3378 return _result
3380ExperimentalLMDBDataset = tf_export("raw_ops.ExperimentalLMDBDataset")(_ops.to_raw_op(experimental_lmdb_dataset))
3383def experimental_lmdb_dataset_eager_fallback(filenames, output_types, output_shapes, name, ctx):
3384 if not isinstance(output_types, (list, tuple)):
3385 raise TypeError(
3386 "Expected list for 'output_types' argument to "
3387 "'experimental_lmdb_dataset' Op, not %r." % output_types)
3388 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
3389 if not isinstance(output_shapes, (list, tuple)):
3390 raise TypeError(
3391 "Expected list for 'output_shapes' argument to "
3392 "'experimental_lmdb_dataset' Op, not %r." % output_shapes)
3393 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
3394 filenames = _ops.convert_to_tensor(filenames, _dtypes.string)
3395 _inputs_flat = [filenames]
3396 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
3397 _result = _execute.execute(b"ExperimentalLMDBDataset", 1,
3398 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
3399 name=name)
3400 if _execute.must_record_gradient():
3401 _execute.record_gradient(
3402 "ExperimentalLMDBDataset", _inputs_flat, _attrs, _result)
3403 _result, = _result
3404 return _result
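# Editor's note: a sketch (not machine generated). The database path is
# hypothetical; each element of an LMDB dataset is a (key, value) pair of
# scalar strings, hence the two output components below.
def _example_lmdb_dataset():
  import tensorflow as tf
  return experimental_lmdb_dataset(
      tf.constant(["/tmp/data.mdb"]),  # hypothetical path
      output_types=[tf.string, tf.string],
      output_shapes=[[], []])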
3407def experimental_latency_stats_dataset(input_dataset, tag, output_types, output_shapes, name=None):
3408 r"""Records the latency of producing `input_dataset` elements in a StatsAggregator.
3410 Args:
3411 input_dataset: A `Tensor` of type `variant`.
3412 tag: A `Tensor` of type `string`.
3413 output_types: A list of `tf.DTypes` that has length `>= 1`.
3414 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
3415 name: A name for the operation (optional).
3417 Returns:
3418 A `Tensor` of type `variant`.
3419 """
3420 _ctx = _context._context or _context.context()
3421 tld = _ctx._thread_local_data
3422 if tld.is_eager:
3423 try:
3424 _result = pywrap_tfe.TFE_Py_FastPathExecute(
3425 _ctx, "ExperimentalLatencyStatsDataset", name, input_dataset, tag,
3426 "output_types", output_types, "output_shapes", output_shapes)
3427 return _result
3428 except _core._NotOkStatusException as e:
3429 _ops.raise_from_not_ok_status(e, name)
3430 except _core._FallbackException:
3431 pass
3432 try:
3433 return experimental_latency_stats_dataset_eager_fallback(
3434 input_dataset, tag, output_types=output_types,
3435 output_shapes=output_shapes, name=name, ctx=_ctx)
3436 except _core._SymbolicException:
3437 pass # Add nodes to the TensorFlow graph.
3438 # Add nodes to the TensorFlow graph.
3439 if not isinstance(output_types, (list, tuple)):
3440 raise TypeError(
3441 "Expected list for 'output_types' argument to "
3442 "'experimental_latency_stats_dataset' Op, not %r." % output_types)
3443 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
3444 if not isinstance(output_shapes, (list, tuple)):
3445 raise TypeError(
3446 "Expected list for 'output_shapes' argument to "
3447 "'experimental_latency_stats_dataset' Op, not %r." % output_shapes)
3448 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
3449 _, _, _op, _outputs = _op_def_library._apply_op_helper(
3450 "ExperimentalLatencyStatsDataset", input_dataset=input_dataset,
3451 tag=tag, output_types=output_types,
3452 output_shapes=output_shapes,
3453 name=name)
3454 _result = _outputs[:]
3455 if _execute.must_record_gradient():
3456 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
3457 _op.get_attr("output_shapes"))
3458 _inputs_flat = _op.inputs
3459 _execute.record_gradient(
3460 "ExperimentalLatencyStatsDataset", _inputs_flat, _attrs, _result)
3461 _result, = _result
3462 return _result
3464ExperimentalLatencyStatsDataset = tf_export("raw_ops.ExperimentalLatencyStatsDataset")(_ops.to_raw_op(experimental_latency_stats_dataset))
3467def experimental_latency_stats_dataset_eager_fallback(input_dataset, tag, output_types, output_shapes, name, ctx):
3468 if not isinstance(output_types, (list, tuple)):
3469 raise TypeError(
3470 "Expected list for 'output_types' argument to "
3471 "'experimental_latency_stats_dataset' Op, not %r." % output_types)
3472 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
3473 if not isinstance(output_shapes, (list, tuple)):
3474 raise TypeError(
3475 "Expected list for 'output_shapes' argument to "
3476 "'experimental_latency_stats_dataset' Op, not %r." % output_shapes)
3477 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
3478 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
3479 tag = _ops.convert_to_tensor(tag, _dtypes.string)
3480 _inputs_flat = [input_dataset, tag]
3481 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
3482 _result = _execute.execute(b"ExperimentalLatencyStatsDataset", 1,
3483 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
3484 name=name)
3485 if _execute.must_record_gradient():
3486 _execute.record_gradient(
3487 "ExperimentalLatencyStatsDataset", _inputs_flat, _attrs, _result)
3488 _result, = _result
3489 return _result
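# Editor's note: an illustrative sketch (not machine generated), analogous to
# the bytes-produced example earlier; it records per-element production
# latency under the given tag. `_variant_tensor` is a private attribute,
# used here only for illustration.
def _example_latency_stats_dataset():
  import tensorflow as tf
  ds = tf.data.Dataset.range(4)
  return experimental_latency_stats_dataset(
      ds._variant_tensor,
      tag=tf.constant("latency"),
      output_types=[tf.int64],
      output_shapes=[[]])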
3492def experimental_map_and_batch_dataset(input_dataset, other_arguments, batch_size, num_parallel_calls, drop_remainder, f, output_types, output_shapes, preserve_cardinality=False, name=None):
3493 r"""Creates a dataset that fuses mapping with batching.
3495 Creates a dataset that applies `f` to the outputs of `input_dataset` and then
3496 batches `batch_size` of them into a single element.
3498 Unlike a "MapDataset", which applies `f` sequentially, this dataset invokes up
3499 to `num_parallel_calls` copies of `f` in parallel.
3501 Args:
3502 input_dataset: A `Tensor` of type `variant`.
3503 A variant tensor representing the input dataset.
3504 other_arguments: A list of `Tensor` objects.
3505 A list of tensors, typically values that were captured when building a closure
3506 for `f`.
3507 batch_size: A `Tensor` of type `int64`.
3508 A scalar representing the number of elements to accumulate in a
3509 batch. (Parallelism is governed by `num_parallel_calls` below, not by
3510 `batch_size`.)
3511 num_parallel_calls: A `Tensor` of type `int64`.
3512 A scalar representing the maximum number of parallel invocations of `f`.
3513 Applying `f` to consecutive input elements in parallel has the potential to
3514 improve input pipeline throughput.
3515 drop_remainder: A `Tensor` of type `bool`.
3516 A scalar representing whether the last batch should be dropped in case its size
3517 is smaller than desired.
3518 f: A function decorated with @Defun.
3519 A function to apply to the outputs of `input_dataset`.
3520 output_types: A list of `tf.DTypes` that has length `>= 1`.
3521 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
3522 preserve_cardinality: An optional `bool`. Defaults to `False`.
3523 name: A name for the operation (optional).
3525 Returns:
3526 A `Tensor` of type `variant`.
3527 """
3528 _ctx = _context._context or _context.context()
3529 tld = _ctx._thread_local_data
3530 if tld.is_eager:
3531 try:
3532 _result = pywrap_tfe.TFE_Py_FastPathExecute(
3533 _ctx, "ExperimentalMapAndBatchDataset", name, input_dataset,
3534 other_arguments, batch_size, num_parallel_calls, drop_remainder, "f",
3535 f, "output_types", output_types, "output_shapes", output_shapes,
3536 "preserve_cardinality", preserve_cardinality)
3537 return _result
3538 except _core._NotOkStatusException as e:
3539 _ops.raise_from_not_ok_status(e, name)
3540 except _core._FallbackException:
3541 pass
3542 try:
3543 return experimental_map_and_batch_dataset_eager_fallback(
3544 input_dataset, other_arguments, batch_size, num_parallel_calls,
3545 drop_remainder, f=f, output_types=output_types,
3546 output_shapes=output_shapes,
3547 preserve_cardinality=preserve_cardinality, name=name, ctx=_ctx)
3548 except _core._SymbolicException:
3549 pass # Add nodes to the TensorFlow graph.
3550 # Add nodes to the TensorFlow graph.
3551 if not isinstance(output_types, (list, tuple)):
3552 raise TypeError(
3553 "Expected list for 'output_types' argument to "
3554 "'experimental_map_and_batch_dataset' Op, not %r." % output_types)
3555 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
3556 if not isinstance(output_shapes, (list, tuple)):
3557 raise TypeError(
3558 "Expected list for 'output_shapes' argument to "
3559 "'experimental_map_and_batch_dataset' Op, not %r." % output_shapes)
3560 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
3561 if preserve_cardinality is None:
3562 preserve_cardinality = False
3563 preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality")
3564 _, _, _op, _outputs = _op_def_library._apply_op_helper(
3565 "ExperimentalMapAndBatchDataset", input_dataset=input_dataset,
3566 other_arguments=other_arguments,
3567 batch_size=batch_size,
3568 num_parallel_calls=num_parallel_calls,
3569 drop_remainder=drop_remainder, f=f,
3570 output_types=output_types,
3571 output_shapes=output_shapes,
3572 preserve_cardinality=preserve_cardinality,
3573 name=name)
3574 _result = _outputs[:]
3575 if _execute.must_record_gradient():
3576 _attrs = ("f", _op.get_attr("f"), "Targuments",
3577 _op.get_attr("Targuments"), "output_types",
3578 _op.get_attr("output_types"), "output_shapes",
3579 _op.get_attr("output_shapes"), "preserve_cardinality",
3580 _op._get_attr_bool("preserve_cardinality"))
3581 _inputs_flat = _op.inputs
3582 _execute.record_gradient(
3583 "ExperimentalMapAndBatchDataset", _inputs_flat, _attrs, _result)
3584 _result, = _result
3585 return _result
3587ExperimentalMapAndBatchDataset = tf_export("raw_ops.ExperimentalMapAndBatchDataset")(_ops.to_raw_op(experimental_map_and_batch_dataset))
3590def experimental_map_and_batch_dataset_eager_fallback(input_dataset, other_arguments, batch_size, num_parallel_calls, drop_remainder, f, output_types, output_shapes, preserve_cardinality, name, ctx):
3591 if not isinstance(output_types, (list, tuple)):
3592 raise TypeError(
3593 "Expected list for 'output_types' argument to "
3594 "'experimental_map_and_batch_dataset' Op, not %r." % output_types)
3595 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
3596 if not isinstance(output_shapes, (list, tuple)):
3597 raise TypeError(
3598 "Expected list for 'output_shapes' argument to "
3599 "'experimental_map_and_batch_dataset' Op, not %r." % output_shapes)
3600 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
3601 if preserve_cardinality is None:
3602 preserve_cardinality = False
3603 preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality")
3604 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx)
3605 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
3606 batch_size = _ops.convert_to_tensor(batch_size, _dtypes.int64)
3607 num_parallel_calls = _ops.convert_to_tensor(num_parallel_calls, _dtypes.int64)
3608 drop_remainder = _ops.convert_to_tensor(drop_remainder, _dtypes.bool)
3609 _inputs_flat = [input_dataset] + list(other_arguments) + [batch_size, num_parallel_calls, drop_remainder]
3610 _attrs = ("f", f, "Targuments", _attr_Targuments, "output_types",
3611 output_types, "output_shapes", output_shapes, "preserve_cardinality",
3612 preserve_cardinality)
3613 _result = _execute.execute(b"ExperimentalMapAndBatchDataset", 1,
3614 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
3615 name=name)
3616 if _execute.must_record_gradient():
3617 _execute.record_gradient(
3618 "ExperimentalMapAndBatchDataset", _inputs_flat, _attrs, _result)
3619 _result, = _result
3620 return _result
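# --- Usage sketch (editor's addition, not machine generated) ---
# A minimal, hedged example of reaching the fused map+batch behavior through
# the public API; assumes TensorFlow 2.x, where the (deprecated)
# `tf.data.experimental.map_and_batch` transformation lowers to this op
# family. `_demo_map_and_batch` is a hypothetical name used for illustration.
def _demo_map_and_batch():
  import tensorflow as tf
  ds = tf.data.Dataset.range(10)
  # Square each element and batch 4 at a time; `drop_remainder=True` mirrors
  # the op's `drop_remainder` input and discards the final short batch.
  return ds.apply(tf.data.experimental.map_and_batch(
      lambda x: x * x, batch_size=4, num_parallel_calls=2,
      drop_remainder=True))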
3623def experimental_map_dataset(input_dataset, other_arguments, f, output_types, output_shapes, use_inter_op_parallelism=True, preserve_cardinality=False, name=None):
3624 r"""Creates a dataset that applies `f` to the outputs of `input_dataset`.
3626 Args:
3627 input_dataset: A `Tensor` of type `variant`.
3628 other_arguments: A list of `Tensor` objects.
3629 f: A function decorated with @Defun.
3630 output_types: A list of `tf.DTypes` that has length `>= 1`.
3631 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
3632 use_inter_op_parallelism: An optional `bool`. Defaults to `True`.
3633 preserve_cardinality: An optional `bool`. Defaults to `False`.
3634 name: A name for the operation (optional).
3636 Returns:
3637 A `Tensor` of type `variant`.
3638 """
3639 _ctx = _context._context or _context.context()
3640 tld = _ctx._thread_local_data
3641 if tld.is_eager:
3642 try:
3643 _result = pywrap_tfe.TFE_Py_FastPathExecute(
3644 _ctx, "ExperimentalMapDataset", name, input_dataset, other_arguments,
3645 "f", f, "output_types", output_types, "output_shapes", output_shapes,
3646 "use_inter_op_parallelism", use_inter_op_parallelism,
3647 "preserve_cardinality", preserve_cardinality)
3648 return _result
3649 except _core._NotOkStatusException as e:
3650 _ops.raise_from_not_ok_status(e, name)
3651 except _core._FallbackException:
3652 pass
3653 try:
3654 return experimental_map_dataset_eager_fallback(
3655 input_dataset, other_arguments, f=f, output_types=output_types,
3656 output_shapes=output_shapes,
3657 use_inter_op_parallelism=use_inter_op_parallelism,
3658 preserve_cardinality=preserve_cardinality, name=name, ctx=_ctx)
3659 except _core._SymbolicException:
3660 pass # Add nodes to the TensorFlow graph.
3661 # Add nodes to the TensorFlow graph.
3662 if not isinstance(output_types, (list, tuple)):
3663 raise TypeError(
3664 "Expected list for 'output_types' argument to "
3665 "'experimental_map_dataset' Op, not %r." % output_types)
3666 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
3667 if not isinstance(output_shapes, (list, tuple)):
3668 raise TypeError(
3669 "Expected list for 'output_shapes' argument to "
3670 "'experimental_map_dataset' Op, not %r." % output_shapes)
3671 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
3672 if use_inter_op_parallelism is None:
3673 use_inter_op_parallelism = True
3674 use_inter_op_parallelism = _execute.make_bool(use_inter_op_parallelism, "use_inter_op_parallelism")
3675 if preserve_cardinality is None:
3676 preserve_cardinality = False
3677 preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality")
3678 _, _, _op, _outputs = _op_def_library._apply_op_helper(
3679 "ExperimentalMapDataset", input_dataset=input_dataset,
3680 other_arguments=other_arguments, f=f,
3681 output_types=output_types,
3682 output_shapes=output_shapes,
3683 use_inter_op_parallelism=use_inter_op_parallelism,
3684 preserve_cardinality=preserve_cardinality,
3685 name=name)
3686 _result = _outputs[:]
3687 if _execute.must_record_gradient():
3688 _attrs = ("f", _op.get_attr("f"), "Targuments",
3689 _op.get_attr("Targuments"), "output_types",
3690 _op.get_attr("output_types"), "output_shapes",
3691 _op.get_attr("output_shapes"), "use_inter_op_parallelism",
3692 _op._get_attr_bool("use_inter_op_parallelism"),
3693 "preserve_cardinality",
3694 _op._get_attr_bool("preserve_cardinality"))
3695 _inputs_flat = _op.inputs
3696 _execute.record_gradient(
3697 "ExperimentalMapDataset", _inputs_flat, _attrs, _result)
3698 _result, = _result
3699 return _result
3701ExperimentalMapDataset = tf_export("raw_ops.ExperimentalMapDataset")(_ops.to_raw_op(experimental_map_dataset))
3704def experimental_map_dataset_eager_fallback(input_dataset, other_arguments, f, output_types, output_shapes, use_inter_op_parallelism, preserve_cardinality, name, ctx):
3705 if not isinstance(output_types, (list, tuple)):
3706 raise TypeError(
3707 "Expected list for 'output_types' argument to "
3708 "'experimental_map_dataset' Op, not %r." % output_types)
3709 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
3710 if not isinstance(output_shapes, (list, tuple)):
3711 raise TypeError(
3712 "Expected list for 'output_shapes' argument to "
3713 "'experimental_map_dataset' Op, not %r." % output_shapes)
3714 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
3715 if use_inter_op_parallelism is None:
3716 use_inter_op_parallelism = True
3717 use_inter_op_parallelism = _execute.make_bool(use_inter_op_parallelism, "use_inter_op_parallelism")
3718 if preserve_cardinality is None:
3719 preserve_cardinality = False
3720 preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality")
3721 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx)
3722 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
3723 _inputs_flat = [input_dataset] + list(other_arguments)
3724 _attrs = ("f", f, "Targuments", _attr_Targuments, "output_types",
3725 output_types, "output_shapes", output_shapes, "use_inter_op_parallelism",
3726 use_inter_op_parallelism, "preserve_cardinality", preserve_cardinality)
3727 _result = _execute.execute(b"ExperimentalMapDataset", 1,
3728 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
3729 name=name)
3730 if _execute.must_record_gradient():
3731 _execute.record_gradient(
3732 "ExperimentalMapDataset", _inputs_flat, _attrs, _result)
3733 _result, = _result
3734 return _result
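# --- Usage sketch (editor's addition, not machine generated) ---
# A hedged sketch of the public counterpart: `tf.data.Dataset.map` traces `f`
# into a function attr and derives `output_types`/`output_shapes` from it,
# which is how the arguments above are normally populated. Assumes TF 2.x;
# `_demo_map` is a hypothetical name.
def _demo_map():
  import tensorflow as tf
  ds = tf.data.Dataset.range(5)
  # Element-wise transformation; the number of elements is unchanged,
  # matching the `preserve_cardinality` attr documented above.
  return ds.map(lambda x: x + 1)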
3737def experimental_matching_files_dataset(patterns, name=None):
3738 r"""TODO: add doc.
3740 Args:
3741 patterns: A `Tensor` of type `string`.
3742 name: A name for the operation (optional).
3744 Returns:
3745 A `Tensor` of type `variant`.
3746 """
3747 _ctx = _context._context or _context.context()
3748 tld = _ctx._thread_local_data
3749 if tld.is_eager:
3750 try:
3751 _result = pywrap_tfe.TFE_Py_FastPathExecute(
3752 _ctx, "ExperimentalMatchingFilesDataset", name, patterns)
3753 return _result
3754 except _core._NotOkStatusException as e:
3755 _ops.raise_from_not_ok_status(e, name)
3756 except _core._FallbackException:
3757 pass
3758 try:
3759 return experimental_matching_files_dataset_eager_fallback(
3760 patterns, name=name, ctx=_ctx)
3761 except _core._SymbolicException:
3762 pass # Add nodes to the TensorFlow graph.
3763 # Add nodes to the TensorFlow graph.
3764 _, _, _op, _outputs = _op_def_library._apply_op_helper(
3765 "ExperimentalMatchingFilesDataset", patterns=patterns, name=name)
3766 _result = _outputs[:]
3767 if _execute.must_record_gradient():
3768 _attrs = ()
3769 _inputs_flat = _op.inputs
3770 _execute.record_gradient(
3771 "ExperimentalMatchingFilesDataset", _inputs_flat, _attrs, _result)
3772 _result, = _result
3773 return _result
3775ExperimentalMatchingFilesDataset = tf_export("raw_ops.ExperimentalMatchingFilesDataset")(_ops.to_raw_op(experimental_matching_files_dataset))
3778def experimental_matching_files_dataset_eager_fallback(patterns, name, ctx):
3779 patterns = _ops.convert_to_tensor(patterns, _dtypes.string)
3780 _inputs_flat = [patterns]
3781 _attrs = None
3782 _result = _execute.execute(b"ExperimentalMatchingFilesDataset", 1,
3783 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
3784 name=name)
3785 if _execute.must_record_gradient():
3786 _execute.record_gradient(
3787 "ExperimentalMatchingFilesDataset", _inputs_flat, _attrs, _result)
3788 _result, = _result
3789 return _result
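# --- Usage sketch (editor's addition, not machine generated) ---
# A hedged sketch: `tf.data.Dataset.list_files` is the usual public entry
# point for a dataset of filenames matched from glob patterns. The path below
# is hypothetical; assumes TF 2.x.
def _demo_matching_files():
  import tensorflow as tf
  # shuffle=False keeps the matched filenames in deterministic order.
  return tf.data.Dataset.list_files("/tmp/data/*.tfrecord", shuffle=False)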
3792def experimental_max_intra_op_parallelism_dataset(input_dataset, max_intra_op_parallelism, output_types, output_shapes, name=None):
3793 r"""Creates a dataset that overrides the maximum intra-op parallelism.
3795 Args:
3796 input_dataset: A `Tensor` of type `variant`.
3797 max_intra_op_parallelism: A `Tensor` of type `int64`.
3798 Identifies the maximum intra-op parallelism to use.
3799 output_types: A list of `tf.DTypes` that has length `>= 1`.
3800 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
3801 name: A name for the operation (optional).
3803 Returns:
3804 A `Tensor` of type `variant`.
3805 """
3806 _ctx = _context._context or _context.context()
3807 tld = _ctx._thread_local_data
3808 if tld.is_eager:
3809 try:
3810 _result = pywrap_tfe.TFE_Py_FastPathExecute(
3811 _ctx, "ExperimentalMaxIntraOpParallelismDataset", name, input_dataset,
3812 max_intra_op_parallelism, "output_types", output_types,
3813 "output_shapes", output_shapes)
3814 return _result
3815 except _core._NotOkStatusException as e:
3816 _ops.raise_from_not_ok_status(e, name)
3817 except _core._FallbackException:
3818 pass
3819 try:
3820 return experimental_max_intra_op_parallelism_dataset_eager_fallback(
3821 input_dataset, max_intra_op_parallelism, output_types=output_types,
3822 output_shapes=output_shapes, name=name, ctx=_ctx)
3823 except _core._SymbolicException:
3824 pass # Add nodes to the TensorFlow graph.
3825 # Add nodes to the TensorFlow graph.
3826 if not isinstance(output_types, (list, tuple)):
3827 raise TypeError(
3828 "Expected list for 'output_types' argument to "
3829 "'experimental_max_intra_op_parallelism_dataset' Op, not %r." % output_types)
3830 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
3831 if not isinstance(output_shapes, (list, tuple)):
3832 raise TypeError(
3833 "Expected list for 'output_shapes' argument to "
3834 "'experimental_max_intra_op_parallelism_dataset' Op, not %r." % output_shapes)
3835 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
3836 _, _, _op, _outputs = _op_def_library._apply_op_helper(
3837 "ExperimentalMaxIntraOpParallelismDataset", input_dataset=input_dataset,
3838 max_intra_op_parallelism=max_intra_op_parallelism,
3839 output_types=output_types,
3840 output_shapes=output_shapes,
3841 name=name)
3842 _result = _outputs[:]
3843 if _execute.must_record_gradient():
3844 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
3845 _op.get_attr("output_shapes"))
3846 _inputs_flat = _op.inputs
3847 _execute.record_gradient(
3848 "ExperimentalMaxIntraOpParallelismDataset", _inputs_flat, _attrs, _result)
3849 _result, = _result
3850 return _result
3852ExperimentalMaxIntraOpParallelismDataset = tf_export("raw_ops.ExperimentalMaxIntraOpParallelismDataset")(_ops.to_raw_op(experimental_max_intra_op_parallelism_dataset))
3855def experimental_max_intra_op_parallelism_dataset_eager_fallback(input_dataset, max_intra_op_parallelism, output_types, output_shapes, name, ctx):
3856 if not isinstance(output_types, (list, tuple)):
3857 raise TypeError(
3858 "Expected list for 'output_types' argument to "
3859 "'experimental_max_intra_op_parallelism_dataset' Op, not %r." % output_types)
3860 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
3861 if not isinstance(output_shapes, (list, tuple)):
3862 raise TypeError(
3863 "Expected list for 'output_shapes' argument to "
3864 "'experimental_max_intra_op_parallelism_dataset' Op, not %r." % output_shapes)
3865 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
3866 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
3867 max_intra_op_parallelism = _ops.convert_to_tensor(max_intra_op_parallelism, _dtypes.int64)
3868 _inputs_flat = [input_dataset, max_intra_op_parallelism]
3869 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
3870 _result = _execute.execute(b"ExperimentalMaxIntraOpParallelismDataset", 1,
3871 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
3872 name=name)
3873 if _execute.must_record_gradient():
3874 _execute.record_gradient(
3875 "ExperimentalMaxIntraOpParallelismDataset", _inputs_flat, _attrs, _result)
3876 _result, = _result
3877 return _result
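# --- Usage sketch (editor's addition, not machine generated) ---
# A hedged sketch: the override is normally applied through `tf.data.Options`
# rather than by calling the op directly. Assumes a recent TF 2.x release
# (older releases expose the knob under `options.experimental_threading`).
def _demo_max_intra_op_parallelism():
  import tensorflow as tf
  ds = tf.data.Dataset.range(100)
  opts = tf.data.Options()
  # Cap per-op parallelism for this pipeline at a single thread.
  opts.threading.max_intra_op_parallelism = 1
  return ds.with_options(opts)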
3880def experimental_non_serializable_dataset(input_dataset, output_types, output_shapes, name=None):
3881 r"""TODO: add doc.
3883 Args:
3884 input_dataset: A `Tensor` of type `variant`.
3885 output_types: A list of `tf.DTypes` that has length `>= 1`.
3886 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
3887 name: A name for the operation (optional).
3889 Returns:
3890 A `Tensor` of type `variant`.
3891 """
3892 _ctx = _context._context or _context.context()
3893 tld = _ctx._thread_local_data
3894 if tld.is_eager:
3895 try:
3896 _result = pywrap_tfe.TFE_Py_FastPathExecute(
3897 _ctx, "ExperimentalNonSerializableDataset", name, input_dataset,
3898 "output_types", output_types, "output_shapes", output_shapes)
3899 return _result
3900 except _core._NotOkStatusException as e:
3901 _ops.raise_from_not_ok_status(e, name)
3902 except _core._FallbackException:
3903 pass
3904 try:
3905 return experimental_non_serializable_dataset_eager_fallback(
3906 input_dataset, output_types=output_types,
3907 output_shapes=output_shapes, name=name, ctx=_ctx)
3908 except _core._SymbolicException:
3909 pass # Add nodes to the TensorFlow graph.
3910 # Add nodes to the TensorFlow graph.
3911 if not isinstance(output_types, (list, tuple)):
3912 raise TypeError(
3913 "Expected list for 'output_types' argument to "
3914 "'experimental_non_serializable_dataset' Op, not %r." % output_types)
3915 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
3916 if not isinstance(output_shapes, (list, tuple)):
3917 raise TypeError(
3918 "Expected list for 'output_shapes' argument to "
3919 "'experimental_non_serializable_dataset' Op, not %r." % output_shapes)
3920 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
3921 _, _, _op, _outputs = _op_def_library._apply_op_helper(
3922 "ExperimentalNonSerializableDataset", input_dataset=input_dataset,
3923 output_types=output_types,
3924 output_shapes=output_shapes,
3925 name=name)
3926 _result = _outputs[:]
3927 if _execute.must_record_gradient():
3928 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
3929 _op.get_attr("output_shapes"))
3930 _inputs_flat = _op.inputs
3931 _execute.record_gradient(
3932 "ExperimentalNonSerializableDataset", _inputs_flat, _attrs, _result)
3933 _result, = _result
3934 return _result
3936ExperimentalNonSerializableDataset = tf_export("raw_ops.ExperimentalNonSerializableDataset")(_ops.to_raw_op(experimental_non_serializable_dataset))
3939def experimental_non_serializable_dataset_eager_fallback(input_dataset, output_types, output_shapes, name, ctx):
3940 if not isinstance(output_types, (list, tuple)):
3941 raise TypeError(
3942 "Expected list for 'output_types' argument to "
3943 "'experimental_non_serializable_dataset' Op, not %r." % output_types)
3944 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
3945 if not isinstance(output_shapes, (list, tuple)):
3946 raise TypeError(
3947 "Expected list for 'output_shapes' argument to "
3948 "'experimental_non_serializable_dataset' Op, not %r." % output_shapes)
3949 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
3950 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
3951 _inputs_flat = [input_dataset]
3952 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
3953 _result = _execute.execute(b"ExperimentalNonSerializableDataset", 1,
3954 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
3955 name=name)
3956 if _execute.must_record_gradient():
3957 _execute.record_gradient(
3958 "ExperimentalNonSerializableDataset", _inputs_flat, _attrs, _result)
3959 _result, = _result
3960 return _result
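# --- Usage sketch (editor's addition, not machine generated) ---
# A hedged raw-op sketch using the signature documented above. Reading
# `_variant_tensor` is private API, used here only to feed the op; the op
# appears to exist for tests that exercise "dataset cannot be serialized"
# code paths. Assumes TF 2.x.
def _demo_non_serializable():
  import tensorflow as tf
  ds = tf.data.Dataset.range(3)
  variant = tf.raw_ops.ExperimentalNonSerializableDataset(
      input_dataset=ds._variant_tensor,
      output_types=[tf.int64], output_shapes=[[]])
  # Re-wrap the variant so it can be iterated like any other dataset.
  return tf.data.experimental.from_variant(
      variant, tf.TensorSpec([], tf.int64))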
3963def experimental_parallel_interleave_dataset(input_dataset, other_arguments, cycle_length, block_length, sloppy, buffer_output_elements, prefetch_input_elements, f, output_types, output_shapes, name=None):
3964 r"""Creates a dataset that applies `f` to the outputs of `input_dataset`.
3966 The resulting dataset is similar to the `InterleaveDataset`, with the exception
3967 that if retrieving the next value from a dataset would cause the requester to
3968 block, it will skip that input dataset. This dataset is especially useful
3969 when loading data from variable-latency datastores (e.g. HDFS, GCS), as it
3970 allows the training step to proceed so long as some data is available.
3972 !! WARNING !! This dataset is not deterministic!
3974 Args:
3975 input_dataset: A `Tensor` of type `variant`.
3976 other_arguments: A list of `Tensor` objects.
3977 cycle_length: A `Tensor` of type `int64`.
3978 block_length: A `Tensor` of type `int64`.
3979 sloppy: A `Tensor` of type `bool`.
3980 buffer_output_elements: A `Tensor` of type `int64`.
3981 prefetch_input_elements: A `Tensor` of type `int64`.
3982 f: A function decorated with @Defun.
3983 A function mapping elements of `input_dataset`, concatenated with
3984 `other_arguments`, to a Dataset variant that contains elements matching
3985 `output_types` and `output_shapes`.
3986 output_types: A list of `tf.DTypes` that has length `>= 1`.
3987 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
3988 name: A name for the operation (optional).
3990 Returns:
3991 A `Tensor` of type `variant`.
3992 """
3993 _ctx = _context._context or _context.context()
3994 tld = _ctx._thread_local_data
3995 if tld.is_eager:
3996 try:
3997 _result = pywrap_tfe.TFE_Py_FastPathExecute(
3998 _ctx, "ExperimentalParallelInterleaveDataset", name, input_dataset,
3999 other_arguments, cycle_length, block_length, sloppy,
4000 buffer_output_elements, prefetch_input_elements, "f", f,
4001 "output_types", output_types, "output_shapes", output_shapes)
4002 return _result
4003 except _core._NotOkStatusException as e:
4004 _ops.raise_from_not_ok_status(e, name)
4005 except _core._FallbackException:
4006 pass
4007 try:
4008 return experimental_parallel_interleave_dataset_eager_fallback(
4009 input_dataset, other_arguments, cycle_length, block_length, sloppy,
4010 buffer_output_elements, prefetch_input_elements, f=f,
4011 output_types=output_types, output_shapes=output_shapes, name=name,
4012 ctx=_ctx)
4013 except _core._SymbolicException:
4014 pass # Add nodes to the TensorFlow graph.
4015 # Add nodes to the TensorFlow graph.
4016 if not isinstance(output_types, (list, tuple)):
4017 raise TypeError(
4018 "Expected list for 'output_types' argument to "
4019 "'experimental_parallel_interleave_dataset' Op, not %r." % output_types)
4020 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4021 if not isinstance(output_shapes, (list, tuple)):
4022 raise TypeError(
4023 "Expected list for 'output_shapes' argument to "
4024 "'experimental_parallel_interleave_dataset' Op, not %r." % output_shapes)
4025 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
4026 _, _, _op, _outputs = _op_def_library._apply_op_helper(
4027 "ExperimentalParallelInterleaveDataset", input_dataset=input_dataset,
4028 other_arguments=other_arguments,
4029 cycle_length=cycle_length,
4030 block_length=block_length,
4031 sloppy=sloppy,
4032 buffer_output_elements=buffer_output_elements,
4033 prefetch_input_elements=prefetch_input_elements,
4034 f=f,
4035 output_types=output_types,
4036 output_shapes=output_shapes,
4037 name=name)
4038 _result = _outputs[:]
4039 if _execute.must_record_gradient():
4040 _attrs = ("f", _op.get_attr("f"), "Targuments",
4041 _op.get_attr("Targuments"), "output_types",
4042 _op.get_attr("output_types"), "output_shapes",
4043 _op.get_attr("output_shapes"))
4044 _inputs_flat = _op.inputs
4045 _execute.record_gradient(
4046 "ExperimentalParallelInterleaveDataset", _inputs_flat, _attrs, _result)
4047 _result, = _result
4048 return _result
4050ExperimentalParallelInterleaveDataset = tf_export("raw_ops.ExperimentalParallelInterleaveDataset")(_ops.to_raw_op(experimental_parallel_interleave_dataset))
4053def experimental_parallel_interleave_dataset_eager_fallback(input_dataset, other_arguments, cycle_length, block_length, sloppy, buffer_output_elements, prefetch_input_elements, f, output_types, output_shapes, name, ctx):
4054 if not isinstance(output_types, (list, tuple)):
4055 raise TypeError(
4056 "Expected list for 'output_types' argument to "
4057 "'experimental_parallel_interleave_dataset' Op, not %r." % output_types)
4058 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4059 if not isinstance(output_shapes, (list, tuple)):
4060 raise TypeError(
4061 "Expected list for 'output_shapes' argument to "
4062 "'experimental_parallel_interleave_dataset' Op, not %r." % output_shapes)
4063 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
4064 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx)
4065 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
4066 cycle_length = _ops.convert_to_tensor(cycle_length, _dtypes.int64)
4067 block_length = _ops.convert_to_tensor(block_length, _dtypes.int64)
4068 sloppy = _ops.convert_to_tensor(sloppy, _dtypes.bool)
4069 buffer_output_elements = _ops.convert_to_tensor(buffer_output_elements, _dtypes.int64)
4070 prefetch_input_elements = _ops.convert_to_tensor(prefetch_input_elements, _dtypes.int64)
4071 _inputs_flat = [input_dataset] + list(other_arguments) + [cycle_length, block_length, sloppy, buffer_output_elements, prefetch_input_elements]
4072 _attrs = ("f", f, "Targuments", _attr_Targuments, "output_types",
4073 output_types, "output_shapes", output_shapes)
4074 _result = _execute.execute(b"ExperimentalParallelInterleaveDataset", 1,
4075 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
4076 name=name)
4077 if _execute.must_record_gradient():
4078 _execute.record_gradient(
4079 "ExperimentalParallelInterleaveDataset", _inputs_flat, _attrs, _result)
4080 _result, = _result
4081 return _result
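# --- Usage sketch (editor's addition, not machine generated) ---
# A hedged sketch of the deprecated public wrapper over this op. The file
# paths are hypothetical; `sloppy=True` opts into the non-deterministic
# behavior called out by the WARNING in the docstring above. Assumes TF 2.x.
def _demo_parallel_interleave():
  import tensorflow as tf
  filenames = tf.data.Dataset.from_tensor_slices(
      ["/tmp/a.tfrecord", "/tmp/b.tfrecord"])
  # Interleave records from up to 2 files at once, skipping a file whose
  # next record would block.
  return filenames.apply(tf.data.experimental.parallel_interleave(
      tf.data.TFRecordDataset, cycle_length=2, sloppy=True))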
4084def experimental_parse_example_dataset(input_dataset, num_parallel_calls, dense_defaults, sparse_keys, dense_keys, sparse_types, dense_shapes, output_types, output_shapes, sloppy=False, name=None):
4085 r"""Transforms `input_dataset` containing `Example` protos as vectors of DT_STRING into a dataset of `Tensor` or `SparseTensor` objects representing the parsed features.
4087 Args:
4088 input_dataset: A `Tensor` of type `variant`.
4089 num_parallel_calls: A `Tensor` of type `int64`.
4090 dense_defaults: A list of `Tensor` objects with types from: `float32`, `int64`, `string`.
4091 A dict mapping string keys to `Tensor`s.
4092 The keys of the dict must match the dense_keys of the feature.
4093 sparse_keys: A list of `strings`.
4094 A list of string keys in the examples' features.
4095 The results for these keys will be returned as `SparseTensor` objects.
4096 dense_keys: A list of `strings`.
4097 A list of Ndense string Tensors (scalars).
4098 The keys expected in the Examples' features associated with dense values.
4099 sparse_types: A list of `tf.DTypes` from: `tf.float32, tf.int64, tf.string`.
4100 A list of `DTypes` of the same length as `sparse_keys`.
4101 Only `tf.float32` (`FloatList`), `tf.int64` (`Int64List`),
4102 and `tf.string` (`BytesList`) are supported.
4103 dense_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`).
4104 List of tuples with the same length as `dense_keys`.
4105 The shape of the data for each dense feature referenced by `dense_keys`.
4106 Required for any input tensors identified by `dense_keys`. Must be
4107 either fully defined, or may contain an unknown first dimension.
4108 An unknown first dimension means the feature is treated as having
4109 a variable number of blocks, and the output shape along this dimension
4110 is considered unknown at graph build time. Padding is applied for
4111 minibatch elements smaller than the maximum number of blocks for the
4112 given feature along this dimension.
4113 output_types: A list of `tf.DTypes` that has length `>= 1`.
4114 The type list for the return values.
4115 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
4116 The list of shapes being produced.
4117 sloppy: An optional `bool`. Defaults to `False`.
4118 name: A name for the operation (optional).
4120 Returns:
4121 A `Tensor` of type `variant`.
4122 """
4123 _ctx = _context._context or _context.context()
4124 tld = _ctx._thread_local_data
4125 if tld.is_eager:
4126 try:
4127 _result = pywrap_tfe.TFE_Py_FastPathExecute(
4128 _ctx, "ExperimentalParseExampleDataset", name, input_dataset,
4129 num_parallel_calls, dense_defaults, "sparse_keys", sparse_keys,
4130 "dense_keys", dense_keys, "sparse_types", sparse_types,
4131 "dense_shapes", dense_shapes, "output_types", output_types,
4132 "output_shapes", output_shapes, "sloppy", sloppy)
4133 return _result
4134 except _core._NotOkStatusException as e:
4135 _ops.raise_from_not_ok_status(e, name)
4136 except _core._FallbackException:
4137 pass
4138 try:
4139 return experimental_parse_example_dataset_eager_fallback(
4140 input_dataset, num_parallel_calls, dense_defaults,
4141 sparse_keys=sparse_keys, dense_keys=dense_keys,
4142 sparse_types=sparse_types, dense_shapes=dense_shapes,
4143 output_types=output_types, output_shapes=output_shapes,
4144 sloppy=sloppy, name=name, ctx=_ctx)
4145 except _core._SymbolicException:
4146 pass # Add nodes to the TensorFlow graph.
4147 # Add nodes to the TensorFlow graph.
4148 if not isinstance(sparse_keys, (list, tuple)):
4149 raise TypeError(
4150 "Expected list for 'sparse_keys' argument to "
4151 "'experimental_parse_example_dataset' Op, not %r." % sparse_keys)
4152 sparse_keys = [_execute.make_str(_s, "sparse_keys") for _s in sparse_keys]
4153 if not isinstance(dense_keys, (list, tuple)):
4154 raise TypeError(
4155 "Expected list for 'dense_keys' argument to "
4156 "'experimental_parse_example_dataset' Op, not %r." % dense_keys)
4157 dense_keys = [_execute.make_str(_s, "dense_keys") for _s in dense_keys]
4158 if not isinstance(sparse_types, (list, tuple)):
4159 raise TypeError(
4160 "Expected list for 'sparse_types' argument to "
4161 "'experimental_parse_example_dataset' Op, not %r." % sparse_types)
4162 sparse_types = [_execute.make_type(_t, "sparse_types") for _t in sparse_types]
4163 if not isinstance(dense_shapes, (list, tuple)):
4164 raise TypeError(
4165 "Expected list for 'dense_shapes' argument to "
4166 "'experimental_parse_example_dataset' Op, not %r." % dense_shapes)
4167 dense_shapes = [_execute.make_shape(_s, "dense_shapes") for _s in dense_shapes]
4168 if not isinstance(output_types, (list, tuple)):
4169 raise TypeError(
4170 "Expected list for 'output_types' argument to "
4171 "'experimental_parse_example_dataset' Op, not %r." % output_types)
4172 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4173 if not isinstance(output_shapes, (list, tuple)):
4174 raise TypeError(
4175 "Expected list for 'output_shapes' argument to "
4176 "'experimental_parse_example_dataset' Op, not %r." % output_shapes)
4177 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
4178 if sloppy is None:
4179 sloppy = False
4180 sloppy = _execute.make_bool(sloppy, "sloppy")
4181 _, _, _op, _outputs = _op_def_library._apply_op_helper(
4182 "ExperimentalParseExampleDataset", input_dataset=input_dataset,
4183 num_parallel_calls=num_parallel_calls,
4184 dense_defaults=dense_defaults,
4185 sparse_keys=sparse_keys,
4186 dense_keys=dense_keys,
4187 sparse_types=sparse_types,
4188 dense_shapes=dense_shapes,
4189 output_types=output_types,
4190 output_shapes=output_shapes,
4191 sloppy=sloppy, name=name)
4192 _result = _outputs[:]
4193 if _execute.must_record_gradient():
4194 _attrs = ("sparse_keys", _op.get_attr("sparse_keys"), "dense_keys",
4195 _op.get_attr("dense_keys"), "sparse_types",
4196 _op.get_attr("sparse_types"), "Tdense", _op.get_attr("Tdense"),
4197 "dense_shapes", _op.get_attr("dense_shapes"), "output_types",
4198 _op.get_attr("output_types"), "output_shapes",
4199 _op.get_attr("output_shapes"), "sloppy",
4200 _op._get_attr_bool("sloppy"))
4201 _inputs_flat = _op.inputs
4202 _execute.record_gradient(
4203 "ExperimentalParseExampleDataset", _inputs_flat, _attrs, _result)
4204 _result, = _result
4205 return _result
4207ExperimentalParseExampleDataset = tf_export("raw_ops.ExperimentalParseExampleDataset")(_ops.to_raw_op(experimental_parse_example_dataset))
4210def experimental_parse_example_dataset_eager_fallback(input_dataset, num_parallel_calls, dense_defaults, sparse_keys, dense_keys, sparse_types, dense_shapes, output_types, output_shapes, sloppy, name, ctx):
4211 if not isinstance(sparse_keys, (list, tuple)):
4212 raise TypeError(
4213 "Expected list for 'sparse_keys' argument to "
4214 "'experimental_parse_example_dataset' Op, not %r." % sparse_keys)
4215 sparse_keys = [_execute.make_str(_s, "sparse_keys") for _s in sparse_keys]
4216 if not isinstance(dense_keys, (list, tuple)):
4217 raise TypeError(
4218 "Expected list for 'dense_keys' argument to "
4219 "'experimental_parse_example_dataset' Op, not %r." % dense_keys)
4220 dense_keys = [_execute.make_str(_s, "dense_keys") for _s in dense_keys]
4221 if not isinstance(sparse_types, (list, tuple)):
4222 raise TypeError(
4223 "Expected list for 'sparse_types' argument to "
4224 "'experimental_parse_example_dataset' Op, not %r." % sparse_types)
4225 sparse_types = [_execute.make_type(_t, "sparse_types") for _t in sparse_types]
4226 if not isinstance(dense_shapes, (list, tuple)):
4227 raise TypeError(
4228 "Expected list for 'dense_shapes' argument to "
4229 "'experimental_parse_example_dataset' Op, not %r." % dense_shapes)
4230 dense_shapes = [_execute.make_shape(_s, "dense_shapes") for _s in dense_shapes]
4231 if not isinstance(output_types, (list, tuple)):
4232 raise TypeError(
4233 "Expected list for 'output_types' argument to "
4234 "'experimental_parse_example_dataset' Op, not %r." % output_types)
4235 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4236 if not isinstance(output_shapes, (list, tuple)):
4237 raise TypeError(
4238 "Expected list for 'output_shapes' argument to "
4239 "'experimental_parse_example_dataset' Op, not %r." % output_shapes)
4240 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
4241 if sloppy is None:
4242 sloppy = False
4243 sloppy = _execute.make_bool(sloppy, "sloppy")
4244 _attr_Tdense, dense_defaults = _execute.convert_to_mixed_eager_tensors(dense_defaults, ctx)
4245 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
4246 num_parallel_calls = _ops.convert_to_tensor(num_parallel_calls, _dtypes.int64)
4247 _inputs_flat = [input_dataset, num_parallel_calls] + list(dense_defaults)
4248 _attrs = ("sparse_keys", sparse_keys, "dense_keys", dense_keys,
4249 "sparse_types", sparse_types, "Tdense", _attr_Tdense, "dense_shapes",
4250 dense_shapes, "output_types", output_types, "output_shapes", output_shapes,
4251 "sloppy", sloppy)
4252 _result = _execute.execute(b"ExperimentalParseExampleDataset", 1,
4253 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
4254 name=name)
4255 if _execute.must_record_gradient():
4256 _execute.record_gradient(
4257 "ExperimentalParseExampleDataset", _inputs_flat, _attrs, _result)
4258 _result, = _result
4259 return _result
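# --- Usage sketch (editor's addition, not machine generated) ---
# A hedged sketch via `tf.data.experimental.parse_example_dataset`, which
# derives the sparse/dense keys, types, and shapes documented above from a
# feature spec. The feature names are hypothetical; assumes TF 2.x.
def _demo_parse_example_dataset():
  import tensorflow as tf
  example = tf.train.Example(features=tf.train.Features(feature={
      "label": tf.train.Feature(int64_list=tf.train.Int64List(value=[1]))}))
  features = {
      "label": tf.io.FixedLenFeature([], tf.int64, default_value=0),
      "tokens": tf.io.VarLenFeature(tf.string),  # parsed as SparseTensor
  }
  # The op consumes *vectors* of serialized Example protos, hence batch(1).
  ds = tf.data.Dataset.from_tensors(example.SerializeToString()).batch(1)
  return ds.apply(tf.data.experimental.parse_example_dataset(features))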
4262def experimental_private_thread_pool_dataset(input_dataset, num_threads, output_types, output_shapes, name=None):
4263 r"""Creates a dataset that uses a custom thread pool to compute `input_dataset`.
4265 Args:
4266 input_dataset: A `Tensor` of type `variant`.
4267 num_threads: A `Tensor` of type `int64`.
4268 Identifies the number of threads to use for the private threadpool.
4269 output_types: A list of `tf.DTypes` that has length `>= 1`.
4270 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
4271 name: A name for the operation (optional).
4273 Returns:
4274 A `Tensor` of type `variant`.
4275 """
4276 _ctx = _context._context or _context.context()
4277 tld = _ctx._thread_local_data
4278 if tld.is_eager:
4279 try:
4280 _result = pywrap_tfe.TFE_Py_FastPathExecute(
4281 _ctx, "ExperimentalPrivateThreadPoolDataset", name, input_dataset,
4282 num_threads, "output_types", output_types, "output_shapes",
4283 output_shapes)
4284 return _result
4285 except _core._NotOkStatusException as e:
4286 _ops.raise_from_not_ok_status(e, name)
4287 except _core._FallbackException:
4288 pass
4289 try:
4290 return experimental_private_thread_pool_dataset_eager_fallback(
4291 input_dataset, num_threads, output_types=output_types,
4292 output_shapes=output_shapes, name=name, ctx=_ctx)
4293 except _core._SymbolicException:
4294 pass # Add nodes to the TensorFlow graph.
4295 # Add nodes to the TensorFlow graph.
4296 if not isinstance(output_types, (list, tuple)):
4297 raise TypeError(
4298 "Expected list for 'output_types' argument to "
4299 "'experimental_private_thread_pool_dataset' Op, not %r." % output_types)
4300 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4301 if not isinstance(output_shapes, (list, tuple)):
4302 raise TypeError(
4303 "Expected list for 'output_shapes' argument to "
4304 "'experimental_private_thread_pool_dataset' Op, not %r." % output_shapes)
4305 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
4306 _, _, _op, _outputs = _op_def_library._apply_op_helper(
4307 "ExperimentalPrivateThreadPoolDataset", input_dataset=input_dataset,
4308 num_threads=num_threads,
4309 output_types=output_types,
4310 output_shapes=output_shapes,
4311 name=name)
4312 _result = _outputs[:]
4313 if _execute.must_record_gradient():
4314 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
4315 _op.get_attr("output_shapes"))
4316 _inputs_flat = _op.inputs
4317 _execute.record_gradient(
4318 "ExperimentalPrivateThreadPoolDataset", _inputs_flat, _attrs, _result)
4319 _result, = _result
4320 return _result
4322ExperimentalPrivateThreadPoolDataset = tf_export("raw_ops.ExperimentalPrivateThreadPoolDataset")(_ops.to_raw_op(experimental_private_thread_pool_dataset))
4325def experimental_private_thread_pool_dataset_eager_fallback(input_dataset, num_threads, output_types, output_shapes, name, ctx):
4326 if not isinstance(output_types, (list, tuple)):
4327 raise TypeError(
4328 "Expected list for 'output_types' argument to "
4329 "'experimental_private_thread_pool_dataset' Op, not %r." % output_types)
4330 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4331 if not isinstance(output_shapes, (list, tuple)):
4332 raise TypeError(
4333 "Expected list for 'output_shapes' argument to "
4334 "'experimental_private_thread_pool_dataset' Op, not %r." % output_shapes)
4335 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
4336 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
4337 num_threads = _ops.convert_to_tensor(num_threads, _dtypes.int64)
4338 _inputs_flat = [input_dataset, num_threads]
4339 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
4340 _result = _execute.execute(b"ExperimentalPrivateThreadPoolDataset", 1,
4341 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
4342 name=name)
4343 if _execute.must_record_gradient():
4344 _execute.record_gradient(
4345 "ExperimentalPrivateThreadPoolDataset", _inputs_flat, _attrs, _result)
4346 _result, = _result
4347 return _result
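# --- Usage sketch (editor's addition, not machine generated) ---
# A hedged sketch: like the intra-op override above, the private thread pool
# is normally requested through `tf.data.Options`. Assumes a recent TF 2.x
# release (older releases use `options.experimental_threading`).
def _demo_private_thread_pool():
  import tensorflow as tf
  ds = tf.data.Dataset.range(100)
  opts = tf.data.Options()
  # Give this pipeline its own pool of 4 threads instead of the shared pool.
  opts.threading.private_threadpool_size = 4
  return ds.with_options(opts)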
4350def experimental_random_dataset(seed, seed2, output_types, output_shapes, name=None):
4351 r"""Creates a Dataset that returns pseudorandom numbers.
4353 Args:
4354 seed: A `Tensor` of type `int64`.
4355 A scalar seed for the random number generator. If either seed or
4356 seed2 is set to be non-zero, the random number generator is seeded
4357 by the given seed. Otherwise, a random seed is used.
4358 seed2: A `Tensor` of type `int64`.
4359 A second scalar seed to avoid seed collision.
4360 output_types: A list of `tf.DTypes` that has length `>= 1`.
4361 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
4362 name: A name for the operation (optional).
4364 Returns:
4365 A `Tensor` of type `variant`.
4366 """
4367 _ctx = _context._context or _context.context()
4368 tld = _ctx._thread_local_data
4369 if tld.is_eager:
4370 try:
4371 _result = pywrap_tfe.TFE_Py_FastPathExecute(
4372 _ctx, "ExperimentalRandomDataset", name, seed, seed2, "output_types",
4373 output_types, "output_shapes", output_shapes)
4374 return _result
4375 except _core._NotOkStatusException as e:
4376 _ops.raise_from_not_ok_status(e, name)
4377 except _core._FallbackException:
4378 pass
4379 try:
4380 return experimental_random_dataset_eager_fallback(
4381 seed, seed2, output_types=output_types, output_shapes=output_shapes,
4382 name=name, ctx=_ctx)
4383 except _core._SymbolicException:
4384 pass # Add nodes to the TensorFlow graph.
4385 # Add nodes to the TensorFlow graph.
4386 if not isinstance(output_types, (list, tuple)):
4387 raise TypeError(
4388 "Expected list for 'output_types' argument to "
4389 "'experimental_random_dataset' Op, not %r." % output_types)
4390 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4391 if not isinstance(output_shapes, (list, tuple)):
4392 raise TypeError(
4393 "Expected list for 'output_shapes' argument to "
4394 "'experimental_random_dataset' Op, not %r." % output_shapes)
4395 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
4396 _, _, _op, _outputs = _op_def_library._apply_op_helper(
4397 "ExperimentalRandomDataset", seed=seed, seed2=seed2,
4398 output_types=output_types,
4399 output_shapes=output_shapes, name=name)
4400 _result = _outputs[:]
4401 if _execute.must_record_gradient():
4402 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
4403 _op.get_attr("output_shapes"))
4404 _inputs_flat = _op.inputs
4405 _execute.record_gradient(
4406 "ExperimentalRandomDataset", _inputs_flat, _attrs, _result)
4407 _result, = _result
4408 return _result
4410ExperimentalRandomDataset = tf_export("raw_ops.ExperimentalRandomDataset")(_ops.to_raw_op(experimental_random_dataset))
4413def experimental_random_dataset_eager_fallback(seed, seed2, output_types, output_shapes, name, ctx):
4414 if not isinstance(output_types, (list, tuple)):
4415 raise TypeError(
4416 "Expected list for 'output_types' argument to "
4417 "'experimental_random_dataset' Op, not %r." % output_types)
4418 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4419 if not isinstance(output_shapes, (list, tuple)):
4420 raise TypeError(
4421 "Expected list for 'output_shapes' argument to "
4422 "'experimental_random_dataset' Op, not %r." % output_shapes)
4423 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
4424 seed = _ops.convert_to_tensor(seed, _dtypes.int64)
4425 seed2 = _ops.convert_to_tensor(seed2, _dtypes.int64)
4426 _inputs_flat = [seed, seed2]
4427 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
4428 _result = _execute.execute(b"ExperimentalRandomDataset", 1,
4429 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
4430 name=name)
4431 if _execute.must_record_gradient():
4432 _execute.record_gradient(
4433 "ExperimentalRandomDataset", _inputs_flat, _attrs, _result)
4434 _result, = _result
4435 return _result
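# --- Usage sketch (editor's addition, not machine generated) ---
# A hedged sketch via `tf.data.experimental.RandomDataset`; the single public
# `seed` argument is split into the (seed, seed2) pair documented above.
# Assumes TF 2.x, where this wrapper is deprecated but still present.
def _demo_random_dataset():
  import tensorflow as tf
  # Infinite stream of pseudorandom int64 scalars; fixing the seed makes the
  # sequence reproducible, so take(3) always yields the same three values.
  return tf.data.experimental.RandomDataset(seed=42).take(3)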
4438def experimental_rebatch_dataset(input_dataset, num_replicas, output_types, output_shapes, use_fallback=True, name=None):
4439 r"""Creates a dataset that changes the batch size.
4441 Creates a dataset that changes the batch size of the dataset to
4442 `current batch size // num_replicas`.
4444 Args:
4445 input_dataset: A `Tensor` of type `variant`.
4446 A variant tensor representing the input dataset.
4447 num_replicas: A `Tensor` of type `int64`.
4448 A scalar representing the number of replicas to distribute this batch across. As
4449 a result of this transformation the current batch size would end up being
4450 divided by this parameter.
4451 output_types: A list of `tf.DTypes` that has length `>= 1`.
4452 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
4453 use_fallback: An optional `bool`. Defaults to `True`.
4454 name: A name for the operation (optional).
4456 Returns:
4457 A `Tensor` of type `variant`.
4458 """
4459 _ctx = _context._context or _context.context()
4460 tld = _ctx._thread_local_data
4461 if tld.is_eager:
4462 try:
4463 _result = pywrap_tfe.TFE_Py_FastPathExecute(
4464 _ctx, "ExperimentalRebatchDataset", name, input_dataset, num_replicas,
4465 "output_types", output_types, "output_shapes", output_shapes,
4466 "use_fallback", use_fallback)
4467 return _result
4468 except _core._NotOkStatusException as e:
4469 _ops.raise_from_not_ok_status(e, name)
4470 except _core._FallbackException:
4471 pass
4472 try:
4473 return experimental_rebatch_dataset_eager_fallback(
4474 input_dataset, num_replicas, output_types=output_types,
4475 output_shapes=output_shapes, use_fallback=use_fallback, name=name,
4476 ctx=_ctx)
4477 except _core._SymbolicException:
4478 pass # Add nodes to the TensorFlow graph.
4479 # Add nodes to the TensorFlow graph.
4480 if not isinstance(output_types, (list, tuple)):
4481 raise TypeError(
4482 "Expected list for 'output_types' argument to "
4483 "'experimental_rebatch_dataset' Op, not %r." % output_types)
4484 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4485 if not isinstance(output_shapes, (list, tuple)):
4486 raise TypeError(
4487 "Expected list for 'output_shapes' argument to "
4488 "'experimental_rebatch_dataset' Op, not %r." % output_shapes)
4489 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
4490 if use_fallback is None:
4491 use_fallback = True
4492 use_fallback = _execute.make_bool(use_fallback, "use_fallback")
4493 _, _, _op, _outputs = _op_def_library._apply_op_helper(
4494 "ExperimentalRebatchDataset", input_dataset=input_dataset,
4495 num_replicas=num_replicas,
4496 output_types=output_types,
4497 output_shapes=output_shapes,
4498 use_fallback=use_fallback, name=name)
4499 _result = _outputs[:]
4500 if _execute.must_record_gradient():
4501 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
4502 _op.get_attr("output_shapes"), "use_fallback",
4503 _op._get_attr_bool("use_fallback"))
4504 _inputs_flat = _op.inputs
4505 _execute.record_gradient(
4506 "ExperimentalRebatchDataset", _inputs_flat, _attrs, _result)
4507 _result, = _result
4508 return _result
4510ExperimentalRebatchDataset = tf_export("raw_ops.ExperimentalRebatchDataset")(_ops.to_raw_op(experimental_rebatch_dataset))
4513def experimental_rebatch_dataset_eager_fallback(input_dataset, num_replicas, output_types, output_shapes, use_fallback, name, ctx):
4514 if not isinstance(output_types, (list, tuple)):
4515 raise TypeError(
4516 "Expected list for 'output_types' argument to "
4517 "'experimental_rebatch_dataset' Op, not %r." % output_types)
4518 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4519 if not isinstance(output_shapes, (list, tuple)):
4520 raise TypeError(
4521 "Expected list for 'output_shapes' argument to "
4522 "'experimental_rebatch_dataset' Op, not %r." % output_shapes)
4523 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
4524 if use_fallback is None:
4525 use_fallback = True
4526 use_fallback = _execute.make_bool(use_fallback, "use_fallback")
4527 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
4528 num_replicas = _ops.convert_to_tensor(num_replicas, _dtypes.int64)
4529 _inputs_flat = [input_dataset, num_replicas]
4530 _attrs = ("output_types", output_types, "output_shapes", output_shapes,
4531 "use_fallback", use_fallback)
4532 _result = _execute.execute(b"ExperimentalRebatchDataset", 1,
4533 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
4534 name=name)
4535 if _execute.must_record_gradient():
4536 _execute.record_gradient(
4537 "ExperimentalRebatchDataset", _inputs_flat, _attrs, _result)
4538 _result, = _result
4539 return _result
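# --- Usage sketch (editor's addition, not machine generated) ---
# A hedged raw-op sketch of the "batch size // num_replicas" behavior
# described above; rebatching is normally triggered internally by
# `tf.distribute`, not called directly. `_variant_tensor` is private API.
def _demo_rebatch():
  import tensorflow as tf
  ds = tf.data.Dataset.range(8).batch(4)
  # Split each batch of 4 across 2 replicas, yielding batches of 2.
  variant = tf.raw_ops.ExperimentalRebatchDataset(
      input_dataset=ds._variant_tensor, num_replicas=2,
      output_types=[tf.int64], output_shapes=[[None]])
  return tf.data.experimental.from_variant(
      variant, tf.TensorSpec([None], tf.int64))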
4542def experimental_scan_dataset(input_dataset, initial_state, other_arguments, f, output_types, output_shapes, preserve_cardinality=False, name=None):
4543 r"""Creates a dataset successively reduces `f` over the elements of `input_dataset`.
4545 Args:
4546 input_dataset: A `Tensor` of type `variant`.
4547 initial_state: A list of `Tensor` objects.
4548 other_arguments: A list of `Tensor` objects.
4549 f: A function decorated with @Defun.
4550 output_types: A list of `tf.DTypes` that has length `>= 1`.
4551 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
4552 preserve_cardinality: An optional `bool`. Defaults to `False`.
4553 name: A name for the operation (optional).
4555 Returns:
4556 A `Tensor` of type `variant`.
4557 """
4558 _ctx = _context._context or _context.context()
4559 tld = _ctx._thread_local_data
4560 if tld.is_eager:
4561 try:
4562 _result = pywrap_tfe.TFE_Py_FastPathExecute(
4563 _ctx, "ExperimentalScanDataset", name, input_dataset, initial_state,
4564 other_arguments, "f", f, "output_types", output_types,
4565 "output_shapes", output_shapes, "preserve_cardinality",
4566 preserve_cardinality)
4567 return _result
4568 except _core._NotOkStatusException as e:
4569 _ops.raise_from_not_ok_status(e, name)
4570 except _core._FallbackException:
4571 pass
4572 try:
4573 return experimental_scan_dataset_eager_fallback(
4574 input_dataset, initial_state, other_arguments, f=f,
4575 output_types=output_types, output_shapes=output_shapes,
4576 preserve_cardinality=preserve_cardinality, name=name, ctx=_ctx)
4577 except _core._SymbolicException:
4578 pass # Add nodes to the TensorFlow graph.
4579 # Add nodes to the TensorFlow graph.
4580 if not isinstance(output_types, (list, tuple)):
4581 raise TypeError(
4582 "Expected list for 'output_types' argument to "
4583 "'experimental_scan_dataset' Op, not %r." % output_types)
4584 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4585 if not isinstance(output_shapes, (list, tuple)):
4586 raise TypeError(
4587 "Expected list for 'output_shapes' argument to "
4588 "'experimental_scan_dataset' Op, not %r." % output_shapes)
4589 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
4590 if preserve_cardinality is None:
4591 preserve_cardinality = False
4592 preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality")
4593 _, _, _op, _outputs = _op_def_library._apply_op_helper(
4594 "ExperimentalScanDataset", input_dataset=input_dataset,
4595 initial_state=initial_state,
4596 other_arguments=other_arguments, f=f,
4597 output_types=output_types,
4598 output_shapes=output_shapes,
4599 preserve_cardinality=preserve_cardinality,
4600 name=name)
4601 _result = _outputs[:]
4602 if _execute.must_record_gradient():
4603 _attrs = ("f", _op.get_attr("f"), "Tstate", _op.get_attr("Tstate"),
4604 "Targuments", _op.get_attr("Targuments"), "output_types",
4605 _op.get_attr("output_types"), "output_shapes",
4606 _op.get_attr("output_shapes"), "preserve_cardinality",
4607 _op._get_attr_bool("preserve_cardinality"))
4608 _inputs_flat = _op.inputs
4609 _execute.record_gradient(
4610 "ExperimentalScanDataset", _inputs_flat, _attrs, _result)
4611 _result, = _result
4612 return _result
4614ExperimentalScanDataset = tf_export("raw_ops.ExperimentalScanDataset")(_ops.to_raw_op(experimental_scan_dataset))
4617def experimental_scan_dataset_eager_fallback(input_dataset, initial_state, other_arguments, f, output_types, output_shapes, preserve_cardinality, name, ctx):
4618 if not isinstance(output_types, (list, tuple)):
4619 raise TypeError(
4620 "Expected list for 'output_types' argument to "
4621 "'experimental_scan_dataset' Op, not %r." % output_types)
4622 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4623 if not isinstance(output_shapes, (list, tuple)):
4624 raise TypeError(
4625 "Expected list for 'output_shapes' argument to "
4626 "'experimental_scan_dataset' Op, not %r." % output_shapes)
4627 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
4628 if preserve_cardinality is None:
4629 preserve_cardinality = False
4630 preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality")
4631 _attr_Tstate, initial_state = _execute.convert_to_mixed_eager_tensors(initial_state, ctx)
4632 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx)
4633 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
4634 _inputs_flat = [input_dataset] + list(initial_state) + list(other_arguments)
4635 _attrs = ("f", f, "Tstate", _attr_Tstate, "Targuments", _attr_Targuments,
4636 "output_types", output_types, "output_shapes", output_shapes,
4637 "preserve_cardinality", preserve_cardinality)
4638 _result = _execute.execute(b"ExperimentalScanDataset", 1,
4639 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
4640 name=name)
4641 if _execute.must_record_gradient():
4642 _execute.record_gradient(
4643 "ExperimentalScanDataset", _inputs_flat, _attrs, _result)
4644 _result, = _result
4645 return _result
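# --- Usage sketch (editor's addition, not machine generated) ---
# A hedged sketch via `tf.data.experimental.scan`: `f` receives
# (state, element) and returns (new_state, output), which is how the
# `initial_state`/`Tstate` arguments above are populated. Assumes TF 2.x.
def _demo_scan():
  import tensorflow as tf
  ds = tf.data.Dataset.range(5)
  # Running sum over the input: emits 0, 1, 3, 6, 10.
  return ds.apply(tf.data.experimental.scan(
      tf.constant(0, dtype=tf.int64),
      lambda state, x: (state + x, state + x)))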
4648def experimental_set_stats_aggregator_dataset(input_dataset, stats_aggregator, tag, counter_prefix, output_types, output_shapes, name=None):
4649 r"""TODO: add doc.
4651 Args:
4652 input_dataset: A `Tensor` of type `variant`.
4653 stats_aggregator: A `Tensor` of type `resource`.
4654 tag: A `Tensor` of type `string`.
4655 counter_prefix: A `Tensor` of type `string`.
4656 output_types: A list of `tf.DTypes` that has length `>= 1`.
4657 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
4658 name: A name for the operation (optional).
4660 Returns:
4661 A `Tensor` of type `variant`.
4662 """
4663 _ctx = _context._context or _context.context()
4664 tld = _ctx._thread_local_data
4665 if tld.is_eager:
4666 try:
4667 _result = pywrap_tfe.TFE_Py_FastPathExecute(
4668 _ctx, "ExperimentalSetStatsAggregatorDataset", name, input_dataset,
4669 stats_aggregator, tag, counter_prefix, "output_types", output_types,
4670 "output_shapes", output_shapes)
4671 return _result
4672 except _core._NotOkStatusException as e:
4673 _ops.raise_from_not_ok_status(e, name)
4674 except _core._FallbackException:
4675 pass
4676 try:
4677 return experimental_set_stats_aggregator_dataset_eager_fallback(
4678 input_dataset, stats_aggregator, tag, counter_prefix,
4679 output_types=output_types, output_shapes=output_shapes, name=name,
4680 ctx=_ctx)
4681 except _core._SymbolicException:
4682 pass # Add nodes to the TensorFlow graph.
4683 # Add nodes to the TensorFlow graph.
4684 if not isinstance(output_types, (list, tuple)):
4685 raise TypeError(
4686 "Expected list for 'output_types' argument to "
4687 "'experimental_set_stats_aggregator_dataset' Op, not %r." % output_types)
4688 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4689 if not isinstance(output_shapes, (list, tuple)):
4690 raise TypeError(
4691 "Expected list for 'output_shapes' argument to "
4692 "'experimental_set_stats_aggregator_dataset' Op, not %r." % output_shapes)
4693 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
4694 _, _, _op, _outputs = _op_def_library._apply_op_helper(
4695 "ExperimentalSetStatsAggregatorDataset", input_dataset=input_dataset,
4696 stats_aggregator=stats_aggregator,
4697 tag=tag,
4698 counter_prefix=counter_prefix,
4699 output_types=output_types,
4700 output_shapes=output_shapes,
4701 name=name)
4702 _result = _outputs[:]
4703 if _execute.must_record_gradient():
4704 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
4705 _op.get_attr("output_shapes"))
4706 _inputs_flat = _op.inputs
4707 _execute.record_gradient(
4708 "ExperimentalSetStatsAggregatorDataset", _inputs_flat, _attrs, _result)
4709 _result, = _result
4710 return _result
4712ExperimentalSetStatsAggregatorDataset = tf_export("raw_ops.ExperimentalSetStatsAggregatorDataset")(_ops.to_raw_op(experimental_set_stats_aggregator_dataset))
4715def experimental_set_stats_aggregator_dataset_eager_fallback(input_dataset, stats_aggregator, tag, counter_prefix, output_types, output_shapes, name, ctx):
4716 if not isinstance(output_types, (list, tuple)):
4717 raise TypeError(
4718 "Expected list for 'output_types' argument to "
4719 "'experimental_set_stats_aggregator_dataset' Op, not %r." % output_types)
4720 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4721 if not isinstance(output_shapes, (list, tuple)):
4722 raise TypeError(
4723 "Expected list for 'output_shapes' argument to "
4724 "'experimental_set_stats_aggregator_dataset' Op, not %r." % output_shapes)
4725 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
4726 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
4727 stats_aggregator = _ops.convert_to_tensor(stats_aggregator, _dtypes.resource)
4728 tag = _ops.convert_to_tensor(tag, _dtypes.string)
4729 counter_prefix = _ops.convert_to_tensor(counter_prefix, _dtypes.string)
4730 _inputs_flat = [input_dataset, stats_aggregator, tag, counter_prefix]
4731 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
4732 _result = _execute.execute(b"ExperimentalSetStatsAggregatorDataset", 1,
4733 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
4734 name=name)
4735 if _execute.must_record_gradient():
4736 _execute.record_gradient(
4737 "ExperimentalSetStatsAggregatorDataset", _inputs_flat, _attrs, _result)
4738 _result, = _result
4739 return _result
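# --- Usage sketch (editor's addition, not machine generated) ---
# A hedged raw-op sketch: a stats-aggregator resource is attached to the
# pipeline so the runtime can record statistics under `tag`/`counter_prefix`.
# `ExperimentalStatsAggregatorHandle` is assumed to come from this same
# generated module; `_variant_tensor` is private API. Assumes TF 2.x.
def _demo_set_stats_aggregator():
  import tensorflow as tf
  ds = tf.data.Dataset.range(10)
  agg = tf.raw_ops.ExperimentalStatsAggregatorHandle()
  variant = tf.raw_ops.ExperimentalSetStatsAggregatorDataset(
      input_dataset=ds._variant_tensor, stats_aggregator=agg,
      tag="demo", counter_prefix="demo",
      output_types=[tf.int64], output_shapes=[[]])
  return tf.data.experimental.from_variant(
      variant, tf.TensorSpec([], tf.int64))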
4742def experimental_sleep_dataset(input_dataset, sleep_microseconds, output_types, output_shapes, name=None):
4743 r"""TODO: add doc.
4745 Args:
4746 input_dataset: A `Tensor` of type `variant`.
4747 sleep_microseconds: A `Tensor` of type `int64`.
4748 output_types: A list of `tf.DTypes` that has length `>= 1`.
4749 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
4750 name: A name for the operation (optional).
4752 Returns:
4753 A `Tensor` of type `variant`.
4754 """
4755 _ctx = _context._context or _context.context()
4756 tld = _ctx._thread_local_data
4757 if tld.is_eager:
4758 try:
4759 _result = pywrap_tfe.TFE_Py_FastPathExecute(
4760 _ctx, "ExperimentalSleepDataset", name, input_dataset,
4761 sleep_microseconds, "output_types", output_types, "output_shapes",
4762 output_shapes)
4763 return _result
4764 except _core._NotOkStatusException as e:
4765 _ops.raise_from_not_ok_status(e, name)
4766 except _core._FallbackException:
4767 pass
4768 try:
4769 return experimental_sleep_dataset_eager_fallback(
4770 input_dataset, sleep_microseconds, output_types=output_types,
4771 output_shapes=output_shapes, name=name, ctx=_ctx)
4772 except _core._SymbolicException:
4773 pass # Add nodes to the TensorFlow graph.
4774 # Add nodes to the TensorFlow graph.
4775 if not isinstance(output_types, (list, tuple)):
4776 raise TypeError(
4777 "Expected list for 'output_types' argument to "
4778 "'experimental_sleep_dataset' Op, not %r." % output_types)
4779 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4780 if not isinstance(output_shapes, (list, tuple)):
4781 raise TypeError(
4782 "Expected list for 'output_shapes' argument to "
4783 "'experimental_sleep_dataset' Op, not %r." % output_shapes)
4784 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
4785 _, _, _op, _outputs = _op_def_library._apply_op_helper(
4786 "ExperimentalSleepDataset", input_dataset=input_dataset,
4787 sleep_microseconds=sleep_microseconds,
4788 output_types=output_types,
4789 output_shapes=output_shapes, name=name)
4790 _result = _outputs[:]
4791 if _execute.must_record_gradient():
4792 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
4793 _op.get_attr("output_shapes"))
4794 _inputs_flat = _op.inputs
4795 _execute.record_gradient(
4796 "ExperimentalSleepDataset", _inputs_flat, _attrs, _result)
4797 _result, = _result
4798 return _result
4800ExperimentalSleepDataset = tf_export("raw_ops.ExperimentalSleepDataset")(_ops.to_raw_op(experimental_sleep_dataset))
4803def experimental_sleep_dataset_eager_fallback(input_dataset, sleep_microseconds, output_types, output_shapes, name, ctx):
4804 if not isinstance(output_types, (list, tuple)):
4805 raise TypeError(
4806 "Expected list for 'output_types' argument to "
4807 "'experimental_sleep_dataset' Op, not %r." % output_types)
4808 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4809 if not isinstance(output_shapes, (list, tuple)):
4810 raise TypeError(
4811 "Expected list for 'output_shapes' argument to "
4812 "'experimental_sleep_dataset' Op, not %r." % output_shapes)
4813 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
4814 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
4815 sleep_microseconds = _ops.convert_to_tensor(sleep_microseconds, _dtypes.int64)
4816 _inputs_flat = [input_dataset, sleep_microseconds]
4817 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
4818 _result = _execute.execute(b"ExperimentalSleepDataset", 1,
4819 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
4820 name=name)
4821 if _execute.must_record_gradient():
4822 _execute.record_gradient(
4823 "ExperimentalSleepDataset", _inputs_flat, _attrs, _result)
4824 _result, = _result
4825 return _result
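
# Example (illustrative sketch; assumes TF 2.x eager execution): driving the
# raw ExperimentalSleepDataset op exported above. The public helpers
# `tf.data.experimental.to_variant`/`from_variant` convert between
# `tf.data.Dataset` objects and the variant tensors this op consumes; the
# sleep duration and element spec below are illustrative assumptions.
def _example_experimental_sleep_dataset():
  import tensorflow as tf
  ds = tf.data.Dataset.range(3)
  variant = tf.raw_ops.ExperimentalSleepDataset(
      input_dataset=tf.data.experimental.to_variant(ds),
      sleep_microseconds=1000,  # sleep ~1 ms before producing each element
      output_types=[tf.int64],
      output_shapes=[tf.TensorShape([])])
  slow_ds = tf.data.experimental.from_variant(
      variant, structure=tf.TensorSpec([], tf.int64))
  return list(slow_ds.as_numpy_iterator())  # [0, 1, 2]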
4828def experimental_sliding_window_dataset(input_dataset, window_size, window_shift, window_stride, output_types, output_shapes, name=None):
4829 r"""Creates a dataset that passes a sliding window over `input_dataset`.
4831 Args:
4832 input_dataset: A `Tensor` of type `variant`.
4833 window_size: A `Tensor` of type `int64`.
4834 A scalar representing the number of elements in the
4835 sliding window.
4836 window_shift: A `Tensor` of type `int64`.
4837  A scalar representing the number of input elements by which the
4838  sliding window advances on each iteration. It must be positive.
4839  window_stride: A `Tensor` of type `int64`.
4840  A scalar representing the stride between the input elements selected
4841  into each sliding window. It must be positive.
4842 output_types: A list of `tf.DTypes` that has length `>= 1`.
4843 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
4844 name: A name for the operation (optional).
4846 Returns:
4847 A `Tensor` of type `variant`.
4848 """
4849 _ctx = _context._context or _context.context()
4850 tld = _ctx._thread_local_data
4851 if tld.is_eager:
4852 try:
4853 _result = pywrap_tfe.TFE_Py_FastPathExecute(
4854 _ctx, "ExperimentalSlidingWindowDataset", name, input_dataset,
4855 window_size, window_shift, window_stride, "output_types",
4856 output_types, "output_shapes", output_shapes)
4857 return _result
4858 except _core._NotOkStatusException as e:
4859 _ops.raise_from_not_ok_status(e, name)
4860 except _core._FallbackException:
4861 pass
4862 try:
4863 return experimental_sliding_window_dataset_eager_fallback(
4864 input_dataset, window_size, window_shift, window_stride,
4865 output_types=output_types, output_shapes=output_shapes, name=name,
4866 ctx=_ctx)
4867 except _core._SymbolicException:
4868 pass # Add nodes to the TensorFlow graph.
4869 # Add nodes to the TensorFlow graph.
4870 if not isinstance(output_types, (list, tuple)):
4871 raise TypeError(
4872 "Expected list for 'output_types' argument to "
4873 "'experimental_sliding_window_dataset' Op, not %r." % output_types)
4874 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4875 if not isinstance(output_shapes, (list, tuple)):
4876 raise TypeError(
4877 "Expected list for 'output_shapes' argument to "
4878 "'experimental_sliding_window_dataset' Op, not %r." % output_shapes)
4879 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
4880 _, _, _op, _outputs = _op_def_library._apply_op_helper(
4881 "ExperimentalSlidingWindowDataset", input_dataset=input_dataset,
4882 window_size=window_size,
4883 window_shift=window_shift,
4884 window_stride=window_stride,
4885 output_types=output_types,
4886 output_shapes=output_shapes,
4887 name=name)
4888 _result = _outputs[:]
4889 if _execute.must_record_gradient():
4890 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
4891 _op.get_attr("output_shapes"))
4892 _inputs_flat = _op.inputs
4893 _execute.record_gradient(
4894 "ExperimentalSlidingWindowDataset", _inputs_flat, _attrs, _result)
4895 _result, = _result
4896 return _result
4898ExperimentalSlidingWindowDataset = tf_export("raw_ops.ExperimentalSlidingWindowDataset")(_ops.to_raw_op(experimental_sliding_window_dataset))
4901def experimental_sliding_window_dataset_eager_fallback(input_dataset, window_size, window_shift, window_stride, output_types, output_shapes, name, ctx):
4902 if not isinstance(output_types, (list, tuple)):
4903 raise TypeError(
4904 "Expected list for 'output_types' argument to "
4905 "'experimental_sliding_window_dataset' Op, not %r." % output_types)
4906 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4907 if not isinstance(output_shapes, (list, tuple)):
4908 raise TypeError(
4909 "Expected list for 'output_shapes' argument to "
4910 "'experimental_sliding_window_dataset' Op, not %r." % output_shapes)
4911 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
4912 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
4913 window_size = _ops.convert_to_tensor(window_size, _dtypes.int64)
4914 window_shift = _ops.convert_to_tensor(window_shift, _dtypes.int64)
4915 window_stride = _ops.convert_to_tensor(window_stride, _dtypes.int64)
4916 _inputs_flat = [input_dataset, window_size, window_shift, window_stride]
4917 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
4918 _result = _execute.execute(b"ExperimentalSlidingWindowDataset", 1,
4919 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
4920 name=name)
4921 if _execute.must_record_gradient():
4922 _execute.record_gradient(
4923 "ExperimentalSlidingWindowDataset", _inputs_flat, _attrs, _result)
4924 _result, = _result
4925 return _result
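
# Example (illustrative sketch; assumes TF 2.x eager execution): invoking the
# raw ExperimentalSlidingWindowDataset op. With window_size=3, window_shift=1
# and window_stride=1 over range(6), each output element should be a length-3
# window of consecutive inputs; the shapes below are illustrative assumptions.
def _example_experimental_sliding_window_dataset():
  import tensorflow as tf
  ds = tf.data.Dataset.range(6)
  variant = tf.raw_ops.ExperimentalSlidingWindowDataset(
      input_dataset=tf.data.experimental.to_variant(ds),
      window_size=3, window_shift=1, window_stride=1,
      output_types=[tf.int64],
      output_shapes=[tf.TensorShape([3])])
  windows = tf.data.experimental.from_variant(
      variant, structure=tf.TensorSpec([3], tf.int64))
  return list(windows.as_numpy_iterator())  # [0 1 2], [1 2 3], [2 3 4], [3 4 5]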
4928def experimental_sql_dataset(driver_name, data_source_name, query, output_types, output_shapes, name=None):
4929 r"""Creates a dataset that executes a SQL query and emits rows of the result set.
4931 Args:
4932 driver_name: A `Tensor` of type `string`.
4933 The database type. Currently, the only supported type is 'sqlite'.
4934 data_source_name: A `Tensor` of type `string`.
4935 A connection string to connect to the database.
4936 query: A `Tensor` of type `string`. A SQL query to execute.
4937 output_types: A list of `tf.DTypes` that has length `>= 1`.
4938 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
4939 name: A name for the operation (optional).
4941 Returns:
4942 A `Tensor` of type `variant`.
4943 """
4944 _ctx = _context._context or _context.context()
4945 tld = _ctx._thread_local_data
4946 if tld.is_eager:
4947 try:
4948 _result = pywrap_tfe.TFE_Py_FastPathExecute(
4949 _ctx, "ExperimentalSqlDataset", name, driver_name, data_source_name,
4950 query, "output_types", output_types, "output_shapes", output_shapes)
4951 return _result
4952 except _core._NotOkStatusException as e:
4953 _ops.raise_from_not_ok_status(e, name)
4954 except _core._FallbackException:
4955 pass
4956 try:
4957 return experimental_sql_dataset_eager_fallback(
4958 driver_name, data_source_name, query, output_types=output_types,
4959 output_shapes=output_shapes, name=name, ctx=_ctx)
4960 except _core._SymbolicException:
4961 pass # Add nodes to the TensorFlow graph.
4962 # Add nodes to the TensorFlow graph.
4963 if not isinstance(output_types, (list, tuple)):
4964 raise TypeError(
4965 "Expected list for 'output_types' argument to "
4966 "'experimental_sql_dataset' Op, not %r." % output_types)
4967 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4968 if not isinstance(output_shapes, (list, tuple)):
4969 raise TypeError(
4970 "Expected list for 'output_shapes' argument to "
4971 "'experimental_sql_dataset' Op, not %r." % output_shapes)
4972 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
4973 _, _, _op, _outputs = _op_def_library._apply_op_helper(
4974 "ExperimentalSqlDataset", driver_name=driver_name,
4975 data_source_name=data_source_name,
4976 query=query, output_types=output_types,
4977 output_shapes=output_shapes, name=name)
4978 _result = _outputs[:]
4979 if _execute.must_record_gradient():
4980 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
4981 _op.get_attr("output_shapes"))
4982 _inputs_flat = _op.inputs
4983 _execute.record_gradient(
4984 "ExperimentalSqlDataset", _inputs_flat, _attrs, _result)
4985 _result, = _result
4986 return _result
4988ExperimentalSqlDataset = tf_export("raw_ops.ExperimentalSqlDataset")(_ops.to_raw_op(experimental_sql_dataset))
4991def experimental_sql_dataset_eager_fallback(driver_name, data_source_name, query, output_types, output_shapes, name, ctx):
4992 if not isinstance(output_types, (list, tuple)):
4993 raise TypeError(
4994 "Expected list for 'output_types' argument to "
4995 "'experimental_sql_dataset' Op, not %r." % output_types)
4996 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
4997 if not isinstance(output_shapes, (list, tuple)):
4998 raise TypeError(
4999 "Expected list for 'output_shapes' argument to "
5000 "'experimental_sql_dataset' Op, not %r." % output_shapes)
5001 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
5002 driver_name = _ops.convert_to_tensor(driver_name, _dtypes.string)
5003 data_source_name = _ops.convert_to_tensor(data_source_name, _dtypes.string)
5004 query = _ops.convert_to_tensor(query, _dtypes.string)
5005 _inputs_flat = [driver_name, data_source_name, query]
5006 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
5007 _result = _execute.execute(b"ExperimentalSqlDataset", 1,
5008 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
5009 name=name)
5010 if _execute.must_record_gradient():
5011 _execute.record_gradient(
5012 "ExperimentalSqlDataset", _inputs_flat, _attrs, _result)
5013 _result, = _result
5014 return _result
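
# Example (illustrative sketch): the public `tf.data.experimental.SqlDataset`
# wrapper sits on top of this op. The database file and query below are
# hypothetical; only the 'sqlite' driver is supported, per the docstring above.
def _example_sql_dataset():
  import tensorflow as tf
  ds = tf.data.experimental.SqlDataset(
      driver_name="sqlite",
      data_source_name="/tmp/people.db",   # hypothetical database file
      query="SELECT name, age FROM people",
      output_types=(tf.string, tf.int32))
  for name, age in ds.take(2):
    print(name.numpy(), age.numpy())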
5017def experimental_stats_aggregator_handle(container="", shared_name="", name=None):
5018 r"""Creates a statistics manager resource.
5020 Args:
5021 container: An optional `string`. Defaults to `""`.
5022 shared_name: An optional `string`. Defaults to `""`.
5023 name: A name for the operation (optional).
5025 Returns:
5026 A `Tensor` of type `resource`.
5027 """
5028 _ctx = _context._context or _context.context()
5029 tld = _ctx._thread_local_data
5030 if tld.is_eager:
5031 try:
5032 _result = pywrap_tfe.TFE_Py_FastPathExecute(
5033 _ctx, "ExperimentalStatsAggregatorHandle", name, "container",
5034 container, "shared_name", shared_name)
5035 return _result
5036 except _core._NotOkStatusException as e:
5037 _ops.raise_from_not_ok_status(e, name)
5038 except _core._FallbackException:
5039 pass
5040 try:
5041 return experimental_stats_aggregator_handle_eager_fallback(
5042 container=container, shared_name=shared_name, name=name, ctx=_ctx)
5043 except _core._SymbolicException:
5044 pass # Add nodes to the TensorFlow graph.
5045 # Add nodes to the TensorFlow graph.
5046 if container is None:
5047 container = ""
5048 container = _execute.make_str(container, "container")
5049 if shared_name is None:
5050 shared_name = ""
5051 shared_name = _execute.make_str(shared_name, "shared_name")
5052 _, _, _op, _outputs = _op_def_library._apply_op_helper(
5053 "ExperimentalStatsAggregatorHandle", container=container,
5054 shared_name=shared_name,
5055 name=name)
5056 _result = _outputs[:]
5057 if _execute.must_record_gradient():
5058 _attrs = ("container", _op.get_attr("container"), "shared_name",
5059 _op.get_attr("shared_name"))
5060 _inputs_flat = _op.inputs
5061 _execute.record_gradient(
5062 "ExperimentalStatsAggregatorHandle", _inputs_flat, _attrs, _result)
5063 _result, = _result
5064 return _result
5066ExperimentalStatsAggregatorHandle = tf_export("raw_ops.ExperimentalStatsAggregatorHandle")(_ops.to_raw_op(experimental_stats_aggregator_handle))
5069def experimental_stats_aggregator_handle_eager_fallback(container, shared_name, name, ctx):
5070 if container is None:
5071 container = ""
5072 container = _execute.make_str(container, "container")
5073 if shared_name is None:
5074 shared_name = ""
5075 shared_name = _execute.make_str(shared_name, "shared_name")
5076 _inputs_flat = []
5077 _attrs = ("container", container, "shared_name", shared_name)
5078 _result = _execute.execute(b"ExperimentalStatsAggregatorHandle", 1,
5079 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
5080 name=name)
5081 if _execute.must_record_gradient():
5082 _execute.record_gradient(
5083 "ExperimentalStatsAggregatorHandle", _inputs_flat, _attrs, _result)
5084 _result, = _result
5085 return _result
5088def experimental_stats_aggregator_summary(iterator, name=None):
5089 r"""Produces a summary of any statistics recorded by the given statistics manager.
5091 Args:
5092  iterator: A `Tensor` of type `resource`. A handle to a statistics aggregator resource, e.g. as produced by `ExperimentalStatsAggregatorHandle`.
5093 name: A name for the operation (optional).
5095 Returns:
5096 A `Tensor` of type `string`.
5097 """
5098 _ctx = _context._context or _context.context()
5099 tld = _ctx._thread_local_data
5100 if tld.is_eager:
5101 try:
5102 _result = pywrap_tfe.TFE_Py_FastPathExecute(
5103 _ctx, "ExperimentalStatsAggregatorSummary", name, iterator)
5104 return _result
5105 except _core._NotOkStatusException as e:
5106 _ops.raise_from_not_ok_status(e, name)
5107 except _core._FallbackException:
5108 pass
5109 try:
5110 return experimental_stats_aggregator_summary_eager_fallback(
5111 iterator, name=name, ctx=_ctx)
5112 except _core._SymbolicException:
5113 pass # Add nodes to the TensorFlow graph.
5114 # Add nodes to the TensorFlow graph.
5115 _, _, _op, _outputs = _op_def_library._apply_op_helper(
5116 "ExperimentalStatsAggregatorSummary", iterator=iterator, name=name)
5117 _result = _outputs[:]
5118 if _execute.must_record_gradient():
5119 _attrs = ()
5120 _inputs_flat = _op.inputs
5121 _execute.record_gradient(
5122 "ExperimentalStatsAggregatorSummary", _inputs_flat, _attrs, _result)
5123 _result, = _result
5124 return _result
5126ExperimentalStatsAggregatorSummary = tf_export("raw_ops.ExperimentalStatsAggregatorSummary")(_ops.to_raw_op(experimental_stats_aggregator_summary))
5129def experimental_stats_aggregator_summary_eager_fallback(iterator, name, ctx):
5130 iterator = _ops.convert_to_tensor(iterator, _dtypes.resource)
5131 _inputs_flat = [iterator]
5132 _attrs = None
5133 _result = _execute.execute(b"ExperimentalStatsAggregatorSummary", 1,
5134 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
5135 name=name)
5136 if _execute.must_record_gradient():
5137 _execute.record_gradient(
5138 "ExperimentalStatsAggregatorSummary", _inputs_flat, _attrs, _result)
5139 _result, = _result
5140 return _result
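
# Example (illustrative sketch; assumes TF 2.x eager execution): wiring the
# stats-aggregator ops together. A handle is created, attached to a dataset
# via ExperimentalSetStatsAggregatorDataset, and a summary is read back. Note
# that the summary op's input is named `iterator` but takes the aggregator
# handle; the tag and prefix strings below are illustrative.
def _example_stats_aggregator():
  import tensorflow as tf
  handle = tf.raw_ops.ExperimentalStatsAggregatorHandle(
      container="", shared_name="demo_stats")
  ds = tf.data.Dataset.range(3)
  _ = tf.raw_ops.ExperimentalSetStatsAggregatorDataset(
      input_dataset=tf.data.experimental.to_variant(ds),
      stats_aggregator=handle, tag="demo", counter_prefix="",
      output_types=[tf.int64], output_shapes=[tf.TensorShape([])])
  # Scalar `string` tensor, believed to hold a serialized Summary proto.
  return tf.raw_ops.ExperimentalStatsAggregatorSummary(iterator=handle)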
5143def experimental_take_while_dataset(input_dataset, other_arguments, predicate, output_types, output_shapes, name=None):
5144  r"""Creates a dataset that stops iteration when `predicate` is false.
5146 The `predicate` function must return a scalar boolean and accept the
5147 following arguments:
5149 * One tensor for each component of an element of `input_dataset`.
5150 * One tensor for each value in `other_arguments`.
5152 Args:
5153 input_dataset: A `Tensor` of type `variant`.
5154 other_arguments: A list of `Tensor` objects.
5155 A list of tensors, typically values that were captured when
5156 building a closure for `predicate`.
5157 predicate: A function decorated with @Defun.
5158 A function returning a scalar boolean.
5159 output_types: A list of `tf.DTypes` that has length `>= 1`.
5160 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
5161 name: A name for the operation (optional).
5163 Returns:
5164 A `Tensor` of type `variant`.
5165 """
5166 _ctx = _context._context or _context.context()
5167 tld = _ctx._thread_local_data
5168 if tld.is_eager:
5169 try:
5170 _result = pywrap_tfe.TFE_Py_FastPathExecute(
5171 _ctx, "ExperimentalTakeWhileDataset", name, input_dataset,
5172 other_arguments, "predicate", predicate, "output_types", output_types,
5173 "output_shapes", output_shapes)
5174 return _result
5175 except _core._NotOkStatusException as e:
5176 _ops.raise_from_not_ok_status(e, name)
5177 except _core._FallbackException:
5178 pass
5179 try:
5180 return experimental_take_while_dataset_eager_fallback(
5181 input_dataset, other_arguments, predicate=predicate,
5182 output_types=output_types, output_shapes=output_shapes, name=name,
5183 ctx=_ctx)
5184 except _core._SymbolicException:
5185 pass # Add nodes to the TensorFlow graph.
5186 # Add nodes to the TensorFlow graph.
5187 if not isinstance(output_types, (list, tuple)):
5188 raise TypeError(
5189 "Expected list for 'output_types' argument to "
5190 "'experimental_take_while_dataset' Op, not %r." % output_types)
5191 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
5192 if not isinstance(output_shapes, (list, tuple)):
5193 raise TypeError(
5194 "Expected list for 'output_shapes' argument to "
5195 "'experimental_take_while_dataset' Op, not %r." % output_shapes)
5196 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
5197 _, _, _op, _outputs = _op_def_library._apply_op_helper(
5198 "ExperimentalTakeWhileDataset", input_dataset=input_dataset,
5199 other_arguments=other_arguments,
5200 predicate=predicate,
5201 output_types=output_types,
5202 output_shapes=output_shapes,
5203 name=name)
5204 _result = _outputs[:]
5205 if _execute.must_record_gradient():
5206 _attrs = ("predicate", _op.get_attr("predicate"), "Targuments",
5207 _op.get_attr("Targuments"), "output_types",
5208 _op.get_attr("output_types"), "output_shapes",
5209 _op.get_attr("output_shapes"))
5210 _inputs_flat = _op.inputs
5211 _execute.record_gradient(
5212 "ExperimentalTakeWhileDataset", _inputs_flat, _attrs, _result)
5213 _result, = _result
5214 return _result
5216ExperimentalTakeWhileDataset = tf_export("raw_ops.ExperimentalTakeWhileDataset")(_ops.to_raw_op(experimental_take_while_dataset))
5219def experimental_take_while_dataset_eager_fallback(input_dataset, other_arguments, predicate, output_types, output_shapes, name, ctx):
5220 if not isinstance(output_types, (list, tuple)):
5221 raise TypeError(
5222 "Expected list for 'output_types' argument to "
5223 "'experimental_take_while_dataset' Op, not %r." % output_types)
5224 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
5225 if not isinstance(output_shapes, (list, tuple)):
5226 raise TypeError(
5227 "Expected list for 'output_shapes' argument to "
5228 "'experimental_take_while_dataset' Op, not %r." % output_shapes)
5229 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
5230 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx)
5231 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
5232 _inputs_flat = [input_dataset] + list(other_arguments)
5233 _attrs = ("predicate", predicate, "Targuments", _attr_Targuments,
5234 "output_types", output_types, "output_shapes", output_shapes)
5235 _result = _execute.execute(b"ExperimentalTakeWhileDataset", 1,
5236 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
5237 name=name)
5238 if _execute.must_record_gradient():
5239 _execute.record_gradient(
5240 "ExperimentalTakeWhileDataset", _inputs_flat, _attrs, _result)
5241 _result, = _result
5242 return _result
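
# Example (illustrative sketch): the public `tf.data.experimental.take_while`
# transformation is the user-facing counterpart of this op; iteration stops at
# the first element for which the predicate returns False.
def _example_take_while():
  import tensorflow as tf
  ds = tf.data.Dataset.range(10).apply(
      tf.data.experimental.take_while(lambda x: x < 5))
  return list(ds.as_numpy_iterator())  # [0, 1, 2, 3, 4]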
5245def experimental_thread_pool_dataset(input_dataset, thread_pool, output_types, output_shapes, name=None):
5246 r"""Creates a dataset that uses a custom thread pool to compute `input_dataset`.
5248 Args:
5249 input_dataset: A `Tensor` of type `variant`.
5250 thread_pool: A `Tensor` of type `resource`.
5251 A resource produced by the ThreadPoolHandle op.
5252 output_types: A list of `tf.DTypes` that has length `>= 1`.
5253 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
5254 name: A name for the operation (optional).
5256 Returns:
5257 A `Tensor` of type `variant`.
5258 """
5259 _ctx = _context._context or _context.context()
5260 tld = _ctx._thread_local_data
5261 if tld.is_eager:
5262 try:
5263 _result = pywrap_tfe.TFE_Py_FastPathExecute(
5264 _ctx, "ExperimentalThreadPoolDataset", name, input_dataset,
5265 thread_pool, "output_types", output_types, "output_shapes",
5266 output_shapes)
5267 return _result
5268 except _core._NotOkStatusException as e:
5269 _ops.raise_from_not_ok_status(e, name)
5270 except _core._FallbackException:
5271 pass
5272 try:
5273 return experimental_thread_pool_dataset_eager_fallback(
5274 input_dataset, thread_pool, output_types=output_types,
5275 output_shapes=output_shapes, name=name, ctx=_ctx)
5276 except _core._SymbolicException:
5277 pass # Add nodes to the TensorFlow graph.
5278 # Add nodes to the TensorFlow graph.
5279 if not isinstance(output_types, (list, tuple)):
5280 raise TypeError(
5281 "Expected list for 'output_types' argument to "
5282 "'experimental_thread_pool_dataset' Op, not %r." % output_types)
5283 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
5284 if not isinstance(output_shapes, (list, tuple)):
5285 raise TypeError(
5286 "Expected list for 'output_shapes' argument to "
5287 "'experimental_thread_pool_dataset' Op, not %r." % output_shapes)
5288 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
5289 _, _, _op, _outputs = _op_def_library._apply_op_helper(
5290 "ExperimentalThreadPoolDataset", input_dataset=input_dataset,
5291 thread_pool=thread_pool,
5292 output_types=output_types,
5293 output_shapes=output_shapes,
5294 name=name)
5295 _result = _outputs[:]
5296 if _execute.must_record_gradient():
5297 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
5298 _op.get_attr("output_shapes"))
5299 _inputs_flat = _op.inputs
5300 _execute.record_gradient(
5301 "ExperimentalThreadPoolDataset", _inputs_flat, _attrs, _result)
5302 _result, = _result
5303 return _result
5305ExperimentalThreadPoolDataset = tf_export("raw_ops.ExperimentalThreadPoolDataset")(_ops.to_raw_op(experimental_thread_pool_dataset))
5308def experimental_thread_pool_dataset_eager_fallback(input_dataset, thread_pool, output_types, output_shapes, name, ctx):
5309 if not isinstance(output_types, (list, tuple)):
5310 raise TypeError(
5311 "Expected list for 'output_types' argument to "
5312 "'experimental_thread_pool_dataset' Op, not %r." % output_types)
5313 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
5314 if not isinstance(output_shapes, (list, tuple)):
5315 raise TypeError(
5316 "Expected list for 'output_shapes' argument to "
5317 "'experimental_thread_pool_dataset' Op, not %r." % output_shapes)
5318 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
5319 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
5320 thread_pool = _ops.convert_to_tensor(thread_pool, _dtypes.resource)
5321 _inputs_flat = [input_dataset, thread_pool]
5322 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
5323 _result = _execute.execute(b"ExperimentalThreadPoolDataset", 1,
5324 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
5325 name=name)
5326 if _execute.must_record_gradient():
5327 _execute.record_gradient(
5328 "ExperimentalThreadPoolDataset", _inputs_flat, _attrs, _result)
5329 _result, = _result
5330 return _result
5333def experimental_thread_pool_handle(num_threads, display_name, max_intra_op_parallelism=1, container="", shared_name="", name=None):
5334  r"""Creates a custom thread pool resource for use with `ExperimentalThreadPoolDataset`.
5336 Args:
5337 num_threads: An `int`. The number of threads in the thread pool.
5338 display_name: A `string`.
5339 A human-readable name for the threads that may be visible in some
5340 visualizations.
5342 max_intra_op_parallelism: An optional `int`. Defaults to `1`.
5343 The maximum degree of parallelism to use within operations that execute on this
5344 threadpool.
5345 container: An optional `string`. Defaults to `""`.
5346 shared_name: An optional `string`. Defaults to `""`.
5347 name: A name for the operation (optional).
5349 Returns:
5350 A `Tensor` of type `resource`.
5351 """
5352 _ctx = _context._context or _context.context()
5353 tld = _ctx._thread_local_data
5354 if tld.is_eager:
5355 try:
5356 _result = pywrap_tfe.TFE_Py_FastPathExecute(
5357 _ctx, "ExperimentalThreadPoolHandle", name, "num_threads",
5358 num_threads, "max_intra_op_parallelism", max_intra_op_parallelism,
5359 "display_name", display_name, "container", container, "shared_name",
5360 shared_name)
5361 return _result
5362 except _core._NotOkStatusException as e:
5363 _ops.raise_from_not_ok_status(e, name)
5364 except _core._FallbackException:
5365 pass
5366 try:
5367 return experimental_thread_pool_handle_eager_fallback(
5368 num_threads=num_threads,
5369 max_intra_op_parallelism=max_intra_op_parallelism,
5370 display_name=display_name, container=container,
5371 shared_name=shared_name, name=name, ctx=_ctx)
5372 except _core._SymbolicException:
5373 pass # Add nodes to the TensorFlow graph.
5374 # Add nodes to the TensorFlow graph.
5375 num_threads = _execute.make_int(num_threads, "num_threads")
5376 display_name = _execute.make_str(display_name, "display_name")
5377 if max_intra_op_parallelism is None:
5378 max_intra_op_parallelism = 1
5379 max_intra_op_parallelism = _execute.make_int(max_intra_op_parallelism, "max_intra_op_parallelism")
5380 if container is None:
5381 container = ""
5382 container = _execute.make_str(container, "container")
5383 if shared_name is None:
5384 shared_name = ""
5385 shared_name = _execute.make_str(shared_name, "shared_name")
5386 _, _, _op, _outputs = _op_def_library._apply_op_helper(
5387 "ExperimentalThreadPoolHandle", num_threads=num_threads,
5388 display_name=display_name,
5389 max_intra_op_parallelism=max_intra_op_parallelism,
5390 container=container,
5391 shared_name=shared_name, name=name)
5392 _result = _outputs[:]
5393 if _execute.must_record_gradient():
5394 _attrs = ("num_threads", _op._get_attr_int("num_threads"),
5395 "max_intra_op_parallelism",
5396 _op._get_attr_int("max_intra_op_parallelism"), "display_name",
5397 _op.get_attr("display_name"), "container",
5398 _op.get_attr("container"), "shared_name",
5399 _op.get_attr("shared_name"))
5400 _inputs_flat = _op.inputs
5401 _execute.record_gradient(
5402 "ExperimentalThreadPoolHandle", _inputs_flat, _attrs, _result)
5403 _result, = _result
5404 return _result
5406ExperimentalThreadPoolHandle = tf_export("raw_ops.ExperimentalThreadPoolHandle")(_ops.to_raw_op(experimental_thread_pool_handle))
5409def experimental_thread_pool_handle_eager_fallback(num_threads, display_name, max_intra_op_parallelism, container, shared_name, name, ctx):
5410 num_threads = _execute.make_int(num_threads, "num_threads")
5411 display_name = _execute.make_str(display_name, "display_name")
5412 if max_intra_op_parallelism is None:
5413 max_intra_op_parallelism = 1
5414 max_intra_op_parallelism = _execute.make_int(max_intra_op_parallelism, "max_intra_op_parallelism")
5415 if container is None:
5416 container = ""
5417 container = _execute.make_str(container, "container")
5418 if shared_name is None:
5419 shared_name = ""
5420 shared_name = _execute.make_str(shared_name, "shared_name")
5421 _inputs_flat = []
5422 _attrs = ("num_threads", num_threads, "max_intra_op_parallelism",
5423 max_intra_op_parallelism, "display_name", display_name, "container",
5424 container, "shared_name", shared_name)
5425 _result = _execute.execute(b"ExperimentalThreadPoolHandle", 1,
5426 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
5427 name=name)
5428 if _execute.must_record_gradient():
5429 _execute.record_gradient(
5430 "ExperimentalThreadPoolHandle", _inputs_flat, _attrs, _result)
5431 _result, = _result
5432 return _result
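
# Example (illustrative sketch; assumes TF 2.x eager execution): pairing the
# two thread-pool ops above. A 2-thread pool resource is created and attached
# to a dataset; in current public APIs the same effect is usually achieved via
# `tf.data.Options().threading.private_threadpool_size`.
def _example_private_thread_pool():
  import tensorflow as tf
  handle = tf.raw_ops.ExperimentalThreadPoolHandle(
      num_threads=2, display_name="demo_pool")
  ds = tf.data.Dataset.range(4).map(lambda x: x * 2)
  variant = tf.raw_ops.ExperimentalThreadPoolDataset(
      input_dataset=tf.data.experimental.to_variant(ds),
      thread_pool=handle,
      output_types=[tf.int64], output_shapes=[tf.TensorShape([])])
  pooled = tf.data.experimental.from_variant(
      variant, structure=tf.TensorSpec([], tf.int64))
  return list(pooled.as_numpy_iterator())  # [0, 2, 4, 6]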
5435def experimental_unbatch_dataset(input_dataset, output_types, output_shapes, name=None):
5436 r"""A dataset that splits the elements of its input into multiple elements.
5438 Args:
5439 input_dataset: A `Tensor` of type `variant`.
5440 output_types: A list of `tf.DTypes` that has length `>= 1`.
5441 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
5442 name: A name for the operation (optional).
5444 Returns:
5445 A `Tensor` of type `variant`.
5446 """
5447 _ctx = _context._context or _context.context()
5448 tld = _ctx._thread_local_data
5449 if tld.is_eager:
5450 try:
5451 _result = pywrap_tfe.TFE_Py_FastPathExecute(
5452 _ctx, "ExperimentalUnbatchDataset", name, input_dataset,
5453 "output_types", output_types, "output_shapes", output_shapes)
5454 return _result
5455 except _core._NotOkStatusException as e:
5456 _ops.raise_from_not_ok_status(e, name)
5457 except _core._FallbackException:
5458 pass
5459 try:
5460 return experimental_unbatch_dataset_eager_fallback(
5461 input_dataset, output_types=output_types,
5462 output_shapes=output_shapes, name=name, ctx=_ctx)
5463 except _core._SymbolicException:
5464 pass # Add nodes to the TensorFlow graph.
5465 # Add nodes to the TensorFlow graph.
5466 if not isinstance(output_types, (list, tuple)):
5467 raise TypeError(
5468 "Expected list for 'output_types' argument to "
5469 "'experimental_unbatch_dataset' Op, not %r." % output_types)
5470 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
5471 if not isinstance(output_shapes, (list, tuple)):
5472 raise TypeError(
5473 "Expected list for 'output_shapes' argument to "
5474 "'experimental_unbatch_dataset' Op, not %r." % output_shapes)
5475 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
5476 _, _, _op, _outputs = _op_def_library._apply_op_helper(
5477 "ExperimentalUnbatchDataset", input_dataset=input_dataset,
5478 output_types=output_types,
5479 output_shapes=output_shapes, name=name)
5480 _result = _outputs[:]
5481 if _execute.must_record_gradient():
5482 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
5483 _op.get_attr("output_shapes"))
5484 _inputs_flat = _op.inputs
5485 _execute.record_gradient(
5486 "ExperimentalUnbatchDataset", _inputs_flat, _attrs, _result)
5487 _result, = _result
5488 return _result
5490ExperimentalUnbatchDataset = tf_export("raw_ops.ExperimentalUnbatchDataset")(_ops.to_raw_op(experimental_unbatch_dataset))
5493def experimental_unbatch_dataset_eager_fallback(input_dataset, output_types, output_shapes, name, ctx):
5494 if not isinstance(output_types, (list, tuple)):
5495 raise TypeError(
5496 "Expected list for 'output_types' argument to "
5497 "'experimental_unbatch_dataset' Op, not %r." % output_types)
5498 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
5499 if not isinstance(output_shapes, (list, tuple)):
5500 raise TypeError(
5501 "Expected list for 'output_shapes' argument to "
5502 "'experimental_unbatch_dataset' Op, not %r." % output_shapes)
5503 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
5504 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
5505 _inputs_flat = [input_dataset]
5506 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
5507 _result = _execute.execute(b"ExperimentalUnbatchDataset", 1,
5508 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
5509 name=name)
5510 if _execute.must_record_gradient():
5511 _execute.record_gradient(
5512 "ExperimentalUnbatchDataset", _inputs_flat, _attrs, _result)
5513 _result, = _result
5514 return _result
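
# Example (illustrative sketch): `tf.data.Dataset.unbatch` is the public
# transformation backed by this family of unbatch ops; each input element is
# split along its first dimension.
def _example_unbatch():
  import tensorflow as tf
  ds = tf.data.Dataset.from_tensor_slices(
      tf.reshape(tf.range(6), [2, 3]))  # two elements of shape [3]
  return list(ds.unbatch().as_numpy_iterator())  # [0, 1, 2, 3, 4, 5]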
5517def experimental_unique_dataset(input_dataset, output_types, output_shapes, name=None):
5518 r"""Creates a dataset that contains the unique elements of `input_dataset`.
5520 Args:
5521 input_dataset: A `Tensor` of type `variant`.
5522 output_types: A list of `tf.DTypes` that has length `>= 1`.
5523 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
5524 name: A name for the operation (optional).
5526 Returns:
5527 A `Tensor` of type `variant`.
5528 """
5529 _ctx = _context._context or _context.context()
5530 tld = _ctx._thread_local_data
5531 if tld.is_eager:
5532 try:
5533 _result = pywrap_tfe.TFE_Py_FastPathExecute(
5534 _ctx, "ExperimentalUniqueDataset", name, input_dataset,
5535 "output_types", output_types, "output_shapes", output_shapes)
5536 return _result
5537 except _core._NotOkStatusException as e:
5538 _ops.raise_from_not_ok_status(e, name)
5539 except _core._FallbackException:
5540 pass
5541 try:
5542 return experimental_unique_dataset_eager_fallback(
5543 input_dataset, output_types=output_types,
5544 output_shapes=output_shapes, name=name, ctx=_ctx)
5545 except _core._SymbolicException:
5546 pass # Add nodes to the TensorFlow graph.
5547 # Add nodes to the TensorFlow graph.
5548 if not isinstance(output_types, (list, tuple)):
5549 raise TypeError(
5550 "Expected list for 'output_types' argument to "
5551 "'experimental_unique_dataset' Op, not %r." % output_types)
5552 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
5553 if not isinstance(output_shapes, (list, tuple)):
5554 raise TypeError(
5555 "Expected list for 'output_shapes' argument to "
5556 "'experimental_unique_dataset' Op, not %r." % output_shapes)
5557 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
5558 _, _, _op, _outputs = _op_def_library._apply_op_helper(
5559 "ExperimentalUniqueDataset", input_dataset=input_dataset,
5560 output_types=output_types,
5561 output_shapes=output_shapes, name=name)
5562 _result = _outputs[:]
5563 if _execute.must_record_gradient():
5564 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
5565 _op.get_attr("output_shapes"))
5566 _inputs_flat = _op.inputs
5567 _execute.record_gradient(
5568 "ExperimentalUniqueDataset", _inputs_flat, _attrs, _result)
5569 _result, = _result
5570 return _result
5572ExperimentalUniqueDataset = tf_export("raw_ops.ExperimentalUniqueDataset")(_ops.to_raw_op(experimental_unique_dataset))
5575def experimental_unique_dataset_eager_fallback(input_dataset, output_types, output_shapes, name, ctx):
5576 if not isinstance(output_types, (list, tuple)):
5577 raise TypeError(
5578 "Expected list for 'output_types' argument to "
5579 "'experimental_unique_dataset' Op, not %r." % output_types)
5580 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
5581 if not isinstance(output_shapes, (list, tuple)):
5582 raise TypeError(
5583 "Expected list for 'output_shapes' argument to "
5584 "'experimental_unique_dataset' Op, not %r." % output_shapes)
5585 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
5586 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
5587 _inputs_flat = [input_dataset]
5588 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
5589 _result = _execute.execute(b"ExperimentalUniqueDataset", 1,
5590 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
5591 name=name)
5592 if _execute.must_record_gradient():
5593 _execute.record_gradient(
5594 "ExperimentalUniqueDataset", _inputs_flat, _attrs, _result)
5595 _result, = _result
5596 return _result
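
# Example (illustrative sketch): the public `tf.data.experimental.unique`
# transformation corresponds to this op; both consecutive and non-consecutive
# duplicates are dropped.
def _example_unique():
  import tensorflow as tf
  ds = tf.data.Dataset.from_tensor_slices([1, 1, 2, 3, 3, 1]).apply(
      tf.data.experimental.unique())
  return list(ds.as_numpy_iterator())  # [1, 2, 3]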
5599def get_element_at_index(dataset, index, output_types, output_shapes, name=None):
5600 r"""Gets the element at the specified index in a dataset.
5602 Args:
5603 dataset: A `Tensor` of type `variant`.
5604 index: A `Tensor` of type `int64`.
5605 output_types: A list of `tf.DTypes` that has length `>= 1`.
5606 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
5607 name: A name for the operation (optional).
5609 Returns:
5610 A list of `Tensor` objects of type `output_types`.
5611 """
5612 _ctx = _context._context or _context.context()
5613 tld = _ctx._thread_local_data
5614 if tld.is_eager:
5615 try:
5616 _result = pywrap_tfe.TFE_Py_FastPathExecute(
5617 _ctx, "GetElementAtIndex", name, dataset, index, "output_types",
5618 output_types, "output_shapes", output_shapes)
5619 return _result
5620 except _core._NotOkStatusException as e:
5621 _ops.raise_from_not_ok_status(e, name)
5622 except _core._FallbackException:
5623 pass
5624 try:
5625 return get_element_at_index_eager_fallback(
5626 dataset, index, output_types=output_types,
5627 output_shapes=output_shapes, name=name, ctx=_ctx)
5628 except _core._SymbolicException:
5629 pass # Add nodes to the TensorFlow graph.
5630 # Add nodes to the TensorFlow graph.
5631 if not isinstance(output_types, (list, tuple)):
5632 raise TypeError(
5633 "Expected list for 'output_types' argument to "
5634 "'get_element_at_index' Op, not %r." % output_types)
5635 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
5636 if not isinstance(output_shapes, (list, tuple)):
5637 raise TypeError(
5638 "Expected list for 'output_shapes' argument to "
5639 "'get_element_at_index' Op, not %r." % output_shapes)
5640 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
5641 _, _, _op, _outputs = _op_def_library._apply_op_helper(
5642 "GetElementAtIndex", dataset=dataset, index=index,
5643 output_types=output_types,
5644 output_shapes=output_shapes, name=name)
5645 _result = _outputs[:]
5646 if _execute.must_record_gradient():
5647 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
5648 _op.get_attr("output_shapes"))
5649 _inputs_flat = _op.inputs
5650 _execute.record_gradient(
5651 "GetElementAtIndex", _inputs_flat, _attrs, _result)
5652 return _result
5654GetElementAtIndex = tf_export("raw_ops.GetElementAtIndex")(_ops.to_raw_op(get_element_at_index))
5657def get_element_at_index_eager_fallback(dataset, index, output_types, output_shapes, name, ctx):
5658 if not isinstance(output_types, (list, tuple)):
5659 raise TypeError(
5660 "Expected list for 'output_types' argument to "
5661 "'get_element_at_index' Op, not %r." % output_types)
5662 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
5663 if not isinstance(output_shapes, (list, tuple)):
5664 raise TypeError(
5665 "Expected list for 'output_shapes' argument to "
5666 "'get_element_at_index' Op, not %r." % output_shapes)
5667 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
5668 dataset = _ops.convert_to_tensor(dataset, _dtypes.variant)
5669 index = _ops.convert_to_tensor(index, _dtypes.int64)
5670 _inputs_flat = [dataset, index]
5671 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
5672 _result = _execute.execute(b"GetElementAtIndex", len(output_types),
5673 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
5674 name=name)
5675 if _execute.must_record_gradient():
5676 _execute.record_gradient(
5677 "GetElementAtIndex", _inputs_flat, _attrs, _result)
5678 return _result
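
# Example (illustrative sketch; assumes TF 2.x eager execution and a dataset
# that supports random access): fetching one element by index via the raw op.
# The op returns a list of tensors, one per dataset component.
def _example_get_element_at_index():
  import tensorflow as tf
  ds = tf.data.Dataset.range(10)
  elems = tf.raw_ops.GetElementAtIndex(
      dataset=tf.data.experimental.to_variant(ds),
      index=7,
      output_types=[tf.int64], output_shapes=[tf.TensorShape([])])
  return elems[0]  # scalar tensor holding 7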
5681def group_by_reducer_dataset(input_dataset, key_func_other_arguments, init_func_other_arguments, reduce_func_other_arguments, finalize_func_other_arguments, key_func, init_func, reduce_func, finalize_func, output_types, output_shapes, name=None):
5682 r"""Creates a dataset that computes a group-by on `input_dataset`.
5686 Args:
5687 input_dataset: A `Tensor` of type `variant`.
5688 A variant tensor representing the input dataset.
5689 key_func_other_arguments: A list of `Tensor` objects.
5690 A list of tensors, typically values that were captured when
5691 building a closure for `key_func`.
5692 init_func_other_arguments: A list of `Tensor` objects.
5693 A list of tensors, typically values that were captured when
5694 building a closure for `init_func`.
5695 reduce_func_other_arguments: A list of `Tensor` objects.
5696 A list of tensors, typically values that were captured when
5697 building a closure for `reduce_func`.
5698 finalize_func_other_arguments: A list of `Tensor` objects.
5699 A list of tensors, typically values that were captured when
5700 building a closure for `finalize_func`.
5701 key_func: A function decorated with @Defun.
5702 A function mapping an element of `input_dataset`, concatenated
5703 with `key_func_other_arguments` to a scalar value of type DT_INT64.
5704 init_func: A function decorated with @Defun.
5705 A function mapping a key of type DT_INT64, concatenated with
5706 `init_func_other_arguments` to the initial reducer state.
5707 reduce_func: A function decorated with @Defun.
5708 A function mapping the current reducer state and an element of `input_dataset`,
5709 concatenated with `reduce_func_other_arguments` to a new reducer state.
5710 finalize_func: A function decorated with @Defun.
5711 A function mapping the final reducer state to an output element.
5712 output_types: A list of `tf.DTypes` that has length `>= 1`.
5713 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
5714 name: A name for the operation (optional).
5716 Returns:
5717 A `Tensor` of type `variant`.
5718 """
5719 _ctx = _context._context or _context.context()
5720 tld = _ctx._thread_local_data
5721 if tld.is_eager:
5722 try:
5723 _result = pywrap_tfe.TFE_Py_FastPathExecute(
5724 _ctx, "GroupByReducerDataset", name, input_dataset,
5725 key_func_other_arguments, init_func_other_arguments,
5726 reduce_func_other_arguments, finalize_func_other_arguments,
5727 "key_func", key_func, "init_func", init_func, "reduce_func",
5728 reduce_func, "finalize_func", finalize_func, "output_types",
5729 output_types, "output_shapes", output_shapes)
5730 return _result
5731 except _core._NotOkStatusException as e:
5732 _ops.raise_from_not_ok_status(e, name)
5733 except _core._FallbackException:
5734 pass
5735 try:
5736 return group_by_reducer_dataset_eager_fallback(
5737 input_dataset, key_func_other_arguments, init_func_other_arguments,
5738 reduce_func_other_arguments, finalize_func_other_arguments,
5739 key_func=key_func, init_func=init_func, reduce_func=reduce_func,
5740 finalize_func=finalize_func, output_types=output_types,
5741 output_shapes=output_shapes, name=name, ctx=_ctx)
5742 except _core._SymbolicException:
5743 pass # Add nodes to the TensorFlow graph.
5744 # Add nodes to the TensorFlow graph.
5745 if not isinstance(output_types, (list, tuple)):
5746 raise TypeError(
5747 "Expected list for 'output_types' argument to "
5748 "'group_by_reducer_dataset' Op, not %r." % output_types)
5749 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
5750 if not isinstance(output_shapes, (list, tuple)):
5751 raise TypeError(
5752 "Expected list for 'output_shapes' argument to "
5753 "'group_by_reducer_dataset' Op, not %r." % output_shapes)
5754 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
5755 _, _, _op, _outputs = _op_def_library._apply_op_helper(
5756 "GroupByReducerDataset", input_dataset=input_dataset,
5757 key_func_other_arguments=key_func_other_arguments,
5758 init_func_other_arguments=init_func_other_arguments,
5759 reduce_func_other_arguments=reduce_func_other_arguments,
5760 finalize_func_other_arguments=finalize_func_other_arguments,
5761 key_func=key_func, init_func=init_func,
5762 reduce_func=reduce_func,
5763 finalize_func=finalize_func,
5764 output_types=output_types,
5765 output_shapes=output_shapes, name=name)
5766 _result = _outputs[:]
5767 if _execute.must_record_gradient():
5768 _attrs = ("key_func", _op.get_attr("key_func"), "init_func",
5769 _op.get_attr("init_func"), "reduce_func",
5770 _op.get_attr("reduce_func"), "finalize_func",
5771 _op.get_attr("finalize_func"), "Tkey_func_other_arguments",
5772 _op.get_attr("Tkey_func_other_arguments"),
5773 "Tinit_func_other_arguments",
5774 _op.get_attr("Tinit_func_other_arguments"),
5775 "Treduce_func_other_arguments",
5776 _op.get_attr("Treduce_func_other_arguments"),
5777 "Tfinalize_func_other_arguments",
5778 _op.get_attr("Tfinalize_func_other_arguments"), "output_types",
5779 _op.get_attr("output_types"), "output_shapes",
5780 _op.get_attr("output_shapes"))
5781 _inputs_flat = _op.inputs
5782 _execute.record_gradient(
5783 "GroupByReducerDataset", _inputs_flat, _attrs, _result)
5784 _result, = _result
5785 return _result
5787GroupByReducerDataset = tf_export("raw_ops.GroupByReducerDataset")(_ops.to_raw_op(group_by_reducer_dataset))
5790def group_by_reducer_dataset_eager_fallback(input_dataset, key_func_other_arguments, init_func_other_arguments, reduce_func_other_arguments, finalize_func_other_arguments, key_func, init_func, reduce_func, finalize_func, output_types, output_shapes, name, ctx):
5791 if not isinstance(output_types, (list, tuple)):
5792 raise TypeError(
5793 "Expected list for 'output_types' argument to "
5794 "'group_by_reducer_dataset' Op, not %r." % output_types)
5795 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
5796 if not isinstance(output_shapes, (list, tuple)):
5797 raise TypeError(
5798 "Expected list for 'output_shapes' argument to "
5799 "'group_by_reducer_dataset' Op, not %r." % output_shapes)
5800 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
5801 _attr_Tkey_func_other_arguments, key_func_other_arguments = _execute.convert_to_mixed_eager_tensors(key_func_other_arguments, ctx)
5802 _attr_Tinit_func_other_arguments, init_func_other_arguments = _execute.convert_to_mixed_eager_tensors(init_func_other_arguments, ctx)
5803 _attr_Treduce_func_other_arguments, reduce_func_other_arguments = _execute.convert_to_mixed_eager_tensors(reduce_func_other_arguments, ctx)
5804 _attr_Tfinalize_func_other_arguments, finalize_func_other_arguments = _execute.convert_to_mixed_eager_tensors(finalize_func_other_arguments, ctx)
5805 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
5806 _inputs_flat = [input_dataset] + list(key_func_other_arguments) + list(init_func_other_arguments) + list(reduce_func_other_arguments) + list(finalize_func_other_arguments)
5807 _attrs = ("key_func", key_func, "init_func", init_func, "reduce_func",
5808 reduce_func, "finalize_func", finalize_func, "Tkey_func_other_arguments",
5809 _attr_Tkey_func_other_arguments, "Tinit_func_other_arguments",
5810 _attr_Tinit_func_other_arguments, "Treduce_func_other_arguments",
5811 _attr_Treduce_func_other_arguments, "Tfinalize_func_other_arguments",
5812 _attr_Tfinalize_func_other_arguments, "output_types", output_types,
5813 "output_shapes", output_shapes)
5814 _result = _execute.execute(b"GroupByReducerDataset", 1, inputs=_inputs_flat,
5815 attrs=_attrs, ctx=ctx, name=name)
5816 if _execute.must_record_gradient():
5817 _execute.record_gradient(
5818 "GroupByReducerDataset", _inputs_flat, _attrs, _result)
5819 _result, = _result
5820 return _result
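
# Example (illustrative sketch): the public wrapper for this op is
# `tf.data.experimental.group_by_reducer`, with the per-key state machine
# expressed as a `tf.data.experimental.Reducer`. Here elements are summed by
# parity; the output order of the two groups is not guaranteed.
def _example_group_by_reducer():
  import tensorflow as tf
  reducer = tf.data.experimental.Reducer(
      init_func=lambda key: tf.constant(0, tf.int64),
      reduce_func=lambda state, x: state + x,
      finalize_func=lambda state: state)
  ds = tf.data.Dataset.range(6).apply(
      tf.data.experimental.group_by_reducer(
          key_func=lambda x: x % 2, reducer=reducer))
  return list(ds.as_numpy_iterator())  # sums 0+2+4=6 and 1+3+5=9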
5823def group_by_window_dataset(input_dataset, key_func_other_arguments, reduce_func_other_arguments, window_size_func_other_arguments, key_func, reduce_func, window_size_func, output_types, output_shapes, metadata="", name=None):
5824 r"""Creates a dataset that computes a windowed group-by on `input_dataset`.
5826  Note: only `int64` keys are currently supported (TODO(mrry): support non-int64 keys).
5828 Args:
5829 input_dataset: A `Tensor` of type `variant`.
5830 key_func_other_arguments: A list of `Tensor` objects.
5831 reduce_func_other_arguments: A list of `Tensor` objects.
5832 window_size_func_other_arguments: A list of `Tensor` objects.
5833 key_func: A function decorated with @Defun.
5834 A function mapping an element of `input_dataset`, concatenated
5835 with `key_func_other_arguments` to a scalar value of type DT_INT64.
5836 reduce_func: A function decorated with @Defun.
5837 window_size_func: A function decorated with @Defun.
5838 output_types: A list of `tf.DTypes` that has length `>= 1`.
5839 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
5840 metadata: An optional `string`. Defaults to `""`.
5841 name: A name for the operation (optional).
5843 Returns:
5844 A `Tensor` of type `variant`.
5845 """
5846 _ctx = _context._context or _context.context()
5847 tld = _ctx._thread_local_data
5848 if tld.is_eager:
5849 try:
5850 _result = pywrap_tfe.TFE_Py_FastPathExecute(
5851 _ctx, "GroupByWindowDataset", name, input_dataset,
5852 key_func_other_arguments, reduce_func_other_arguments,
5853 window_size_func_other_arguments, "key_func", key_func, "reduce_func",
5854 reduce_func, "window_size_func", window_size_func, "output_types",
5855 output_types, "output_shapes", output_shapes, "metadata", metadata)
5856 return _result
5857 except _core._NotOkStatusException as e:
5858 _ops.raise_from_not_ok_status(e, name)
5859 except _core._FallbackException:
5860 pass
5861 try:
5862 return group_by_window_dataset_eager_fallback(
5863 input_dataset, key_func_other_arguments,
5864 reduce_func_other_arguments, window_size_func_other_arguments,
5865 key_func=key_func, reduce_func=reduce_func,
5866 window_size_func=window_size_func, output_types=output_types,
5867 output_shapes=output_shapes, metadata=metadata, name=name, ctx=_ctx)
5868 except _core._SymbolicException:
5869 pass # Add nodes to the TensorFlow graph.
5870 # Add nodes to the TensorFlow graph.
5871 if not isinstance(output_types, (list, tuple)):
5872 raise TypeError(
5873 "Expected list for 'output_types' argument to "
5874 "'group_by_window_dataset' Op, not %r." % output_types)
5875 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
5876 if not isinstance(output_shapes, (list, tuple)):
5877 raise TypeError(
5878 "Expected list for 'output_shapes' argument to "
5879 "'group_by_window_dataset' Op, not %r." % output_shapes)
5880 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
5881 if metadata is None:
5882 metadata = ""
5883 metadata = _execute.make_str(metadata, "metadata")
5884 _, _, _op, _outputs = _op_def_library._apply_op_helper(
5885 "GroupByWindowDataset", input_dataset=input_dataset,
5886 key_func_other_arguments=key_func_other_arguments,
5887 reduce_func_other_arguments=reduce_func_other_arguments,
5888 window_size_func_other_arguments=window_size_func_other_arguments,
5889 key_func=key_func, reduce_func=reduce_func,
5890 window_size_func=window_size_func,
5891 output_types=output_types,
5892 output_shapes=output_shapes,
5893 metadata=metadata, name=name)
5894 _result = _outputs[:]
5895 if _execute.must_record_gradient():
5896 _attrs = ("key_func", _op.get_attr("key_func"), "reduce_func",
5897 _op.get_attr("reduce_func"), "window_size_func",
5898 _op.get_attr("window_size_func"), "Tkey_func_other_arguments",
5899 _op.get_attr("Tkey_func_other_arguments"),
5900 "Treduce_func_other_arguments",
5901 _op.get_attr("Treduce_func_other_arguments"),
5902 "Twindow_size_func_other_arguments",
5903 _op.get_attr("Twindow_size_func_other_arguments"),
5904 "output_types", _op.get_attr("output_types"), "output_shapes",
5905 _op.get_attr("output_shapes"), "metadata",
5906 _op.get_attr("metadata"))
5907 _inputs_flat = _op.inputs
5908 _execute.record_gradient(
5909 "GroupByWindowDataset", _inputs_flat, _attrs, _result)
5910 _result, = _result
5911 return _result
5913GroupByWindowDataset = tf_export("raw_ops.GroupByWindowDataset")(_ops.to_raw_op(group_by_window_dataset))
5916def group_by_window_dataset_eager_fallback(input_dataset, key_func_other_arguments, reduce_func_other_arguments, window_size_func_other_arguments, key_func, reduce_func, window_size_func, output_types, output_shapes, metadata, name, ctx):
5917 if not isinstance(output_types, (list, tuple)):
5918 raise TypeError(
5919 "Expected list for 'output_types' argument to "
5920 "'group_by_window_dataset' Op, not %r." % output_types)
5921 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
5922 if not isinstance(output_shapes, (list, tuple)):
5923 raise TypeError(
5924 "Expected list for 'output_shapes' argument to "
5925 "'group_by_window_dataset' Op, not %r." % output_shapes)
5926 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
5927 if metadata is None:
5928 metadata = ""
5929 metadata = _execute.make_str(metadata, "metadata")
5930 _attr_Tkey_func_other_arguments, key_func_other_arguments = _execute.convert_to_mixed_eager_tensors(key_func_other_arguments, ctx)
5931 _attr_Treduce_func_other_arguments, reduce_func_other_arguments = _execute.convert_to_mixed_eager_tensors(reduce_func_other_arguments, ctx)
5932 _attr_Twindow_size_func_other_arguments, window_size_func_other_arguments = _execute.convert_to_mixed_eager_tensors(window_size_func_other_arguments, ctx)
5933 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
5934 _inputs_flat = [input_dataset] + list(key_func_other_arguments) + list(reduce_func_other_arguments) + list(window_size_func_other_arguments)
5935 _attrs = ("key_func", key_func, "reduce_func", reduce_func,
5936 "window_size_func", window_size_func, "Tkey_func_other_arguments",
5937 _attr_Tkey_func_other_arguments, "Treduce_func_other_arguments",
5938 _attr_Treduce_func_other_arguments, "Twindow_size_func_other_arguments",
5939 _attr_Twindow_size_func_other_arguments, "output_types", output_types,
5940 "output_shapes", output_shapes, "metadata", metadata)
5941 _result = _execute.execute(b"GroupByWindowDataset", 1, inputs=_inputs_flat,
5942 attrs=_attrs, ctx=ctx, name=name)
5943 if _execute.must_record_gradient():
5944 _execute.record_gradient(
5945 "GroupByWindowDataset", _inputs_flat, _attrs, _result)
5946 _result, = _result
5947 return _result
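
# Example (illustrative sketch): the public wrapper for this op is
# `tf.data.experimental.group_by_window`. Elements are keyed by parity and
# each key's window of 4 elements is batched together.
def _example_group_by_window():
  import tensorflow as tf
  ds = tf.data.Dataset.range(8).apply(
      tf.data.experimental.group_by_window(
          key_func=lambda x: x % 2,
          reduce_func=lambda key, window: window.batch(4),
          window_size=4))
  return [b.tolist() for b in ds.as_numpy_iterator()]  # [[0,2,4,6], [1,3,5,7]]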
5950def ignore_errors_dataset(input_dataset, output_types, output_shapes, log_warning=False, name=None):
5951 r"""Creates a dataset that contains the elements of `input_dataset` ignoring errors.
5953 Args:
5954 input_dataset: A `Tensor` of type `variant`.
5955 output_types: A list of `tf.DTypes` that has length `>= 1`.
5956 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
5957 log_warning: An optional `bool`. Defaults to `False`.
5958 name: A name for the operation (optional).
5960 Returns:
5961 A `Tensor` of type `variant`.
5962 """
5963 _ctx = _context._context or _context.context()
5964 tld = _ctx._thread_local_data
5965 if tld.is_eager:
5966 try:
5967 _result = pywrap_tfe.TFE_Py_FastPathExecute(
5968 _ctx, "IgnoreErrorsDataset", name, input_dataset, "output_types",
5969 output_types, "output_shapes", output_shapes, "log_warning",
5970 log_warning)
5971 return _result
5972 except _core._NotOkStatusException as e:
5973 _ops.raise_from_not_ok_status(e, name)
5974 except _core._FallbackException:
5975 pass
5976 try:
5977 return ignore_errors_dataset_eager_fallback(
5978 input_dataset, output_types=output_types,
5979 output_shapes=output_shapes, log_warning=log_warning, name=name,
5980 ctx=_ctx)
5981 except _core._SymbolicException:
5982 pass # Add nodes to the TensorFlow graph.
5983 # Add nodes to the TensorFlow graph.
5984 if not isinstance(output_types, (list, tuple)):
5985 raise TypeError(
5986 "Expected list for 'output_types' argument to "
5987 "'ignore_errors_dataset' Op, not %r." % output_types)
5988 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
5989 if not isinstance(output_shapes, (list, tuple)):
5990 raise TypeError(
5991 "Expected list for 'output_shapes' argument to "
5992 "'ignore_errors_dataset' Op, not %r." % output_shapes)
5993 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
5994 if log_warning is None:
5995 log_warning = False
5996 log_warning = _execute.make_bool(log_warning, "log_warning")
5997 _, _, _op, _outputs = _op_def_library._apply_op_helper(
5998 "IgnoreErrorsDataset", input_dataset=input_dataset,
5999 output_types=output_types,
6000 output_shapes=output_shapes,
6001 log_warning=log_warning, name=name)
6002 _result = _outputs[:]
6003 if _execute.must_record_gradient():
6004 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
6005 _op.get_attr("output_shapes"), "log_warning",
6006 _op._get_attr_bool("log_warning"))
6007 _inputs_flat = _op.inputs
6008 _execute.record_gradient(
6009 "IgnoreErrorsDataset", _inputs_flat, _attrs, _result)
6010 _result, = _result
6011 return _result
6013IgnoreErrorsDataset = tf_export("raw_ops.IgnoreErrorsDataset")(_ops.to_raw_op(ignore_errors_dataset))
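# A minimal usage sketch of the wrapper above; the helper name is
# hypothetical, and `input_variant` is assumed to be the variant tensor of an
# existing dataset whose elements are scalar float32.
def _example_ignore_errors_dataset(input_variant):
  # Wrap the pipeline so elements that raise an error during production are
  # dropped; log_warning=True additionally logs each dropped element.
  return ignore_errors_dataset(
      input_variant,
      output_types=[_dtypes.float32],  # assumed element dtype
      output_shapes=[[]],              # assumed scalar elements
      log_warning=True)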
6016def ignore_errors_dataset_eager_fallback(input_dataset, output_types, output_shapes, log_warning, name, ctx):
6017 if not isinstance(output_types, (list, tuple)):
6018 raise TypeError(
6019 "Expected list for 'output_types' argument to "
6020 "'ignore_errors_dataset' Op, not %r." % output_types)
6021 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
6022 if not isinstance(output_shapes, (list, tuple)):
6023 raise TypeError(
6024 "Expected list for 'output_shapes' argument to "
6025 "'ignore_errors_dataset' Op, not %r." % output_shapes)
6026 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
6027 if log_warning is None:
6028 log_warning = False
6029 log_warning = _execute.make_bool(log_warning, "log_warning")
6030 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
6031 _inputs_flat = [input_dataset]
6032 _attrs = ("output_types", output_types, "output_shapes", output_shapes,
6033 "log_warning", log_warning)
6034 _result = _execute.execute(b"IgnoreErrorsDataset", 1, inputs=_inputs_flat,
6035 attrs=_attrs, ctx=ctx, name=name)
6036 if _execute.must_record_gradient():
6037 _execute.record_gradient(
6038 "IgnoreErrorsDataset", _inputs_flat, _attrs, _result)
6039 _result, = _result
6040 return _result
6043def initialize_table_from_dataset(table_handle, dataset, name=None):
6044 r"""TODO: add doc.
6046 Args:
6047 table_handle: A `Tensor` of type `resource`.
6048 dataset: A `Tensor` of type `variant`.
6049 name: A name for the operation (optional).
6051 Returns:
6052 The created Operation.
6053 """
6054 _ctx = _context._context or _context.context()
6055 tld = _ctx._thread_local_data
6056 if tld.is_eager:
6057 try:
6058 _result = pywrap_tfe.TFE_Py_FastPathExecute(
6059 _ctx, "InitializeTableFromDataset", name, table_handle, dataset)
6060 return _result
6061 except _core._NotOkStatusException as e:
6062 _ops.raise_from_not_ok_status(e, name)
6063 except _core._FallbackException:
6064 pass
6065 try:
6066 return initialize_table_from_dataset_eager_fallback(
6067 table_handle, dataset, name=name, ctx=_ctx)
6068 except _core._SymbolicException:
6069 pass # Add nodes to the TensorFlow graph.
6070 # Add nodes to the TensorFlow graph.
6071 _, _, _op, _outputs = _op_def_library._apply_op_helper(
6072 "InitializeTableFromDataset", table_handle=table_handle,
6073 dataset=dataset, name=name)
6074 return _op
6075InitializeTableFromDataset = tf_export("raw_ops.InitializeTableFromDataset")(_ops.to_raw_op(initialize_table_from_dataset))
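# A minimal sketch (hypothetical helper): `table_handle` is assumed to be a
# lookup-table resource tensor and `kv_variant` a dataset variant whose
# elements are (key, value) pairs compatible with the table.
def _example_initialize_table_from_dataset(table_handle, kv_variant):
  # Populates the table from the dataset; returns the created Operation in
  # graph mode and None when executing eagerly.
  return initialize_table_from_dataset(table_handle, kv_variant)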
6078def initialize_table_from_dataset_eager_fallback(table_handle, dataset, name, ctx):
6079 table_handle = _ops.convert_to_tensor(table_handle, _dtypes.resource)
6080 dataset = _ops.convert_to_tensor(dataset, _dtypes.variant)
6081 _inputs_flat = [table_handle, dataset]
6082 _attrs = None
6083 _result = _execute.execute(b"InitializeTableFromDataset", 0,
6084 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
6085 name=name)
6086 _result = None
6087 return _result
6090def iterator_get_device(resource, name=None):
6091 r"""Returns the name of the device on which `resource` has been placed.
6093 Args:
6094 resource: A `Tensor` of type `resource`.
6095 name: A name for the operation (optional).
6097 Returns:
6098 A `Tensor` of type `string`.
6099 """
6100 _ctx = _context._context or _context.context()
6101 tld = _ctx._thread_local_data
6102 if tld.is_eager:
6103 try:
6104 _result = pywrap_tfe.TFE_Py_FastPathExecute(
6105 _ctx, "IteratorGetDevice", name, resource)
6106 return _result
6107 except _core._NotOkStatusException as e:
6108 _ops.raise_from_not_ok_status(e, name)
6109 except _core._FallbackException:
6110 pass
6111 try:
6112 return iterator_get_device_eager_fallback(
6113 resource, name=name, ctx=_ctx)
6114 except _core._SymbolicException:
6115 pass # Add nodes to the TensorFlow graph.
6116 # Add nodes to the TensorFlow graph.
6117 _, _, _op, _outputs = _op_def_library._apply_op_helper(
6118 "IteratorGetDevice", resource=resource, name=name)
6119 _result = _outputs[:]
6120 if _execute.must_record_gradient():
6121 _attrs = ()
6122 _inputs_flat = _op.inputs
6123 _execute.record_gradient(
6124 "IteratorGetDevice", _inputs_flat, _attrs, _result)
6125 _result, = _result
6126 return _result
6128IteratorGetDevice = tf_export("raw_ops.IteratorGetDevice")(_ops.to_raw_op(iterator_get_device))
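# A minimal sketch (hypothetical helper): `iterator_resource` is assumed to
# be the resource tensor of an existing iterator.
def _example_iterator_get_device(iterator_resource):
  # Returns a scalar string tensor such as
  # b"/job:localhost/replica:0/task:0/device:CPU:0".
  return iterator_get_device(iterator_resource)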
6131def iterator_get_device_eager_fallback(resource, name, ctx):
6132 resource = _ops.convert_to_tensor(resource, _dtypes.resource)
6133 _inputs_flat = [resource]
6134 _attrs = None
6135 _result = _execute.execute(b"IteratorGetDevice", 1, inputs=_inputs_flat,
6136 attrs=_attrs, ctx=ctx, name=name)
6137 if _execute.must_record_gradient():
6138 _execute.record_gradient(
6139 "IteratorGetDevice", _inputs_flat, _attrs, _result)
6140 _result, = _result
6141 return _result
6144def lmdb_dataset(filenames, output_types, output_shapes, name=None):
6145 r"""Creates a dataset that emits the key-value pairs in one or more LMDB files.
6147 The Lightning Memory-Mapped Database Manager, or LMDB, is an embedded binary
6148 key-value database. This dataset can read the contents of LMDB database files,
6149 the names of which generally have the `.mdb` suffix.
6151 Each output element consists of a key-value pair represented as a pair of
6152 scalar string `Tensor`s, where the first `Tensor` contains the key and the
6153 second `Tensor` contains the value.
6155 LMDB uses different file formats on big- and little-endian machines.
6156 `LMDBDataset` can only read files in the format of the host machine.
6158 Args:
6159 filenames: A `Tensor` of type `string`.
6160 A scalar or a vector containing the name(s) of the binary file(s) to be
6161 read.
6162 output_types: A list of `tf.DTypes` that has length `>= 1`.
6163 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
6164 name: A name for the operation (optional).
6166 Returns:
6167 A `Tensor` of type `variant`.
6168 """
6169 _ctx = _context._context or _context.context()
6170 tld = _ctx._thread_local_data
6171 if tld.is_eager:
6172 try:
6173 _result = pywrap_tfe.TFE_Py_FastPathExecute(
6174 _ctx, "LMDBDataset", name, filenames, "output_types", output_types,
6175 "output_shapes", output_shapes)
6176 return _result
6177 except _core._NotOkStatusException as e:
6178 _ops.raise_from_not_ok_status(e, name)
6179 except _core._FallbackException:
6180 pass
6181 try:
6182 return lmdb_dataset_eager_fallback(
6183 filenames, output_types=output_types, output_shapes=output_shapes,
6184 name=name, ctx=_ctx)
6185 except _core._SymbolicException:
6186 pass # Add nodes to the TensorFlow graph.
6187 # Add nodes to the TensorFlow graph.
6188 if not isinstance(output_types, (list, tuple)):
6189 raise TypeError(
6190 "Expected list for 'output_types' argument to "
6191 "'lmdb_dataset' Op, not %r." % output_types)
6192 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
6193 if not isinstance(output_shapes, (list, tuple)):
6194 raise TypeError(
6195 "Expected list for 'output_shapes' argument to "
6196 "'lmdb_dataset' Op, not %r." % output_shapes)
6197 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
6198 _, _, _op, _outputs = _op_def_library._apply_op_helper(
6199 "LMDBDataset", filenames=filenames, output_types=output_types,
6200 output_shapes=output_shapes, name=name)
6201 _result = _outputs[:]
6202 if _execute.must_record_gradient():
6203 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
6204 _op.get_attr("output_shapes"))
6205 _inputs_flat = _op.inputs
6206 _execute.record_gradient(
6207 "LMDBDataset", _inputs_flat, _attrs, _result)
6208 _result, = _result
6209 return _result
6211LMDBDataset = tf_export("raw_ops.LMDBDataset")(_ops.to_raw_op(lmdb_dataset))
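# A minimal sketch (hypothetical helper and file name): each LMDB element is
# a (key, value) pair of scalar strings, so two scalar string components are
# declared.
def _example_lmdb_dataset(paths=("data.mdb",)):
  return lmdb_dataset(
      filenames=list(paths),
      output_types=[_dtypes.string, _dtypes.string],
      output_shapes=[[], []])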
6214def lmdb_dataset_eager_fallback(filenames, output_types, output_shapes, name, ctx):
6215 if not isinstance(output_types, (list, tuple)):
6216 raise TypeError(
6217 "Expected list for 'output_types' argument to "
6218 "'lmdb_dataset' Op, not %r." % output_types)
6219 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
6220 if not isinstance(output_shapes, (list, tuple)):
6221 raise TypeError(
6222 "Expected list for 'output_shapes' argument to "
6223 "'lmdb_dataset' Op, not %r." % output_shapes)
6224 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
6225 filenames = _ops.convert_to_tensor(filenames, _dtypes.string)
6226 _inputs_flat = [filenames]
6227 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
6228 _result = _execute.execute(b"LMDBDataset", 1, inputs=_inputs_flat,
6229 attrs=_attrs, ctx=ctx, name=name)
6230 if _execute.must_record_gradient():
6231 _execute.record_gradient(
6232 "LMDBDataset", _inputs_flat, _attrs, _result)
6233 _result, = _result
6234 return _result
6237def latency_stats_dataset(input_dataset, tag, output_types, output_shapes, name=None):
6238 r"""Records the latency of producing `input_dataset` elements in a StatsAggregator.
6240 Args:
6241 input_dataset: A `Tensor` of type `variant`.
6242 tag: A `Tensor` of type `string`.
6243 output_types: A list of `tf.DTypes` that has length `>= 1`.
6244 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
6245 name: A name for the operation (optional).
6247 Returns:
6248 A `Tensor` of type `variant`.
6249 """
6250 _ctx = _context._context or _context.context()
6251 tld = _ctx._thread_local_data
6252 if tld.is_eager:
6253 try:
6254 _result = pywrap_tfe.TFE_Py_FastPathExecute(
6255 _ctx, "LatencyStatsDataset", name, input_dataset, tag, "output_types",
6256 output_types, "output_shapes", output_shapes)
6257 return _result
6258 except _core._NotOkStatusException as e:
6259 _ops.raise_from_not_ok_status(e, name)
6260 except _core._FallbackException:
6261 pass
6262 try:
6263 return latency_stats_dataset_eager_fallback(
6264 input_dataset, tag, output_types=output_types,
6265 output_shapes=output_shapes, name=name, ctx=_ctx)
6266 except _core._SymbolicException:
6267 pass # Add nodes to the TensorFlow graph.
6268 # Add nodes to the TensorFlow graph.
6269 if not isinstance(output_types, (list, tuple)):
6270 raise TypeError(
6271 "Expected list for 'output_types' argument to "
6272 "'latency_stats_dataset' Op, not %r." % output_types)
6273 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
6274 if not isinstance(output_shapes, (list, tuple)):
6275 raise TypeError(
6276 "Expected list for 'output_shapes' argument to "
6277 "'latency_stats_dataset' Op, not %r." % output_shapes)
6278 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
6279 _, _, _op, _outputs = _op_def_library._apply_op_helper(
6280 "LatencyStatsDataset", input_dataset=input_dataset, tag=tag,
6281 output_types=output_types,
6282 output_shapes=output_shapes, name=name)
6283 _result = _outputs[:]
6284 if _execute.must_record_gradient():
6285 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
6286 _op.get_attr("output_shapes"))
6287 _inputs_flat = _op.inputs
6288 _execute.record_gradient(
6289 "LatencyStatsDataset", _inputs_flat, _attrs, _result)
6290 _result, = _result
6291 return _result
6293LatencyStatsDataset = tf_export("raw_ops.LatencyStatsDataset")(_ops.to_raw_op(latency_stats_dataset))
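# A minimal sketch (hypothetical helper): instruments a pipeline whose
# elements are assumed to be scalar int64; production latency is recorded in
# a StatsAggregator under `tag`.
def _example_latency_stats_dataset(input_variant, tag="record_latency"):
  return latency_stats_dataset(
      input_variant, tag,
      output_types=[_dtypes.int64], output_shapes=[[]])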
6296def latency_stats_dataset_eager_fallback(input_dataset, tag, output_types, output_shapes, name, ctx):
6297 if not isinstance(output_types, (list, tuple)):
6298 raise TypeError(
6299 "Expected list for 'output_types' argument to "
6300 "'latency_stats_dataset' Op, not %r." % output_types)
6301 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
6302 if not isinstance(output_shapes, (list, tuple)):
6303 raise TypeError(
6304 "Expected list for 'output_shapes' argument to "
6305 "'latency_stats_dataset' Op, not %r." % output_shapes)
6306 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
6307 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
6308 tag = _ops.convert_to_tensor(tag, _dtypes.string)
6309 _inputs_flat = [input_dataset, tag]
6310 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
6311 _result = _execute.execute(b"LatencyStatsDataset", 1, inputs=_inputs_flat,
6312 attrs=_attrs, ctx=ctx, name=name)
6313 if _execute.must_record_gradient():
6314 _execute.record_gradient(
6315 "LatencyStatsDataset", _inputs_flat, _attrs, _result)
6316 _result, = _result
6317 return _result
6320def legacy_parallel_interleave_dataset_v2(input_dataset, other_arguments, cycle_length, block_length, buffer_output_elements, prefetch_input_elements, f, output_types, output_shapes, deterministic="default", metadata="", name=None):
6321 r"""Creates a dataset that applies `f` to the outputs of `input_dataset`.
6323 The resulting dataset is similar to the `InterleaveDataset`, with the exception
6324 that if retrieving the next value from a dataset would cause the requester to
6325 block, it will skip that input dataset. This dataset is especially useful
6326 when loading data from variable-latency datastores (e.g. HDFS, GCS), as it
6327 allows the training step to proceed so long as some data is available.
6329 !! WARNING !! This dataset is not deterministic!
6331 Args:
6332 input_dataset: A `Tensor` of type `variant`.
6333 other_arguments: A list of `Tensor` objects.
6334 cycle_length: A `Tensor` of type `int64`.
6335 block_length: A `Tensor` of type `int64`.
6336 buffer_output_elements: A `Tensor` of type `int64`.
6337 prefetch_input_elements: A `Tensor` of type `int64`.
6338 f: A function decorated with @Defun.
6339 A function mapping elements of `input_dataset`, concatenated with
6340 `other_arguments`, to a Dataset variant that contains elements matching
6341 `output_types` and `output_shapes`.
6342 output_types: A list of `tf.DTypes` that has length `>= 1`.
6343 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
6344 deterministic: An optional `string`. Defaults to `"default"`.
6345 metadata: An optional `string`. Defaults to `""`.
6346 name: A name for the operation (optional).
6348 Returns:
6349 A `Tensor` of type `variant`.
6350 """
6351 _ctx = _context._context or _context.context()
6352 tld = _ctx._thread_local_data
6353 if tld.is_eager:
6354 try:
6355 _result = pywrap_tfe.TFE_Py_FastPathExecute(
6356 _ctx, "LegacyParallelInterleaveDatasetV2", name, input_dataset,
6357 other_arguments, cycle_length, block_length, buffer_output_elements,
6358 prefetch_input_elements, "f", f, "deterministic", deterministic,
6359 "output_types", output_types, "output_shapes", output_shapes,
6360 "metadata", metadata)
6361 return _result
6362 except _core._NotOkStatusException as e:
6363 _ops.raise_from_not_ok_status(e, name)
6364 except _core._FallbackException:
6365 pass
6366 try:
6367 return legacy_parallel_interleave_dataset_v2_eager_fallback(
6368 input_dataset, other_arguments, cycle_length, block_length,
6369 buffer_output_elements, prefetch_input_elements, f=f,
6370 deterministic=deterministic, output_types=output_types,
6371 output_shapes=output_shapes, metadata=metadata, name=name, ctx=_ctx)
6372 except _core._SymbolicException:
6373 pass # Add nodes to the TensorFlow graph.
6374 # Add nodes to the TensorFlow graph.
6375 if not isinstance(output_types, (list, tuple)):
6376 raise TypeError(
6377 "Expected list for 'output_types' argument to "
6378 "'legacy_parallel_interleave_dataset_v2' Op, not %r." % output_types)
6379 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
6380 if not isinstance(output_shapes, (list, tuple)):
6381 raise TypeError(
6382 "Expected list for 'output_shapes' argument to "
6383 "'legacy_parallel_interleave_dataset_v2' Op, not %r." % output_shapes)
6384 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
6385 if deterministic is None:
6386 deterministic = "default"
6387 deterministic = _execute.make_str(deterministic, "deterministic")
6388 if metadata is None:
6389 metadata = ""
6390 metadata = _execute.make_str(metadata, "metadata")
6391 _, _, _op, _outputs = _op_def_library._apply_op_helper(
6392 "LegacyParallelInterleaveDatasetV2", input_dataset=input_dataset,
6393 other_arguments=other_arguments,
6394 cycle_length=cycle_length,
6395 block_length=block_length,
6396 buffer_output_elements=buffer_output_elements,
6397 prefetch_input_elements=prefetch_input_elements,
6398 f=f, output_types=output_types,
6399 output_shapes=output_shapes,
6400 deterministic=deterministic,
6401 metadata=metadata, name=name)
6402 _result = _outputs[:]
6403 if _execute.must_record_gradient():
6404 _attrs = ("f", _op.get_attr("f"), "deterministic",
6405 _op.get_attr("deterministic"), "Targuments",
6406 _op.get_attr("Targuments"), "output_types",
6407 _op.get_attr("output_types"), "output_shapes",
6408 _op.get_attr("output_shapes"), "metadata",
6409 _op.get_attr("metadata"))
6410 _inputs_flat = _op.inputs
6411 _execute.record_gradient(
6412 "LegacyParallelInterleaveDatasetV2", _inputs_flat, _attrs, _result)
6413 _result, = _result
6414 return _result
6416LegacyParallelInterleaveDatasetV2 = tf_export("raw_ops.LegacyParallelInterleaveDatasetV2")(_ops.to_raw_op(legacy_parallel_interleave_dataset_v2))
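# A minimal sketch (hypothetical helper): `interleave_fn` must already be a
# dataset-function attr value (e.g. built with @Defun or from a
# ConcreteFunction), which the caller is assumed to supply together with
# matching element types/shapes.
def _example_legacy_parallel_interleave_v2(input_variant, interleave_fn):
  return legacy_parallel_interleave_dataset_v2(
      input_variant, other_arguments=[],
      cycle_length=4, block_length=1,
      buffer_output_elements=2, prefetch_input_elements=2,
      f=interleave_fn,
      output_types=[_dtypes.string], output_shapes=[[]],
      deterministic="false")  # opt out of determinism for throughput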
6419def legacy_parallel_interleave_dataset_v2_eager_fallback(input_dataset, other_arguments, cycle_length, block_length, buffer_output_elements, prefetch_input_elements, f, output_types, output_shapes, deterministic, metadata, name, ctx):
6420 if not isinstance(output_types, (list, tuple)):
6421 raise TypeError(
6422 "Expected list for 'output_types' argument to "
6423 "'legacy_parallel_interleave_dataset_v2' Op, not %r." % output_types)
6424 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
6425 if not isinstance(output_shapes, (list, tuple)):
6426 raise TypeError(
6427 "Expected list for 'output_shapes' argument to "
6428 "'legacy_parallel_interleave_dataset_v2' Op, not %r." % output_shapes)
6429 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
6430 if deterministic is None:
6431 deterministic = "default"
6432 deterministic = _execute.make_str(deterministic, "deterministic")
6433 if metadata is None:
6434 metadata = ""
6435 metadata = _execute.make_str(metadata, "metadata")
6436 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx)
6437 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
6438 cycle_length = _ops.convert_to_tensor(cycle_length, _dtypes.int64)
6439 block_length = _ops.convert_to_tensor(block_length, _dtypes.int64)
6440 buffer_output_elements = _ops.convert_to_tensor(buffer_output_elements, _dtypes.int64)
6441 prefetch_input_elements = _ops.convert_to_tensor(prefetch_input_elements, _dtypes.int64)
6442 _inputs_flat = [input_dataset] + list(other_arguments) + [cycle_length, block_length, buffer_output_elements, prefetch_input_elements]
6443 _attrs = ("f", f, "deterministic", deterministic, "Targuments",
6444 _attr_Targuments, "output_types", output_types, "output_shapes",
6445 output_shapes, "metadata", metadata)
6446 _result = _execute.execute(b"LegacyParallelInterleaveDatasetV2", 1,
6447 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
6448 name=name)
6449 if _execute.must_record_gradient():
6450 _execute.record_gradient(
6451 "LegacyParallelInterleaveDatasetV2", _inputs_flat, _attrs, _result)
6452 _result, = _result
6453 return _result
6456def list_dataset(tensors, output_types, output_shapes, metadata="", name=None):
6457 r"""Creates a dataset that emits each of `tensors` once.
6459 Args:
6460 tensors: A list of `Tensor` objects.
6461 output_types: A list of `tf.DTypes` that has length `>= 1`.
6462 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
6463 metadata: An optional `string`. Defaults to `""`.
6464 name: A name for the operation (optional).
6466 Returns:
6467 A `Tensor` of type `variant`.
6468 """
6469 _ctx = _context._context or _context.context()
6470 tld = _ctx._thread_local_data
6471 if tld.is_eager:
6472 try:
6473 _result = pywrap_tfe.TFE_Py_FastPathExecute(
6474 _ctx, "ListDataset", name, tensors, "output_types", output_types,
6475 "output_shapes", output_shapes, "metadata", metadata)
6476 return _result
6477 except _core._NotOkStatusException as e:
6478 _ops.raise_from_not_ok_status(e, name)
6479 except _core._FallbackException:
6480 pass
6481 try:
6482 return list_dataset_eager_fallback(
6483 tensors, output_types=output_types, output_shapes=output_shapes,
6484 metadata=metadata, name=name, ctx=_ctx)
6485 except _core._SymbolicException:
6486 pass # Add nodes to the TensorFlow graph.
6487 # Add nodes to the TensorFlow graph.
6488 if not isinstance(output_types, (list, tuple)):
6489 raise TypeError(
6490 "Expected list for 'output_types' argument to "
6491 "'list_dataset' Op, not %r." % output_types)
6492 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
6493 if not isinstance(output_shapes, (list, tuple)):
6494 raise TypeError(
6495 "Expected list for 'output_shapes' argument to "
6496 "'list_dataset' Op, not %r." % output_shapes)
6497 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
6498 if metadata is None:
6499 metadata = ""
6500 metadata = _execute.make_str(metadata, "metadata")
6501 _, _, _op, _outputs = _op_def_library._apply_op_helper(
6502 "ListDataset", tensors=tensors, output_types=output_types,
6503 output_shapes=output_shapes, metadata=metadata,
6504 name=name)
6505 _result = _outputs[:]
6506 if _execute.must_record_gradient():
6507 _attrs = ("Tinput_types", _op.get_attr("Tinput_types"), "output_types",
6508 _op.get_attr("output_types"), "output_shapes",
6509 _op.get_attr("output_shapes"), "metadata",
6510 _op.get_attr("metadata"))
6511 _inputs_flat = _op.inputs
6512 _execute.record_gradient(
6513 "ListDataset", _inputs_flat, _attrs, _result)
6514 _result, = _result
6515 return _result
6517ListDataset = tf_export("raw_ops.ListDataset")(_ops.to_raw_op(list_dataset))
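# A minimal sketch (hypothetical helper): each of the three scalar int64
# tensors below becomes one dataset element, so output_types/output_shapes
# describe a single scalar int64 component.
def _example_list_dataset():
  tensors = [_ops.convert_to_tensor(v, _dtypes.int64) for v in (1, 2, 3)]
  return list_dataset(
      tensors, output_types=[_dtypes.int64], output_shapes=[[]])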
6520def list_dataset_eager_fallback(tensors, output_types, output_shapes, metadata, name, ctx):
6521 if not isinstance(output_types, (list, tuple)):
6522 raise TypeError(
6523 "Expected list for 'output_types' argument to "
6524 "'list_dataset' Op, not %r." % output_types)
6525 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
6526 if not isinstance(output_shapes, (list, tuple)):
6527 raise TypeError(
6528 "Expected list for 'output_shapes' argument to "
6529 "'list_dataset' Op, not %r." % output_shapes)
6530 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
6531 if metadata is None:
6532 metadata = ""
6533 metadata = _execute.make_str(metadata, "metadata")
6534 _attr_Tinput_types, tensors = _execute.convert_to_mixed_eager_tensors(tensors, ctx)
6535 _inputs_flat = list(tensors)
6536 _attrs = ("Tinput_types", _attr_Tinput_types, "output_types", output_types,
6537 "output_shapes", output_shapes, "metadata", metadata)
6538 _result = _execute.execute(b"ListDataset", 1, inputs=_inputs_flat,
6539 attrs=_attrs, ctx=ctx, name=name)
6540 if _execute.must_record_gradient():
6541 _execute.record_gradient(
6542 "ListDataset", _inputs_flat, _attrs, _result)
6543 _result, = _result
6544 return _result
6547def load_dataset(path, reader_func_other_args, output_types, output_shapes, reader_func, compression="", name=None):
6548 r"""TODO: add doc.
6550 Args:
6551 path: A `Tensor` of type `string`.
6552 reader_func_other_args: A list of `Tensor` objects.
6553 output_types: A list of `tf.DTypes` that has length `>= 1`.
6554 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
6555 reader_func: A function decorated with @Defun.
6556 compression: An optional `string`. Defaults to `""`.
6557 name: A name for the operation (optional).
6559 Returns:
6560 A `Tensor` of type `variant`.
6561 """
6562 _ctx = _context._context or _context.context()
6563 tld = _ctx._thread_local_data
6564 if tld.is_eager:
6565 try:
6566 _result = pywrap_tfe.TFE_Py_FastPathExecute(
6567 _ctx, "LoadDataset", name, path, reader_func_other_args,
6568 "output_types", output_types, "output_shapes", output_shapes,
6569 "compression", compression, "reader_func", reader_func)
6570 return _result
6571 except _core._NotOkStatusException as e:
6572 _ops.raise_from_not_ok_status(e, name)
6573 except _core._FallbackException:
6574 pass
6575 try:
6576 return load_dataset_eager_fallback(
6577 path, reader_func_other_args, output_types=output_types,
6578 output_shapes=output_shapes, compression=compression,
6579 reader_func=reader_func, name=name, ctx=_ctx)
6580 except _core._SymbolicException:
6581 pass # Add nodes to the TensorFlow graph.
6582 # Add nodes to the TensorFlow graph.
6583 if not isinstance(output_types, (list, tuple)):
6584 raise TypeError(
6585 "Expected list for 'output_types' argument to "
6586 "'load_dataset' Op, not %r." % output_types)
6587 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
6588 if not isinstance(output_shapes, (list, tuple)):
6589 raise TypeError(
6590 "Expected list for 'output_shapes' argument to "
6591 "'load_dataset' Op, not %r." % output_shapes)
6592 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
6593 if compression is None:
6594 compression = ""
6595 compression = _execute.make_str(compression, "compression")
6596 _, _, _op, _outputs = _op_def_library._apply_op_helper(
6597 "LoadDataset", path=path,
6598 reader_func_other_args=reader_func_other_args,
6599 output_types=output_types, output_shapes=output_shapes,
6600 reader_func=reader_func, compression=compression,
6601 name=name)
6602 _result = _outputs[:]
6603 if _execute.must_record_gradient():
6604 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
6605 _op.get_attr("output_shapes"), "compression",
6606 _op.get_attr("compression"), "reader_func",
6607 _op.get_attr("reader_func"), "Treader_func_args",
6608 _op.get_attr("Treader_func_args"))
6609 _inputs_flat = _op.inputs
6610 _execute.record_gradient(
6611 "LoadDataset", _inputs_flat, _attrs, _result)
6612 _result, = _result
6613 return _result
6615LoadDataset = tf_export("raw_ops.LoadDataset")(_ops.to_raw_op(load_dataset))
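# A minimal sketch (hypothetical helper): reloads a dataset previously
# written by SaveDataset. `reader_fn` is an assumed dataset-function attr,
# and the compression and element specs must match what was saved.
def _example_load_dataset(path, reader_fn):
  return load_dataset(
      path, reader_func_other_args=[],
      output_types=[_dtypes.string], output_shapes=[[]],
      reader_func=reader_fn, compression="GZIP")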
6618def load_dataset_eager_fallback(path, reader_func_other_args, output_types, output_shapes, reader_func, compression, name, ctx):
6619 if not isinstance(output_types, (list, tuple)):
6620 raise TypeError(
6621 "Expected list for 'output_types' argument to "
6622 "'load_dataset' Op, not %r." % output_types)
6623 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
6624 if not isinstance(output_shapes, (list, tuple)):
6625 raise TypeError(
6626 "Expected list for 'output_shapes' argument to "
6627 "'load_dataset' Op, not %r." % output_shapes)
6628 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
6629 if compression is None:
6630 compression = ""
6631 compression = _execute.make_str(compression, "compression")
6632 _attr_Treader_func_args, reader_func_other_args = _execute.convert_to_mixed_eager_tensors(reader_func_other_args, ctx)
6633 path = _ops.convert_to_tensor(path, _dtypes.string)
6634 _inputs_flat = [path] + list(reader_func_other_args)
6635 _attrs = ("output_types", output_types, "output_shapes", output_shapes,
6636 "compression", compression, "reader_func", reader_func, "Treader_func_args",
6637 _attr_Treader_func_args)
6638 _result = _execute.execute(b"LoadDataset", 1, inputs=_inputs_flat,
6639 attrs=_attrs, ctx=ctx, name=name)
6640 if _execute.must_record_gradient():
6641 _execute.record_gradient(
6642 "LoadDataset", _inputs_flat, _attrs, _result)
6643 _result, = _result
6644 return _result
6647def map_and_batch_dataset(input_dataset, other_arguments, batch_size, num_parallel_calls, drop_remainder, f, output_types, output_shapes, preserve_cardinality=False, metadata="", name=None):
6648 r"""Creates a dataset that fuses mapping with batching.
6650 Creates a dataset that applies `f` to the outputs of `input_dataset` and then
6651 batches `batch_size` of them.
6653 Unlike a "MapDataset", which applies `f` sequentially, this dataset invokes up
6654 to `batch_size * num_parallel_batches` copies of `f` in parallel.
6656 Args:
6657 input_dataset: A `Tensor` of type `variant`.
6658 A variant tensor representing the input dataset.
6659 other_arguments: A list of `Tensor` objects.
6660 A list of tensors, typically values that were captured when building a closure
6661 for `f`.
6662 batch_size: A `Tensor` of type `int64`.
6663 A scalar representing the number of elements to accumulate in a
6664 batch.
6666 num_parallel_calls: A `Tensor` of type `int64`.
6667 A scalar representing the maximum number of parallel invocations of `f`.
6668 Applying `f` to consecutive input elements in parallel has the potential to
6669 improve input pipeline throughput.
6670 drop_remainder: A `Tensor` of type `bool`.
6671 A scalar representing whether the last batch should be dropped in case its size
6672 is smaller than desired.
6673 f: A function decorated with @Defun.
6674 A function to apply to the outputs of `input_dataset`.
6675 output_types: A list of `tf.DTypes` that has length `>= 1`.
6676 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
6677 preserve_cardinality: An optional `bool`. Defaults to `False`.
6678 metadata: An optional `string`. Defaults to `""`.
6679 name: A name for the operation (optional).
6681 Returns:
6682 A `Tensor` of type `variant`.
6683 """
6684 _ctx = _context._context or _context.context()
6685 tld = _ctx._thread_local_data
6686 if tld.is_eager:
6687 try:
6688 _result = pywrap_tfe.TFE_Py_FastPathExecute(
6689 _ctx, "MapAndBatchDataset", name, input_dataset, other_arguments,
6690 batch_size, num_parallel_calls, drop_remainder, "f", f,
6691 "output_types", output_types, "output_shapes", output_shapes,
6692 "preserve_cardinality", preserve_cardinality, "metadata", metadata)
6693 return _result
6694 except _core._NotOkStatusException as e:
6695 _ops.raise_from_not_ok_status(e, name)
6696 except _core._FallbackException:
6697 pass
6698 try:
6699 return map_and_batch_dataset_eager_fallback(
6700 input_dataset, other_arguments, batch_size, num_parallel_calls,
6701 drop_remainder, f=f, output_types=output_types,
6702 output_shapes=output_shapes,
6703 preserve_cardinality=preserve_cardinality, metadata=metadata,
6704 name=name, ctx=_ctx)
6705 except _core._SymbolicException:
6706 pass # Add nodes to the TensorFlow graph.
6707 # Add nodes to the TensorFlow graph.
6708 if not isinstance(output_types, (list, tuple)):
6709 raise TypeError(
6710 "Expected list for 'output_types' argument to "
6711 "'map_and_batch_dataset' Op, not %r." % output_types)
6712 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
6713 if not isinstance(output_shapes, (list, tuple)):
6714 raise TypeError(
6715 "Expected list for 'output_shapes' argument to "
6716 "'map_and_batch_dataset' Op, not %r." % output_shapes)
6717 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
6718 if preserve_cardinality is None:
6719 preserve_cardinality = False
6720 preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality")
6721 if metadata is None:
6722 metadata = ""
6723 metadata = _execute.make_str(metadata, "metadata")
6724 _, _, _op, _outputs = _op_def_library._apply_op_helper(
6725 "MapAndBatchDataset", input_dataset=input_dataset,
6726 other_arguments=other_arguments,
6727 batch_size=batch_size,
6728 num_parallel_calls=num_parallel_calls,
6729 drop_remainder=drop_remainder, f=f,
6730 output_types=output_types,
6731 output_shapes=output_shapes,
6732 preserve_cardinality=preserve_cardinality,
6733 metadata=metadata, name=name)
6734 _result = _outputs[:]
6735 if _execute.must_record_gradient():
6736 _attrs = ("f", _op.get_attr("f"), "Targuments",
6737 _op.get_attr("Targuments"), "output_types",
6738 _op.get_attr("output_types"), "output_shapes",
6739 _op.get_attr("output_shapes"), "preserve_cardinality",
6740 _op._get_attr_bool("preserve_cardinality"), "metadata",
6741 _op.get_attr("metadata"))
6742 _inputs_flat = _op.inputs
6743 _execute.record_gradient(
6744 "MapAndBatchDataset", _inputs_flat, _attrs, _result)
6745 _result, = _result
6746 return _result
6748MapAndBatchDataset = tf_export("raw_ops.MapAndBatchDataset")(_ops.to_raw_op(map_and_batch_dataset))
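# A minimal sketch (hypothetical helper): fuses map and batch. `map_fn` is an
# assumed dataset-function attr whose output is a single scalar float32,
# batched here into vectors of up to 32 elements.
def _example_map_and_batch_dataset(input_variant, map_fn):
  return map_and_batch_dataset(
      input_variant, other_arguments=[],
      batch_size=32, num_parallel_calls=4, drop_remainder=False,
      f=map_fn,
      output_types=[_dtypes.float32],
      output_shapes=[[None]])  # batch dim unknown when remainders are kept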
6751def map_and_batch_dataset_eager_fallback(input_dataset, other_arguments, batch_size, num_parallel_calls, drop_remainder, f, output_types, output_shapes, preserve_cardinality, metadata, name, ctx):
6752 if not isinstance(output_types, (list, tuple)):
6753 raise TypeError(
6754 "Expected list for 'output_types' argument to "
6755 "'map_and_batch_dataset' Op, not %r." % output_types)
6756 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
6757 if not isinstance(output_shapes, (list, tuple)):
6758 raise TypeError(
6759 "Expected list for 'output_shapes' argument to "
6760 "'map_and_batch_dataset' Op, not %r." % output_shapes)
6761 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
6762 if preserve_cardinality is None:
6763 preserve_cardinality = False
6764 preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality")
6765 if metadata is None:
6766 metadata = ""
6767 metadata = _execute.make_str(metadata, "metadata")
6768 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx)
6769 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
6770 batch_size = _ops.convert_to_tensor(batch_size, _dtypes.int64)
6771 num_parallel_calls = _ops.convert_to_tensor(num_parallel_calls, _dtypes.int64)
6772 drop_remainder = _ops.convert_to_tensor(drop_remainder, _dtypes.bool)
6773 _inputs_flat = [input_dataset] + list(other_arguments) + [batch_size, num_parallel_calls, drop_remainder]
6774 _attrs = ("f", f, "Targuments", _attr_Targuments, "output_types",
6775 output_types, "output_shapes", output_shapes, "preserve_cardinality",
6776 preserve_cardinality, "metadata", metadata)
6777 _result = _execute.execute(b"MapAndBatchDataset", 1, inputs=_inputs_flat,
6778 attrs=_attrs, ctx=ctx, name=name)
6779 if _execute.must_record_gradient():
6780 _execute.record_gradient(
6781 "MapAndBatchDataset", _inputs_flat, _attrs, _result)
6782 _result, = _result
6783 return _result
6786def matching_files_dataset(patterns, name=None):
6787 r"""TODO: add doc.
6789 Args:
6790 patterns: A `Tensor` of type `string`.
6791 name: A name for the operation (optional).
6793 Returns:
6794 A `Tensor` of type `variant`.
6795 """
6796 _ctx = _context._context or _context.context()
6797 tld = _ctx._thread_local_data
6798 if tld.is_eager:
6799 try:
6800 _result = pywrap_tfe.TFE_Py_FastPathExecute(
6801 _ctx, "MatchingFilesDataset", name, patterns)
6802 return _result
6803 except _core._NotOkStatusException as e:
6804 _ops.raise_from_not_ok_status(e, name)
6805 except _core._FallbackException:
6806 pass
6807 try:
6808 return matching_files_dataset_eager_fallback(
6809 patterns, name=name, ctx=_ctx)
6810 except _core._SymbolicException:
6811 pass # Add nodes to the TensorFlow graph.
6812 # Add nodes to the TensorFlow graph.
6813 _, _, _op, _outputs = _op_def_library._apply_op_helper(
6814 "MatchingFilesDataset", patterns=patterns, name=name)
6815 _result = _outputs[:]
6816 if _execute.must_record_gradient():
6817 _attrs = ()
6818 _inputs_flat = _op.inputs
6819 _execute.record_gradient(
6820 "MatchingFilesDataset", _inputs_flat, _attrs, _result)
6821 _result, = _result
6822 return _result
6824MatchingFilesDataset = tf_export("raw_ops.MatchingFilesDataset")(_ops.to_raw_op(matching_files_dataset))
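# A minimal sketch (hypothetical helper and glob pattern): yields one scalar
# string filename per matching file.
def _example_matching_files_dataset(pattern="/tmp/data/*.tfrecord"):
  return matching_files_dataset(patterns=[pattern])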
6827def matching_files_dataset_eager_fallback(patterns, name, ctx):
6828 patterns = _ops.convert_to_tensor(patterns, _dtypes.string)
6829 _inputs_flat = [patterns]
6830 _attrs = None
6831 _result = _execute.execute(b"MatchingFilesDataset", 1, inputs=_inputs_flat,
6832 attrs=_attrs, ctx=ctx, name=name)
6833 if _execute.must_record_gradient():
6834 _execute.record_gradient(
6835 "MatchingFilesDataset", _inputs_flat, _attrs, _result)
6836 _result, = _result
6837 return _result
6840def max_intra_op_parallelism_dataset(input_dataset, max_intra_op_parallelism, output_types, output_shapes, name=None):
6841 r"""Creates a dataset that overrides the maximum intra-op parallelism.
6843 Args:
6844 input_dataset: A `Tensor` of type `variant`.
6845 max_intra_op_parallelism: A `Tensor` of type `int64`.
6846 Identifies the maximum intra-op parallelism to use.
6847 output_types: A list of `tf.DTypes` that has length `>= 1`.
6848 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
6849 name: A name for the operation (optional).
6851 Returns:
6852 A `Tensor` of type `variant`.
6853 """
6854 _ctx = _context._context or _context.context()
6855 tld = _ctx._thread_local_data
6856 if tld.is_eager:
6857 try:
6858 _result = pywrap_tfe.TFE_Py_FastPathExecute(
6859 _ctx, "MaxIntraOpParallelismDataset", name, input_dataset,
6860 max_intra_op_parallelism, "output_types", output_types,
6861 "output_shapes", output_shapes)
6862 return _result
6863 except _core._NotOkStatusException as e:
6864 _ops.raise_from_not_ok_status(e, name)
6865 except _core._FallbackException:
6866 pass
6867 try:
6868 return max_intra_op_parallelism_dataset_eager_fallback(
6869 input_dataset, max_intra_op_parallelism, output_types=output_types,
6870 output_shapes=output_shapes, name=name, ctx=_ctx)
6871 except _core._SymbolicException:
6872 pass # Add nodes to the TensorFlow graph.
6873 # Add nodes to the TensorFlow graph.
6874 if not isinstance(output_types, (list, tuple)):
6875 raise TypeError(
6876 "Expected list for 'output_types' argument to "
6877 "'max_intra_op_parallelism_dataset' Op, not %r." % output_types)
6878 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
6879 if not isinstance(output_shapes, (list, tuple)):
6880 raise TypeError(
6881 "Expected list for 'output_shapes' argument to "
6882 "'max_intra_op_parallelism_dataset' Op, not %r." % output_shapes)
6883 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
6884 _, _, _op, _outputs = _op_def_library._apply_op_helper(
6885 "MaxIntraOpParallelismDataset", input_dataset=input_dataset,
6886 max_intra_op_parallelism=max_intra_op_parallelism,
6887 output_types=output_types,
6888 output_shapes=output_shapes,
6889 name=name)
6890 _result = _outputs[:]
6891 if _execute.must_record_gradient():
6892 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
6893 _op.get_attr("output_shapes"))
6894 _inputs_flat = _op.inputs
6895 _execute.record_gradient(
6896 "MaxIntraOpParallelismDataset", _inputs_flat, _attrs, _result)
6897 _result, = _result
6898 return _result
6900MaxIntraOpParallelismDataset = tf_export("raw_ops.MaxIntraOpParallelismDataset")(_ops.to_raw_op(max_intra_op_parallelism_dataset))
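# A minimal sketch (hypothetical helper): caps intra-op parallelism at 1 for
# work done on behalf of this pipeline; element types/shapes must mirror the
# input dataset and are assumed scalar int64 here.
def _example_max_intra_op_parallelism_dataset(input_variant):
  return max_intra_op_parallelism_dataset(
      input_variant, max_intra_op_parallelism=1,
      output_types=[_dtypes.int64], output_shapes=[[]])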
6903def max_intra_op_parallelism_dataset_eager_fallback(input_dataset, max_intra_op_parallelism, output_types, output_shapes, name, ctx):
6904 if not isinstance(output_types, (list, tuple)):
6905 raise TypeError(
6906 "Expected list for 'output_types' argument to "
6907 "'max_intra_op_parallelism_dataset' Op, not %r." % output_types)
6908 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
6909 if not isinstance(output_shapes, (list, tuple)):
6910 raise TypeError(
6911 "Expected list for 'output_shapes' argument to "
6912 "'max_intra_op_parallelism_dataset' Op, not %r." % output_shapes)
6913 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
6914 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
6915 max_intra_op_parallelism = _ops.convert_to_tensor(max_intra_op_parallelism, _dtypes.int64)
6916 _inputs_flat = [input_dataset, max_intra_op_parallelism]
6917 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
6918 _result = _execute.execute(b"MaxIntraOpParallelismDataset", 1,
6919 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
6920 name=name)
6921 if _execute.must_record_gradient():
6922 _execute.record_gradient(
6923 "MaxIntraOpParallelismDataset", _inputs_flat, _attrs, _result)
6924 _result, = _result
6925 return _result
6928def non_serializable_dataset(input_dataset, output_types, output_shapes, name=None):
6929 r"""TODO: add doc.
6931 Args:
6932 input_dataset: A `Tensor` of type `variant`.
6933 output_types: A list of `tf.DTypes` that has length `>= 1`.
6934 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
6935 name: A name for the operation (optional).
6937 Returns:
6938 A `Tensor` of type `variant`.
6939 """
6940 _ctx = _context._context or _context.context()
6941 tld = _ctx._thread_local_data
6942 if tld.is_eager:
6943 try:
6944 _result = pywrap_tfe.TFE_Py_FastPathExecute(
6945 _ctx, "NonSerializableDataset", name, input_dataset, "output_types",
6946 output_types, "output_shapes", output_shapes)
6947 return _result
6948 except _core._NotOkStatusException as e:
6949 _ops.raise_from_not_ok_status(e, name)
6950 except _core._FallbackException:
6951 pass
6952 try:
6953 return non_serializable_dataset_eager_fallback(
6954 input_dataset, output_types=output_types,
6955 output_shapes=output_shapes, name=name, ctx=_ctx)
6956 except _core._SymbolicException:
6957 pass # Add nodes to the TensorFlow graph.
6958 # Add nodes to the TensorFlow graph.
6959 if not isinstance(output_types, (list, tuple)):
6960 raise TypeError(
6961 "Expected list for 'output_types' argument to "
6962 "'non_serializable_dataset' Op, not %r." % output_types)
6963 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
6964 if not isinstance(output_shapes, (list, tuple)):
6965 raise TypeError(
6966 "Expected list for 'output_shapes' argument to "
6967 "'non_serializable_dataset' Op, not %r." % output_shapes)
6968 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
6969 _, _, _op, _outputs = _op_def_library._apply_op_helper(
6970 "NonSerializableDataset", input_dataset=input_dataset,
6971 output_types=output_types,
6972 output_shapes=output_shapes, name=name)
6973 _result = _outputs[:]
6974 if _execute.must_record_gradient():
6975 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
6976 _op.get_attr("output_shapes"))
6977 _inputs_flat = _op.inputs
6978 _execute.record_gradient(
6979 "NonSerializableDataset", _inputs_flat, _attrs, _result)
6980 _result, = _result
6981 return _result
6983NonSerializableDataset = tf_export("raw_ops.NonSerializableDataset")(_ops.to_raw_op(non_serializable_dataset))
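# A minimal sketch (hypothetical helper): wraps a pipeline so that attempts
# to serialize it fail, as exercised in tests; element specs are assumed
# scalar int64.
def _example_non_serializable_dataset(input_variant):
  return non_serializable_dataset(
      input_variant, output_types=[_dtypes.int64], output_shapes=[[]])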
6986def non_serializable_dataset_eager_fallback(input_dataset, output_types, output_shapes, name, ctx):
6987 if not isinstance(output_types, (list, tuple)):
6988 raise TypeError(
6989 "Expected list for 'output_types' argument to "
6990 "'non_serializable_dataset' Op, not %r." % output_types)
6991 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
6992 if not isinstance(output_shapes, (list, tuple)):
6993 raise TypeError(
6994 "Expected list for 'output_shapes' argument to "
6995 "'non_serializable_dataset' Op, not %r." % output_shapes)
6996 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
6997 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
6998 _inputs_flat = [input_dataset]
6999 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
7000 _result = _execute.execute(b"NonSerializableDataset", 1,
7001 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
7002 name=name)
7003 if _execute.must_record_gradient():
7004 _execute.record_gradient(
7005 "NonSerializableDataset", _inputs_flat, _attrs, _result)
7006 _result, = _result
7007 return _result
7010def parallel_interleave_dataset(input_dataset, other_arguments, cycle_length, block_length, sloppy, buffer_output_elements, prefetch_input_elements, f, output_types, output_shapes, metadata="", name=None):
7011 r"""Creates a dataset that applies `f` to the outputs of `input_dataset`.
7013 The resulting dataset is similar to the `InterleaveDataset`, with the exception
7014 that if retrieving the next value from a dataset would cause the requester to
7015 block, it will skip that input dataset. This dataset is especially useful
7016 when loading data from variable-latency datastores (e.g. HDFS, GCS), as it
7017 allows the training step to proceed so long as some data is available.
7019 !! WARNING !! If the `sloppy` parameter is set to `True`, the operation of this
7020 dataset will not be deterministic!
7022 This dataset has been superseded by `ParallelInterleaveDatasetV2`. New code
7023 should use `ParallelInterleaveDatasetV2`.
7025 The Python API `tf.data.experimental.parallel_interleave` creates instances of
7026 this op. `tf.data.experimental.parallel_interleave` is a deprecated API.
7028 Args:
7029 input_dataset: A `Tensor` of type `variant`.
7030 Dataset that produces a stream of arguments for the function `f`.
7031 other_arguments: A list of `Tensor` objects.
7032 Additional arguments to pass to `f` beyond those produced by `input_dataset`.
7033 Evaluated once when the dataset is instantiated.
7034 cycle_length: A `Tensor` of type `int64`.
7035 Number of datasets (each created by applying `f` to the elements of
7036 `input_dataset`) among which the `ParallelInterleaveDataset` will cycle in a
7037 round-robin fashion.
7038 block_length: A `Tensor` of type `int64`.
7039 Number of elements at a time to produce from each interleaved invocation of a
7040 dataset returned by `f`.
7041 sloppy: A `Tensor` of type `bool`.
7042 If `True`, return elements as they become available, even if that means returning
7043 these elements in a non-deterministic order. Sloppy operation may result in better
7044 performance in the presence of stragglers, but the dataset will still block if
7045 all of its open streams are blocked.
7046 If `False`, always return elements in a deterministic order.
7047 buffer_output_elements: A `Tensor` of type `int64`.
7048 The number of elements each iterator being interleaved should buffer (similar
7049 to the `.prefetch()` transformation for each interleaved iterator).
7050 prefetch_input_elements: A `Tensor` of type `int64`.
7051 Determines the number of iterators to prefetch, allowing buffers to warm up and
7052 data to be pre-fetched without blocking the main thread.
7053 f: A function decorated with @Defun.
7054 A function mapping elements of `input_dataset`, concatenated with
7055 `other_arguments`, to a Dataset variant that contains elements matching
7056 `output_types` and `output_shapes`.
7057 output_types: A list of `tf.DTypes` that has length `>= 1`.
7058 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
7059 metadata: An optional `string`. Defaults to `""`.
7060 name: A name for the operation (optional).
7062 Returns:
7063 A `Tensor` of type `variant`.
7064 """
7065 _ctx = _context._context or _context.context()
7066 tld = _ctx._thread_local_data
7067 if tld.is_eager:
7068 try:
7069 _result = pywrap_tfe.TFE_Py_FastPathExecute(
7070 _ctx, "ParallelInterleaveDataset", name, input_dataset,
7071 other_arguments, cycle_length, block_length, sloppy,
7072 buffer_output_elements, prefetch_input_elements, "f", f,
7073 "output_types", output_types, "output_shapes", output_shapes,
7074 "metadata", metadata)
7075 return _result
7076 except _core._NotOkStatusException as e:
7077 _ops.raise_from_not_ok_status(e, name)
7078 except _core._FallbackException:
7079 pass
7080 try:
7081 return parallel_interleave_dataset_eager_fallback(
7082 input_dataset, other_arguments, cycle_length, block_length, sloppy,
7083 buffer_output_elements, prefetch_input_elements, f=f,
7084 output_types=output_types, output_shapes=output_shapes,
7085 metadata=metadata, name=name, ctx=_ctx)
7086 except _core._SymbolicException:
7087 pass # Add nodes to the TensorFlow graph.
7088 # Add nodes to the TensorFlow graph.
7089 if not isinstance(output_types, (list, tuple)):
7090 raise TypeError(
7091 "Expected list for 'output_types' argument to "
7092 "'parallel_interleave_dataset' Op, not %r." % output_types)
7093 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
7094 if not isinstance(output_shapes, (list, tuple)):
7095 raise TypeError(
7096 "Expected list for 'output_shapes' argument to "
7097 "'parallel_interleave_dataset' Op, not %r." % output_shapes)
7098 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
7099 if metadata is None:
7100 metadata = ""
7101 metadata = _execute.make_str(metadata, "metadata")
7102 _, _, _op, _outputs = _op_def_library._apply_op_helper(
7103 "ParallelInterleaveDataset", input_dataset=input_dataset,
7104 other_arguments=other_arguments,
7105 cycle_length=cycle_length,
7106 block_length=block_length, sloppy=sloppy,
7107 buffer_output_elements=buffer_output_elements,
7108 prefetch_input_elements=prefetch_input_elements,
7109 f=f, output_types=output_types,
7110 output_shapes=output_shapes,
7111 metadata=metadata, name=name)
7112 _result = _outputs[:]
7113 if _execute.must_record_gradient():
7114 _attrs = ("f", _op.get_attr("f"), "Targuments",
7115 _op.get_attr("Targuments"), "output_types",
7116 _op.get_attr("output_types"), "output_shapes",
7117 _op.get_attr("output_shapes"), "metadata",
7118 _op.get_attr("metadata"))
7119 _inputs_flat = _op.inputs
7120 _execute.record_gradient(
7121 "ParallelInterleaveDataset", _inputs_flat, _attrs, _result)
7122 _result, = _result
7123 return _result
7125ParallelInterleaveDataset = tf_export("raw_ops.ParallelInterleaveDataset")(_ops.to_raw_op(parallel_interleave_dataset))
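# A minimal sketch (hypothetical helper): the legacy sloppy interleave.
# `interleave_fn` is an assumed dataset-function attr; with sloppy=True as
# below, element order is not deterministic.
def _example_parallel_interleave_dataset(input_variant, interleave_fn):
  return parallel_interleave_dataset(
      input_variant, other_arguments=[],
      cycle_length=4, block_length=1, sloppy=True,
      buffer_output_elements=2, prefetch_input_elements=2,
      f=interleave_fn,
      output_types=[_dtypes.string], output_shapes=[[]])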
7128def parallel_interleave_dataset_eager_fallback(input_dataset, other_arguments, cycle_length, block_length, sloppy, buffer_output_elements, prefetch_input_elements, f, output_types, output_shapes, metadata, name, ctx):
7129 if not isinstance(output_types, (list, tuple)):
7130 raise TypeError(
7131 "Expected list for 'output_types' argument to "
7132 "'parallel_interleave_dataset' Op, not %r." % output_types)
7133 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
7134 if not isinstance(output_shapes, (list, tuple)):
7135 raise TypeError(
7136 "Expected list for 'output_shapes' argument to "
7137 "'parallel_interleave_dataset' Op, not %r." % output_shapes)
7138 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
7139 if metadata is None:
7140 metadata = ""
7141 metadata = _execute.make_str(metadata, "metadata")
7142 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx)
7143 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
7144 cycle_length = _ops.convert_to_tensor(cycle_length, _dtypes.int64)
7145 block_length = _ops.convert_to_tensor(block_length, _dtypes.int64)
7146 sloppy = _ops.convert_to_tensor(sloppy, _dtypes.bool)
7147 buffer_output_elements = _ops.convert_to_tensor(buffer_output_elements, _dtypes.int64)
7148 prefetch_input_elements = _ops.convert_to_tensor(prefetch_input_elements, _dtypes.int64)
7149 _inputs_flat = [input_dataset] + list(other_arguments) + [cycle_length, block_length, sloppy, buffer_output_elements, prefetch_input_elements]
7150 _attrs = ("f", f, "Targuments", _attr_Targuments, "output_types",
7151 output_types, "output_shapes", output_shapes, "metadata", metadata)
7152 _result = _execute.execute(b"ParallelInterleaveDataset", 1,
7153 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
7154 name=name)
7155 if _execute.must_record_gradient():
7156 _execute.record_gradient(
7157 "ParallelInterleaveDataset", _inputs_flat, _attrs, _result)
7158 _result, = _result
7159 return _result
7162def parse_example_dataset(input_dataset, num_parallel_calls, dense_defaults, sparse_keys, dense_keys, sparse_types, dense_shapes, output_types, output_shapes, sloppy=False, ragged_keys=[], ragged_value_types=[], ragged_split_types=[], name=None):
7163 r"""Transforms `input_dataset` containing `Example` protos as vectors of DT_STRING into a dataset of `Tensor` or `SparseTensor` objects representing the parsed features.
7165 Args:
7166 input_dataset: A `Tensor` of type `variant`.
7167 num_parallel_calls: A `Tensor` of type `int64`.
7168 dense_defaults: A list of `Tensor` objects with types from: `float32`, `int64`, `string`.
7169 A dict mapping string keys to `Tensor`s.
7170 The keys of the dict must match the dense_keys of the feature.
7171 sparse_keys: A list of `strings`.
7172 A list of string keys in the examples' features.
7173 The results for these keys will be returned as `SparseTensor` objects.
7174 dense_keys: A list of `strings`.
7175 A list of Ndense string Tensors (scalars).
7176 The keys expected in the Examples' features associated with dense values.
7177 sparse_types: A list of `tf.DTypes` from: `tf.float32, tf.int64, tf.string`.
7178 A list of `DTypes` of the same length as `sparse_keys`.
7179 Only `tf.float32` (`FloatList`), `tf.int64` (`Int64List`),
7180 and `tf.string` (`BytesList`) are supported.
7181 dense_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`).
7182 List of tuples with the same length as `dense_keys`.
7183 The shape of the data for each dense feature referenced by `dense_keys`.
7184 Required for any input tensors identified by `dense_keys`. Must be
7185 either fully defined, or may contain an unknown first dimension.
7186 An unknown first dimension means the feature is treated as having
7187 a variable number of blocks, and the output shape along this dimension
7188 is considered unknown at graph build time. Padding is applied for
7189 minibatch elements smaller than the maximum number of blocks for the
7190 given feature along this dimension.
7191 output_types: A list of `tf.DTypes` that has length `>= 1`.
7192 The type list for the return values.
7193 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
7194 The list of shapes being produced.
7195 sloppy: An optional `bool`. Defaults to `False`.
7196 ragged_keys: An optional list of `strings`. Defaults to `[]`.
7197 ragged_value_types: An optional list of `tf.DTypes` from: `tf.float32, tf.int64, tf.string`. Defaults to `[]`.
7198 ragged_split_types: An optional list of `tf.DTypes` from: `tf.int32, tf.int64`. Defaults to `[]`.
7199 name: A name for the operation (optional).
7201 Returns:
7202 A `Tensor` of type `variant`.
7203 """
7204 _ctx = _context._context or _context.context()
7205 tld = _ctx._thread_local_data
7206 if tld.is_eager:
7207 try:
7208 _result = pywrap_tfe.TFE_Py_FastPathExecute(
7209 _ctx, "ParseExampleDataset", name, input_dataset, num_parallel_calls,
7210 dense_defaults, "sparse_keys", sparse_keys, "dense_keys", dense_keys,
7211 "sparse_types", sparse_types, "dense_shapes", dense_shapes,
7212 "output_types", output_types, "output_shapes", output_shapes,
7213 "sloppy", sloppy, "ragged_keys", ragged_keys, "ragged_value_types",
7214 ragged_value_types, "ragged_split_types", ragged_split_types)
7215 return _result
7216 except _core._NotOkStatusException as e:
7217 _ops.raise_from_not_ok_status(e, name)
7218 except _core._FallbackException:
7219 pass
7220 try:
7221 return parse_example_dataset_eager_fallback(
7222 input_dataset, num_parallel_calls, dense_defaults,
7223 sparse_keys=sparse_keys, dense_keys=dense_keys,
7224 sparse_types=sparse_types, dense_shapes=dense_shapes,
7225 output_types=output_types, output_shapes=output_shapes,
7226 sloppy=sloppy, ragged_keys=ragged_keys,
7227 ragged_value_types=ragged_value_types,
7228 ragged_split_types=ragged_split_types, name=name, ctx=_ctx)
7229 except _core._SymbolicException:
7230 pass # Add nodes to the TensorFlow graph.
7231 # Add nodes to the TensorFlow graph.
7232 if not isinstance(sparse_keys, (list, tuple)):
7233 raise TypeError(
7234 "Expected list for 'sparse_keys' argument to "
7235 "'parse_example_dataset' Op, not %r." % sparse_keys)
7236 sparse_keys = [_execute.make_str(_s, "sparse_keys") for _s in sparse_keys]
7237 if not isinstance(dense_keys, (list, tuple)):
7238 raise TypeError(
7239 "Expected list for 'dense_keys' argument to "
7240 "'parse_example_dataset' Op, not %r." % dense_keys)
7241 dense_keys = [_execute.make_str(_s, "dense_keys") for _s in dense_keys]
7242 if not isinstance(sparse_types, (list, tuple)):
7243 raise TypeError(
7244 "Expected list for 'sparse_types' argument to "
7245 "'parse_example_dataset' Op, not %r." % sparse_types)
7246 sparse_types = [_execute.make_type(_t, "sparse_types") for _t in sparse_types]
7247 if not isinstance(dense_shapes, (list, tuple)):
7248 raise TypeError(
7249 "Expected list for 'dense_shapes' argument to "
7250 "'parse_example_dataset' Op, not %r." % dense_shapes)
7251 dense_shapes = [_execute.make_shape(_s, "dense_shapes") for _s in dense_shapes]
7252 if not isinstance(output_types, (list, tuple)):
7253 raise TypeError(
7254 "Expected list for 'output_types' argument to "
7255 "'parse_example_dataset' Op, not %r." % output_types)
7256 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
7257 if not isinstance(output_shapes, (list, tuple)):
7258 raise TypeError(
7259 "Expected list for 'output_shapes' argument to "
7260 "'parse_example_dataset' Op, not %r." % output_shapes)
7261 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
7262 if sloppy is None:
7263 sloppy = False
7264 sloppy = _execute.make_bool(sloppy, "sloppy")
7265 if ragged_keys is None:
7266 ragged_keys = []
7267 if not isinstance(ragged_keys, (list, tuple)):
7268 raise TypeError(
7269 "Expected list for 'ragged_keys' argument to "
7270 "'parse_example_dataset' Op, not %r." % ragged_keys)
7271 ragged_keys = [_execute.make_str(_s, "ragged_keys") for _s in ragged_keys]
7272 if ragged_value_types is None:
7273 ragged_value_types = []
7274 if not isinstance(ragged_value_types, (list, tuple)):
7275 raise TypeError(
7276 "Expected list for 'ragged_value_types' argument to "
7277 "'parse_example_dataset' Op, not %r." % ragged_value_types)
7278 ragged_value_types = [_execute.make_type(_t, "ragged_value_types") for _t in ragged_value_types]
7279 if ragged_split_types is None:
7280 ragged_split_types = []
7281 if not isinstance(ragged_split_types, (list, tuple)):
7282 raise TypeError(
7283 "Expected list for 'ragged_split_types' argument to "
7284 "'parse_example_dataset' Op, not %r." % ragged_split_types)
7285 ragged_split_types = [_execute.make_type(_t, "ragged_split_types") for _t in ragged_split_types]
7286 _, _, _op, _outputs = _op_def_library._apply_op_helper(
7287 "ParseExampleDataset", input_dataset=input_dataset,
7288 num_parallel_calls=num_parallel_calls,
7289 dense_defaults=dense_defaults,
7290 sparse_keys=sparse_keys, dense_keys=dense_keys,
7291 sparse_types=sparse_types,
7292 dense_shapes=dense_shapes,
7293 output_types=output_types,
7294 output_shapes=output_shapes, sloppy=sloppy,
7295 ragged_keys=ragged_keys,
7296 ragged_value_types=ragged_value_types,
7297 ragged_split_types=ragged_split_types,
7298 name=name)
7299 _result = _outputs[:]
7300 if _execute.must_record_gradient():
7301 _attrs = ("sparse_keys", _op.get_attr("sparse_keys"), "dense_keys",
7302 _op.get_attr("dense_keys"), "sparse_types",
7303 _op.get_attr("sparse_types"), "Tdense", _op.get_attr("Tdense"),
7304 "dense_shapes", _op.get_attr("dense_shapes"), "output_types",
7305 _op.get_attr("output_types"), "output_shapes",
7306 _op.get_attr("output_shapes"), "sloppy",
7307 _op._get_attr_bool("sloppy"), "ragged_keys",
7308 _op.get_attr("ragged_keys"), "ragged_value_types",
7309 _op.get_attr("ragged_value_types"), "ragged_split_types",
7310 _op.get_attr("ragged_split_types"))
7311 _inputs_flat = _op.inputs
7312 _execute.record_gradient(
7313 "ParseExampleDataset", _inputs_flat, _attrs, _result)
7314 _result, = _result
7315 return _result
7317ParseExampleDataset = tf_export("raw_ops.ParseExampleDataset")(_ops.to_raw_op(parse_example_dataset))
7320def parse_example_dataset_eager_fallback(input_dataset, num_parallel_calls, dense_defaults, sparse_keys, dense_keys, sparse_types, dense_shapes, output_types, output_shapes, sloppy, ragged_keys, ragged_value_types, ragged_split_types, name, ctx):
7321 if not isinstance(sparse_keys, (list, tuple)):
7322 raise TypeError(
7323 "Expected list for 'sparse_keys' argument to "
7324 "'parse_example_dataset' Op, not %r." % sparse_keys)
7325 sparse_keys = [_execute.make_str(_s, "sparse_keys") for _s in sparse_keys]
7326 if not isinstance(dense_keys, (list, tuple)):
7327 raise TypeError(
7328 "Expected list for 'dense_keys' argument to "
7329 "'parse_example_dataset' Op, not %r." % dense_keys)
7330 dense_keys = [_execute.make_str(_s, "dense_keys") for _s in dense_keys]
7331 if not isinstance(sparse_types, (list, tuple)):
7332 raise TypeError(
7333 "Expected list for 'sparse_types' argument to "
7334 "'parse_example_dataset' Op, not %r." % sparse_types)
7335 sparse_types = [_execute.make_type(_t, "sparse_types") for _t in sparse_types]
7336 if not isinstance(dense_shapes, (list, tuple)):
7337 raise TypeError(
7338 "Expected list for 'dense_shapes' argument to "
7339 "'parse_example_dataset' Op, not %r." % dense_shapes)
7340 dense_shapes = [_execute.make_shape(_s, "dense_shapes") for _s in dense_shapes]
7341 if not isinstance(output_types, (list, tuple)):
7342 raise TypeError(
7343 "Expected list for 'output_types' argument to "
7344 "'parse_example_dataset' Op, not %r." % output_types)
7345 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
7346 if not isinstance(output_shapes, (list, tuple)):
7347 raise TypeError(
7348 "Expected list for 'output_shapes' argument to "
7349 "'parse_example_dataset' Op, not %r." % output_shapes)
7350 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
7351 if sloppy is None:
7352 sloppy = False
7353 sloppy = _execute.make_bool(sloppy, "sloppy")
7354 if ragged_keys is None:
7355 ragged_keys = []
7356 if not isinstance(ragged_keys, (list, tuple)):
7357 raise TypeError(
7358 "Expected list for 'ragged_keys' argument to "
7359 "'parse_example_dataset' Op, not %r." % ragged_keys)
7360 ragged_keys = [_execute.make_str(_s, "ragged_keys") for _s in ragged_keys]
7361 if ragged_value_types is None:
7362 ragged_value_types = []
7363 if not isinstance(ragged_value_types, (list, tuple)):
7364 raise TypeError(
7365 "Expected list for 'ragged_value_types' argument to "
7366 "'parse_example_dataset' Op, not %r." % ragged_value_types)
7367 ragged_value_types = [_execute.make_type(_t, "ragged_value_types") for _t in ragged_value_types]
7368 if ragged_split_types is None:
7369 ragged_split_types = []
7370 if not isinstance(ragged_split_types, (list, tuple)):
7371 raise TypeError(
7372 "Expected list for 'ragged_split_types' argument to "
7373 "'parse_example_dataset' Op, not %r." % ragged_split_types)
7374 ragged_split_types = [_execute.make_type(_t, "ragged_split_types") for _t in ragged_split_types]
7375 _attr_Tdense, dense_defaults = _execute.convert_to_mixed_eager_tensors(dense_defaults, ctx)
7376 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
7377 num_parallel_calls = _ops.convert_to_tensor(num_parallel_calls, _dtypes.int64)
7378 _inputs_flat = [input_dataset, num_parallel_calls] + list(dense_defaults)
7379 _attrs = ("sparse_keys", sparse_keys, "dense_keys", dense_keys,
7380 "sparse_types", sparse_types, "Tdense", _attr_Tdense, "dense_shapes",
7381 dense_shapes, "output_types", output_types, "output_shapes", output_shapes,
7382 "sloppy", sloppy, "ragged_keys", ragged_keys, "ragged_value_types",
7383 ragged_value_types, "ragged_split_types", ragged_split_types)
7384 _result = _execute.execute(b"ParseExampleDataset", 1, inputs=_inputs_flat,
7385 attrs=_attrs, ctx=ctx, name=name)
7386 if _execute.must_record_gradient():
7387 _execute.record_gradient(
7388 "ParseExampleDataset", _inputs_flat, _attrs, _result)
7389 _result, = _result
7390 return _result
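# Illustrative usage (hand-written sketch, not part of the generated code):
# the public tf.data.experimental.parse_example_dataset transformation lowers
# to the ParseExampleDataset/ParseExampleDatasetV2 ops above. Assumes TF 2.x
# eager mode; the feature name "x" is made up for the example.
def _example_parse_example_dataset():
  import tensorflow as tf
  # Serialize one tf.train.Example holding a single int64 feature "x".
  example = tf.train.Example(features=tf.train.Features(feature={
      "x": tf.train.Feature(int64_list=tf.train.Int64List(value=[7])),
  }))
  # The op consumes vectors of serialized protos, hence the 1-element batch.
  ds = tf.data.Dataset.from_tensors([example.SerializeToString()])
  ds = ds.apply(tf.data.experimental.parse_example_dataset(
      {"x": tf.io.FixedLenFeature([1], tf.int64)},
      num_parallel_calls=1))
  return next(iter(ds))  # {"x": <tf.Tensor: shape=(1, 1), dtype=int64>}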
7393def parse_example_dataset_v2(input_dataset, num_parallel_calls, dense_defaults, sparse_keys, dense_keys, sparse_types, dense_shapes, output_types, output_shapes, deterministic="default", ragged_keys=[], ragged_value_types=[], ragged_split_types=[], name=None):
7394 r"""Transforms `input_dataset` containing `Example` protos as vectors of DT_STRING into a dataset of `Tensor` or `SparseTensor` objects representing the parsed features.
7396 Args:
7397 input_dataset: A `Tensor` of type `variant`.
7398 num_parallel_calls: A `Tensor` of type `int64`.
7399 dense_defaults: A list of `Tensor` objects with types from: `float32`, `int64`, `string`.
7400 A list of `Tensor`s providing default values for dense features.
7401 Each entry corresponds, in order, to one of the `dense_keys`.
7402 sparse_keys: A list of `strings`.
7403 A list of string keys in the examples' features.
7404 The results for these keys will be returned as `SparseTensor` objects.
7405 dense_keys: A list of `strings`.
7406 A list of `Ndense` string Tensors (scalars).
7407 The keys expected in the Examples' features associated with dense values.
7408 sparse_types: A list of `tf.DTypes` from: `tf.float32, tf.int64, tf.string`.
7409 A list of `DTypes` of the same length as `sparse_keys`.
7410 Only `tf.float32` (`FloatList`), `tf.int64` (`Int64List`),
7411 and `tf.string` (`BytesList`) are supported.
7412 dense_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`).
7413 List of tuples with the same length as `dense_keys`.
7414 The shape of the data for each dense feature referenced by `dense_keys`.
7415 Required for any input tensors identified by `dense_keys`. Must be
7416 either fully defined, or may contain an unknown first dimension.
7417 An unknown first dimension means the feature is treated as having
7418 a variable number of blocks, and the output shape along this dimension
7419 is considered unknown at graph build time. Padding is applied for
7420 minibatch elements smaller than the maximum number of blocks for the
7421 given feature along this dimension.
7422 output_types: A list of `tf.DTypes` that has length `>= 1`.
7423 The type list for the return values.
7424 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
7425 The list of shapes being produced.
7426 deterministic: An optional `string`. Defaults to `"default"`.
7427 A string indicating the op-level determinism to use. When set to "false", the
7428 dataset is allowed to return elements out of order if the next element to be
7429 returned isn't available but a later element is. Options are "true", "false",
7430 and "default". "default" indicates that determinism should be decided by the
7431 `experimental_deterministic` parameter of `tf.data.Options`.
7432 ragged_keys: An optional list of `strings`. Defaults to `[]`.
7433 ragged_value_types: An optional list of `tf.DTypes` from: `tf.float32, tf.int64, tf.string`. Defaults to `[]`.
7434 ragged_split_types: An optional list of `tf.DTypes` from: `tf.int32, tf.int64`. Defaults to `[]`.
7435 name: A name for the operation (optional).
7437 Returns:
7438 A `Tensor` of type `variant`.
7439 """
7440 _ctx = _context._context or _context.context()
7441 tld = _ctx._thread_local_data
7442 if tld.is_eager:
7443 try:
7444 _result = pywrap_tfe.TFE_Py_FastPathExecute(
7445 _ctx, "ParseExampleDatasetV2", name, input_dataset,
7446 num_parallel_calls, dense_defaults, "sparse_keys", sparse_keys,
7447 "dense_keys", dense_keys, "sparse_types", sparse_types,
7448 "dense_shapes", dense_shapes, "output_types", output_types,
7449 "output_shapes", output_shapes, "deterministic", deterministic,
7450 "ragged_keys", ragged_keys, "ragged_value_types", ragged_value_types,
7451 "ragged_split_types", ragged_split_types)
7452 return _result
7453 except _core._NotOkStatusException as e:
7454 _ops.raise_from_not_ok_status(e, name)
7455 except _core._FallbackException:
7456 pass
7457 try:
7458 return parse_example_dataset_v2_eager_fallback(
7459 input_dataset, num_parallel_calls, dense_defaults,
7460 sparse_keys=sparse_keys, dense_keys=dense_keys,
7461 sparse_types=sparse_types, dense_shapes=dense_shapes,
7462 output_types=output_types, output_shapes=output_shapes,
7463 deterministic=deterministic, ragged_keys=ragged_keys,
7464 ragged_value_types=ragged_value_types,
7465 ragged_split_types=ragged_split_types, name=name, ctx=_ctx)
7466 except _core._SymbolicException:
7467 pass # Add nodes to the TensorFlow graph.
7468 # Add nodes to the TensorFlow graph.
7469 if not isinstance(sparse_keys, (list, tuple)):
7470 raise TypeError(
7471 "Expected list for 'sparse_keys' argument to "
7472 "'parse_example_dataset_v2' Op, not %r." % sparse_keys)
7473 sparse_keys = [_execute.make_str(_s, "sparse_keys") for _s in sparse_keys]
7474 if not isinstance(dense_keys, (list, tuple)):
7475 raise TypeError(
7476 "Expected list for 'dense_keys' argument to "
7477 "'parse_example_dataset_v2' Op, not %r." % dense_keys)
7478 dense_keys = [_execute.make_str(_s, "dense_keys") for _s in dense_keys]
7479 if not isinstance(sparse_types, (list, tuple)):
7480 raise TypeError(
7481 "Expected list for 'sparse_types' argument to "
7482 "'parse_example_dataset_v2' Op, not %r." % sparse_types)
7483 sparse_types = [_execute.make_type(_t, "sparse_types") for _t in sparse_types]
7484 if not isinstance(dense_shapes, (list, tuple)):
7485 raise TypeError(
7486 "Expected list for 'dense_shapes' argument to "
7487 "'parse_example_dataset_v2' Op, not %r." % dense_shapes)
7488 dense_shapes = [_execute.make_shape(_s, "dense_shapes") for _s in dense_shapes]
7489 if not isinstance(output_types, (list, tuple)):
7490 raise TypeError(
7491 "Expected list for 'output_types' argument to "
7492 "'parse_example_dataset_v2' Op, not %r." % output_types)
7493 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
7494 if not isinstance(output_shapes, (list, tuple)):
7495 raise TypeError(
7496 "Expected list for 'output_shapes' argument to "
7497 "'parse_example_dataset_v2' Op, not %r." % output_shapes)
7498 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
7499 if deterministic is None:
7500 deterministic = "default"
7501 deterministic = _execute.make_str(deterministic, "deterministic")
7502 if ragged_keys is None:
7503 ragged_keys = []
7504 if not isinstance(ragged_keys, (list, tuple)):
7505 raise TypeError(
7506 "Expected list for 'ragged_keys' argument to "
7507 "'parse_example_dataset_v2' Op, not %r." % ragged_keys)
7508 ragged_keys = [_execute.make_str(_s, "ragged_keys") for _s in ragged_keys]
7509 if ragged_value_types is None:
7510 ragged_value_types = []
7511 if not isinstance(ragged_value_types, (list, tuple)):
7512 raise TypeError(
7513 "Expected list for 'ragged_value_types' argument to "
7514 "'parse_example_dataset_v2' Op, not %r." % ragged_value_types)
7515 ragged_value_types = [_execute.make_type(_t, "ragged_value_types") for _t in ragged_value_types]
7516 if ragged_split_types is None:
7517 ragged_split_types = []
7518 if not isinstance(ragged_split_types, (list, tuple)):
7519 raise TypeError(
7520 "Expected list for 'ragged_split_types' argument to "
7521 "'parse_example_dataset_v2' Op, not %r." % ragged_split_types)
7522 ragged_split_types = [_execute.make_type(_t, "ragged_split_types") for _t in ragged_split_types]
7523 _, _, _op, _outputs = _op_def_library._apply_op_helper(
7524 "ParseExampleDatasetV2", input_dataset=input_dataset,
7525 num_parallel_calls=num_parallel_calls,
7526 dense_defaults=dense_defaults,
7527 sparse_keys=sparse_keys,
7528 dense_keys=dense_keys,
7529 sparse_types=sparse_types,
7530 dense_shapes=dense_shapes,
7531 output_types=output_types,
7532 output_shapes=output_shapes,
7533 deterministic=deterministic,
7534 ragged_keys=ragged_keys,
7535 ragged_value_types=ragged_value_types,
7536 ragged_split_types=ragged_split_types,
7537 name=name)
7538 _result = _outputs[:]
7539 if _execute.must_record_gradient():
7540 _attrs = ("sparse_keys", _op.get_attr("sparse_keys"), "dense_keys",
7541 _op.get_attr("dense_keys"), "sparse_types",
7542 _op.get_attr("sparse_types"), "Tdense", _op.get_attr("Tdense"),
7543 "dense_shapes", _op.get_attr("dense_shapes"), "output_types",
7544 _op.get_attr("output_types"), "output_shapes",
7545 _op.get_attr("output_shapes"), "deterministic",
7546 _op.get_attr("deterministic"), "ragged_keys",
7547 _op.get_attr("ragged_keys"), "ragged_value_types",
7548 _op.get_attr("ragged_value_types"), "ragged_split_types",
7549 _op.get_attr("ragged_split_types"))
7550 _inputs_flat = _op.inputs
7551 _execute.record_gradient(
7552 "ParseExampleDatasetV2", _inputs_flat, _attrs, _result)
7553 _result, = _result
7554 return _result
7556ParseExampleDatasetV2 = tf_export("raw_ops.ParseExampleDatasetV2")(_ops.to_raw_op(parse_example_dataset_v2))
7559def parse_example_dataset_v2_eager_fallback(input_dataset, num_parallel_calls, dense_defaults, sparse_keys, dense_keys, sparse_types, dense_shapes, output_types, output_shapes, deterministic, ragged_keys, ragged_value_types, ragged_split_types, name, ctx):
7560 if not isinstance(sparse_keys, (list, tuple)):
7561 raise TypeError(
7562 "Expected list for 'sparse_keys' argument to "
7563 "'parse_example_dataset_v2' Op, not %r." % sparse_keys)
7564 sparse_keys = [_execute.make_str(_s, "sparse_keys") for _s in sparse_keys]
7565 if not isinstance(dense_keys, (list, tuple)):
7566 raise TypeError(
7567 "Expected list for 'dense_keys' argument to "
7568 "'parse_example_dataset_v2' Op, not %r." % dense_keys)
7569 dense_keys = [_execute.make_str(_s, "dense_keys") for _s in dense_keys]
7570 if not isinstance(sparse_types, (list, tuple)):
7571 raise TypeError(
7572 "Expected list for 'sparse_types' argument to "
7573 "'parse_example_dataset_v2' Op, not %r." % sparse_types)
7574 sparse_types = [_execute.make_type(_t, "sparse_types") for _t in sparse_types]
7575 if not isinstance(dense_shapes, (list, tuple)):
7576 raise TypeError(
7577 "Expected list for 'dense_shapes' argument to "
7578 "'parse_example_dataset_v2' Op, not %r." % dense_shapes)
7579 dense_shapes = [_execute.make_shape(_s, "dense_shapes") for _s in dense_shapes]
7580 if not isinstance(output_types, (list, tuple)):
7581 raise TypeError(
7582 "Expected list for 'output_types' argument to "
7583 "'parse_example_dataset_v2' Op, not %r." % output_types)
7584 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
7585 if not isinstance(output_shapes, (list, tuple)):
7586 raise TypeError(
7587 "Expected list for 'output_shapes' argument to "
7588 "'parse_example_dataset_v2' Op, not %r." % output_shapes)
7589 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
7590 if deterministic is None:
7591 deterministic = "default"
7592 deterministic = _execute.make_str(deterministic, "deterministic")
7593 if ragged_keys is None:
7594 ragged_keys = []
7595 if not isinstance(ragged_keys, (list, tuple)):
7596 raise TypeError(
7597 "Expected list for 'ragged_keys' argument to "
7598 "'parse_example_dataset_v2' Op, not %r." % ragged_keys)
7599 ragged_keys = [_execute.make_str(_s, "ragged_keys") for _s in ragged_keys]
7600 if ragged_value_types is None:
7601 ragged_value_types = []
7602 if not isinstance(ragged_value_types, (list, tuple)):
7603 raise TypeError(
7604 "Expected list for 'ragged_value_types' argument to "
7605 "'parse_example_dataset_v2' Op, not %r." % ragged_value_types)
7606 ragged_value_types = [_execute.make_type(_t, "ragged_value_types") for _t in ragged_value_types]
7607 if ragged_split_types is None:
7608 ragged_split_types = []
7609 if not isinstance(ragged_split_types, (list, tuple)):
7610 raise TypeError(
7611 "Expected list for 'ragged_split_types' argument to "
7612 "'parse_example_dataset_v2' Op, not %r." % ragged_split_types)
7613 ragged_split_types = [_execute.make_type(_t, "ragged_split_types") for _t in ragged_split_types]
7614 _attr_Tdense, dense_defaults = _execute.convert_to_mixed_eager_tensors(dense_defaults, ctx)
7615 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
7616 num_parallel_calls = _ops.convert_to_tensor(num_parallel_calls, _dtypes.int64)
7617 _inputs_flat = [input_dataset, num_parallel_calls] + list(dense_defaults)
7618 _attrs = ("sparse_keys", sparse_keys, "dense_keys", dense_keys,
7619 "sparse_types", sparse_types, "Tdense", _attr_Tdense, "dense_shapes",
7620 dense_shapes, "output_types", output_types, "output_shapes", output_shapes,
7621 "deterministic", deterministic, "ragged_keys", ragged_keys,
7622 "ragged_value_types", ragged_value_types, "ragged_split_types",
7623 ragged_split_types)
7624 _result = _execute.execute(b"ParseExampleDatasetV2", 1, inputs=_inputs_flat,
7625 attrs=_attrs, ctx=ctx, name=name)
7626 if _execute.must_record_gradient():
7627 _execute.record_gradient(
7628 "ParseExampleDatasetV2", _inputs_flat, _attrs, _result)
7629 _result, = _result
7630 return _result
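# Illustrative usage (hand-written sketch, not part of the generated code):
# passing `deterministic` to tf.data.experimental.parse_example_dataset
# selects this V2 op and sets its "deterministic" attr; False corresponds to
# the attr value "false" and permits out-of-order results for throughput.
def _example_parse_example_dataset_v2():
  import tensorflow as tf
  example = tf.train.Example(features=tf.train.Features(feature={
      "x": tf.train.Feature(float_list=tf.train.FloatList(value=[1.0])),
  }))
  ds = tf.data.Dataset.from_tensors([example.SerializeToString()]).repeat(4)
  ds = ds.apply(tf.data.experimental.parse_example_dataset(
      {"x": tf.io.FixedLenFeature([1], tf.float32)},
      num_parallel_calls=2,
      deterministic=False))
  return [element["x"].numpy() for element in ds]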
7633def private_thread_pool_dataset(input_dataset, num_threads, output_types, output_shapes, name=None):
7634 r"""Creates a dataset that uses a custom thread pool to compute `input_dataset`.
7636 Args:
7637 input_dataset: A `Tensor` of type `variant`.
7638 num_threads: A `Tensor` of type `int64`.
7639 Identifies the number of threads to use for the private threadpool.
7640 output_types: A list of `tf.DTypes` that has length `>= 1`.
7641 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
7642 name: A name for the operation (optional).
7644 Returns:
7645 A `Tensor` of type `variant`.
7646 """
7647 _ctx = _context._context or _context.context()
7648 tld = _ctx._thread_local_data
7649 if tld.is_eager:
7650 try:
7651 _result = pywrap_tfe.TFE_Py_FastPathExecute(
7652 _ctx, "PrivateThreadPoolDataset", name, input_dataset, num_threads,
7653 "output_types", output_types, "output_shapes", output_shapes)
7654 return _result
7655 except _core._NotOkStatusException as e:
7656 _ops.raise_from_not_ok_status(e, name)
7657 except _core._FallbackException:
7658 pass
7659 try:
7660 return private_thread_pool_dataset_eager_fallback(
7661 input_dataset, num_threads, output_types=output_types,
7662 output_shapes=output_shapes, name=name, ctx=_ctx)
7663 except _core._SymbolicException:
7664 pass # Add nodes to the TensorFlow graph.
7665 # Add nodes to the TensorFlow graph.
7666 if not isinstance(output_types, (list, tuple)):
7667 raise TypeError(
7668 "Expected list for 'output_types' argument to "
7669 "'private_thread_pool_dataset' Op, not %r." % output_types)
7670 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
7671 if not isinstance(output_shapes, (list, tuple)):
7672 raise TypeError(
7673 "Expected list for 'output_shapes' argument to "
7674 "'private_thread_pool_dataset' Op, not %r." % output_shapes)
7675 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
7676 _, _, _op, _outputs = _op_def_library._apply_op_helper(
7677 "PrivateThreadPoolDataset", input_dataset=input_dataset,
7678 num_threads=num_threads,
7679 output_types=output_types,
7680 output_shapes=output_shapes, name=name)
7681 _result = _outputs[:]
7682 if _execute.must_record_gradient():
7683 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
7684 _op.get_attr("output_shapes"))
7685 _inputs_flat = _op.inputs
7686 _execute.record_gradient(
7687 "PrivateThreadPoolDataset", _inputs_flat, _attrs, _result)
7688 _result, = _result
7689 return _result
7691PrivateThreadPoolDataset = tf_export("raw_ops.PrivateThreadPoolDataset")(_ops.to_raw_op(private_thread_pool_dataset))
7694def private_thread_pool_dataset_eager_fallback(input_dataset, num_threads, output_types, output_shapes, name, ctx):
7695 if not isinstance(output_types, (list, tuple)):
7696 raise TypeError(
7697 "Expected list for 'output_types' argument to "
7698 "'private_thread_pool_dataset' Op, not %r." % output_types)
7699 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
7700 if not isinstance(output_shapes, (list, tuple)):
7701 raise TypeError(
7702 "Expected list for 'output_shapes' argument to "
7703 "'private_thread_pool_dataset' Op, not %r." % output_shapes)
7704 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
7705 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
7706 num_threads = _ops.convert_to_tensor(num_threads, _dtypes.int64)
7707 _inputs_flat = [input_dataset, num_threads]
7708 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
7709 _result = _execute.execute(b"PrivateThreadPoolDataset", 1,
7710 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
7711 name=name)
7712 if _execute.must_record_gradient():
7713 _execute.record_gradient(
7714 "PrivateThreadPoolDataset", _inputs_flat, _attrs, _result)
7715 _result, = _result
7716 return _result
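# Illustrative usage (hand-written sketch, not part of the generated code):
# setting `private_threadpool_size` in tf.data.Options routes the pipeline
# through PrivateThreadPoolDataset rather than the shared threadpool.
def _example_private_thread_pool_dataset():
  import tensorflow as tf
  ds = tf.data.Dataset.range(10).map(
      lambda x: x * 2, num_parallel_calls=tf.data.AUTOTUNE)
  options = tf.data.Options()
  options.threading.private_threadpool_size = 4  # 0 auto-sizes from CPU cores.
  ds = ds.with_options(options)
  return list(ds.as_numpy_iterator())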
7719def random_dataset(seed, seed2, output_types, output_shapes, metadata="", name=None):
7720 r"""Creates a Dataset that returns pseudorandom numbers.
7722 Creates a Dataset that returns a stream of uniformly distributed
7723 pseudorandom 64-bit signed integers.
7725 In the TensorFlow Python API, you can instantiate this dataset via the
7726 class `tf.data.experimental.RandomDataset`.
7728 Instances of this dataset are also created as a result of the
7729 `hoist_random_uniform` static optimization. Whether this optimization is
7730 performed is determined by the `experimental_optimization.hoist_random_uniform`
7731 option of `tf.data.Options`.
7733 Args:
7734 seed: A `Tensor` of type `int64`.
7735 A scalar seed for the random number generator. If either seed or
7736 seed2 is set to be non-zero, the random number generator is seeded
7737 by the given seed. Otherwise, a random seed is used.
7738 seed2: A `Tensor` of type `int64`.
7739 A second scalar seed to avoid seed collision.
7740 output_types: A list of `tf.DTypes` that has length `>= 1`.
7741 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
7742 metadata: An optional `string`. Defaults to `""`.
7743 name: A name for the operation (optional).
7745 Returns:
7746 A `Tensor` of type `variant`.
7747 """
7748 _ctx = _context._context or _context.context()
7749 tld = _ctx._thread_local_data
7750 if tld.is_eager:
7751 try:
7752 _result = pywrap_tfe.TFE_Py_FastPathExecute(
7753 _ctx, "RandomDataset", name, seed, seed2, "output_types",
7754 output_types, "output_shapes", output_shapes, "metadata", metadata)
7755 return _result
7756 except _core._NotOkStatusException as e:
7757 _ops.raise_from_not_ok_status(e, name)
7758 except _core._FallbackException:
7759 pass
7760 try:
7761 return random_dataset_eager_fallback(
7762 seed, seed2, output_types=output_types, output_shapes=output_shapes,
7763 metadata=metadata, name=name, ctx=_ctx)
7764 except _core._SymbolicException:
7765 pass # Add nodes to the TensorFlow graph.
7766 # Add nodes to the TensorFlow graph.
7767 if not isinstance(output_types, (list, tuple)):
7768 raise TypeError(
7769 "Expected list for 'output_types' argument to "
7770 "'random_dataset' Op, not %r." % output_types)
7771 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
7772 if not isinstance(output_shapes, (list, tuple)):
7773 raise TypeError(
7774 "Expected list for 'output_shapes' argument to "
7775 "'random_dataset' Op, not %r." % output_shapes)
7776 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
7777 if metadata is None:
7778 metadata = ""
7779 metadata = _execute.make_str(metadata, "metadata")
7780 _, _, _op, _outputs = _op_def_library._apply_op_helper(
7781 "RandomDataset", seed=seed, seed2=seed2, output_types=output_types,
7782 output_shapes=output_shapes, metadata=metadata,
7783 name=name)
7784 _result = _outputs[:]
7785 if _execute.must_record_gradient():
7786 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
7787 _op.get_attr("output_shapes"), "metadata",
7788 _op.get_attr("metadata"))
7789 _inputs_flat = _op.inputs
7790 _execute.record_gradient(
7791 "RandomDataset", _inputs_flat, _attrs, _result)
7792 _result, = _result
7793 return _result
7795RandomDataset = tf_export("raw_ops.RandomDataset")(_ops.to_raw_op(random_dataset))
7798def random_dataset_eager_fallback(seed, seed2, output_types, output_shapes, metadata, name, ctx):
7799 if not isinstance(output_types, (list, tuple)):
7800 raise TypeError(
7801 "Expected list for 'output_types' argument to "
7802 "'random_dataset' Op, not %r." % output_types)
7803 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
7804 if not isinstance(output_shapes, (list, tuple)):
7805 raise TypeError(
7806 "Expected list for 'output_shapes' argument to "
7807 "'random_dataset' Op, not %r." % output_shapes)
7808 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
7809 if metadata is None:
7810 metadata = ""
7811 metadata = _execute.make_str(metadata, "metadata")
7812 seed = _ops.convert_to_tensor(seed, _dtypes.int64)
7813 seed2 = _ops.convert_to_tensor(seed2, _dtypes.int64)
7814 _inputs_flat = [seed, seed2]
7815 _attrs = ("output_types", output_types, "output_shapes", output_shapes,
7816 "metadata", metadata)
7817 _result = _execute.execute(b"RandomDataset", 1, inputs=_inputs_flat,
7818 attrs=_attrs, ctx=ctx, name=name)
7819 if _execute.must_record_gradient():
7820 _execute.record_gradient(
7821 "RandomDataset", _inputs_flat, _attrs, _result)
7822 _result, = _result
7823 return _result
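# Illustrative usage (hand-written sketch, not part of the generated code):
# the class named in the docstring above wraps this op and yields an endless
# stream of pseudorandom int64 scalars, so bound it with take().
def _example_random_dataset():
  import tensorflow as tf
  ds = tf.data.experimental.RandomDataset(seed=42).take(3)
  return list(ds.as_numpy_iterator())  # Three pseudorandom int64 values.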
7826def random_dataset_v2(seed, seed2, seed_generator, output_types, output_shapes, rerandomize_each_iteration=False, metadata="", name=None):
7827 r"""Creates a Dataset that returns pseudorandom numbers.
7829 Creates a Dataset that returns a stream of uniformly distributed
7830 pseudorandom 64-bit signed integers. It accepts a boolean attribute,
7831 `rerandomize_each_iteration`, that determines whether the random number
7832 generators are re-applied at each epoch. The default value is `False`, which
7833 means that the seeds are applied and the same sequence of random numbers is
7834 generated at each epoch. If set to `True`, the seeds are not re-applied and a
7835 different sequence of random numbers is generated at each epoch.
7837 In the TensorFlow Python API, you can instantiate this dataset via the
7838 class `tf.data.experimental.RandomDatasetV2`.
7840 Args:
7841 seed: A `Tensor` of type `int64`.
7842 A scalar seed for the random number generator. If either seed or
7843 seed2 is set to be non-zero, the random number generator is seeded
7844 by the given seed. Otherwise, a random seed is used.
7845 seed2: A `Tensor` of type `int64`.
7846 A second scalar seed to avoid seed collision.
7847 seed_generator: A `Tensor` of type `resource`.
7848 A resource for the random number seed generator.
7849 output_types: A list of `tf.DTypes` that has length `>= 1`.
7850 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
7851 rerandomize_each_iteration: An optional `bool`. Defaults to `False`.
7852 A boolean attribute to rerandomize the sequence of random numbers generated
7853 at each epoch.
7854 metadata: An optional `string`. Defaults to `""`.
7855 name: A name for the operation (optional).
7857 Returns:
7858 A `Tensor` of type `variant`.
7859 """
7860 _ctx = _context._context or _context.context()
7861 tld = _ctx._thread_local_data
7862 if tld.is_eager:
7863 try:
7864 _result = pywrap_tfe.TFE_Py_FastPathExecute(
7865 _ctx, "RandomDatasetV2", name, seed, seed2, seed_generator,
7866 "rerandomize_each_iteration", rerandomize_each_iteration,
7867 "output_types", output_types, "output_shapes", output_shapes,
7868 "metadata", metadata)
7869 return _result
7870 except _core._NotOkStatusException as e:
7871 _ops.raise_from_not_ok_status(e, name)
7872 except _core._FallbackException:
7873 pass
7874 try:
7875 return random_dataset_v2_eager_fallback(
7876 seed, seed2, seed_generator,
7877 rerandomize_each_iteration=rerandomize_each_iteration,
7878 output_types=output_types, output_shapes=output_shapes,
7879 metadata=metadata, name=name, ctx=_ctx)
7880 except _core._SymbolicException:
7881 pass # Add nodes to the TensorFlow graph.
7882 # Add nodes to the TensorFlow graph.
7883 if not isinstance(output_types, (list, tuple)):
7884 raise TypeError(
7885 "Expected list for 'output_types' argument to "
7886 "'random_dataset_v2' Op, not %r." % output_types)
7887 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
7888 if not isinstance(output_shapes, (list, tuple)):
7889 raise TypeError(
7890 "Expected list for 'output_shapes' argument to "
7891 "'random_dataset_v2' Op, not %r." % output_shapes)
7892 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
7893 if rerandomize_each_iteration is None:
7894 rerandomize_each_iteration = False
7895 rerandomize_each_iteration = _execute.make_bool(rerandomize_each_iteration, "rerandomize_each_iteration")
7896 if metadata is None:
7897 metadata = ""
7898 metadata = _execute.make_str(metadata, "metadata")
7899 _, _, _op, _outputs = _op_def_library._apply_op_helper(
7900 "RandomDatasetV2", seed=seed, seed2=seed2,
7901 seed_generator=seed_generator,
7902 output_types=output_types,
7903 output_shapes=output_shapes,
7904 rerandomize_each_iteration=rerandomize_each_iteration,
7905 metadata=metadata, name=name)
7906 _result = _outputs[:]
7907 if _execute.must_record_gradient():
7908 _attrs = ("rerandomize_each_iteration",
7909 _op._get_attr_bool("rerandomize_each_iteration"),
7910 "output_types", _op.get_attr("output_types"), "output_shapes",
7911 _op.get_attr("output_shapes"), "metadata",
7912 _op.get_attr("metadata"))
7913 _inputs_flat = _op.inputs
7914 _execute.record_gradient(
7915 "RandomDatasetV2", _inputs_flat, _attrs, _result)
7916 _result, = _result
7917 return _result
7919RandomDatasetV2 = tf_export("raw_ops.RandomDatasetV2")(_ops.to_raw_op(random_dataset_v2))
7922def random_dataset_v2_eager_fallback(seed, seed2, seed_generator, output_types, output_shapes, rerandomize_each_iteration, metadata, name, ctx):
7923 if not isinstance(output_types, (list, tuple)):
7924 raise TypeError(
7925 "Expected list for 'output_types' argument to "
7926 "'random_dataset_v2' Op, not %r." % output_types)
7927 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
7928 if not isinstance(output_shapes, (list, tuple)):
7929 raise TypeError(
7930 "Expected list for 'output_shapes' argument to "
7931 "'random_dataset_v2' Op, not %r." % output_shapes)
7932 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
7933 if rerandomize_each_iteration is None:
7934 rerandomize_each_iteration = False
7935 rerandomize_each_iteration = _execute.make_bool(rerandomize_each_iteration, "rerandomize_each_iteration")
7936 if metadata is None:
7937 metadata = ""
7938 metadata = _execute.make_str(metadata, "metadata")
7939 seed = _ops.convert_to_tensor(seed, _dtypes.int64)
7940 seed2 = _ops.convert_to_tensor(seed2, _dtypes.int64)
7941 seed_generator = _ops.convert_to_tensor(seed_generator, _dtypes.resource)
7942 _inputs_flat = [seed, seed2, seed_generator]
7943 _attrs = ("rerandomize_each_iteration", rerandomize_each_iteration,
7944 "output_types", output_types, "output_shapes", output_shapes, "metadata",
7945 metadata)
7946 _result = _execute.execute(b"RandomDatasetV2", 1, inputs=_inputs_flat,
7947 attrs=_attrs, ctx=ctx, name=name)
7948 if _execute.must_record_gradient():
7949 _execute.record_gradient(
7950 "RandomDatasetV2", _inputs_flat, _attrs, _result)
7951 _result, = _result
7952 return _result
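# Illustrative usage (hand-written sketch, not part of the generated code):
# assumes a TF release whose tf.data.Dataset.random accepts
# `rerandomize_each_iteration`, the attribute this V2 op adds on top of
# RandomDataset.
def _example_random_dataset_v2():
  import tensorflow as tf
  ds = tf.data.Dataset.random(seed=42, rerandomize_each_iteration=True).take(2)
  first_epoch = list(ds.as_numpy_iterator())
  second_epoch = list(ds.as_numpy_iterator())  # Differs from the first epoch.
  return first_epoch, second_epoch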
7955def rebatch_dataset(input_dataset, num_replicas, output_types, output_shapes, use_fallback=True, name=None):
7956 r"""Creates a dataset that changes the batch size.
7958 Creates a dataset that changes the batch size of the dataset to `current batch
7959 size // num_replicas`.
7961 Args:
7962 input_dataset: A `Tensor` of type `variant`.
7963 A variant tensor representing the input dataset.
7964 num_replicas: A `Tensor` of type `int64`.
7965 A scalar representing the number of replicas to distribute this batch across. As
7966 a result of this transformation, the current batch size is divided by this
7967 parameter.
7968 output_types: A list of `tf.DTypes` that has length `>= 1`.
7969 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
7970 use_fallback: An optional `bool`. Defaults to `True`.
7971 name: A name for the operation (optional).
7973 Returns:
7974 A `Tensor` of type `variant`.
7975 """
7976 _ctx = _context._context or _context.context()
7977 tld = _ctx._thread_local_data
7978 if tld.is_eager:
7979 try:
7980 _result = pywrap_tfe.TFE_Py_FastPathExecute(
7981 _ctx, "RebatchDataset", name, input_dataset, num_replicas,
7982 "output_types", output_types, "output_shapes", output_shapes,
7983 "use_fallback", use_fallback)
7984 return _result
7985 except _core._NotOkStatusException as e:
7986 _ops.raise_from_not_ok_status(e, name)
7987 except _core._FallbackException:
7988 pass
7989 try:
7990 return rebatch_dataset_eager_fallback(
7991 input_dataset, num_replicas, output_types=output_types,
7992 output_shapes=output_shapes, use_fallback=use_fallback, name=name,
7993 ctx=_ctx)
7994 except _core._SymbolicException:
7995 pass # Add nodes to the TensorFlow graph.
7996 # Add nodes to the TensorFlow graph.
7997 if not isinstance(output_types, (list, tuple)):
7998 raise TypeError(
7999 "Expected list for 'output_types' argument to "
8000 "'rebatch_dataset' Op, not %r." % output_types)
8001 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
8002 if not isinstance(output_shapes, (list, tuple)):
8003 raise TypeError(
8004 "Expected list for 'output_shapes' argument to "
8005 "'rebatch_dataset' Op, not %r." % output_shapes)
8006 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
8007 if use_fallback is None:
8008 use_fallback = True
8009 use_fallback = _execute.make_bool(use_fallback, "use_fallback")
8010 _, _, _op, _outputs = _op_def_library._apply_op_helper(
8011 "RebatchDataset", input_dataset=input_dataset,
8012 num_replicas=num_replicas,
8013 output_types=output_types,
8014 output_shapes=output_shapes,
8015 use_fallback=use_fallback, name=name)
8016 _result = _outputs[:]
8017 if _execute.must_record_gradient():
8018 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
8019 _op.get_attr("output_shapes"), "use_fallback",
8020 _op._get_attr_bool("use_fallback"))
8021 _inputs_flat = _op.inputs
8022 _execute.record_gradient(
8023 "RebatchDataset", _inputs_flat, _attrs, _result)
8024 _result, = _result
8025 return _result
8027RebatchDataset = tf_export("raw_ops.RebatchDataset")(_ops.to_raw_op(rebatch_dataset))
8030def rebatch_dataset_eager_fallback(input_dataset, num_replicas, output_types, output_shapes, use_fallback, name, ctx):
8031 if not isinstance(output_types, (list, tuple)):
8032 raise TypeError(
8033 "Expected list for 'output_types' argument to "
8034 "'rebatch_dataset' Op, not %r." % output_types)
8035 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
8036 if not isinstance(output_shapes, (list, tuple)):
8037 raise TypeError(
8038 "Expected list for 'output_shapes' argument to "
8039 "'rebatch_dataset' Op, not %r." % output_shapes)
8040 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
8041 if use_fallback is None:
8042 use_fallback = True
8043 use_fallback = _execute.make_bool(use_fallback, "use_fallback")
8044 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
8045 num_replicas = _ops.convert_to_tensor(num_replicas, _dtypes.int64)
8046 _inputs_flat = [input_dataset, num_replicas]
8047 _attrs = ("output_types", output_types, "output_shapes", output_shapes,
8048 "use_fallback", use_fallback)
8049 _result = _execute.execute(b"RebatchDataset", 1, inputs=_inputs_flat,
8050 attrs=_attrs, ctx=ctx, name=name)
8051 if _execute.must_record_gradient():
8052 _execute.record_gradient(
8053 "RebatchDataset", _inputs_flat, _attrs, _result)
8054 _result, = _result
8055 return _result
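# Illustrative usage (hand-written sketch, not part of the generated code):
# there is no dedicated public wrapper, but the op can be driven through the
# public to_variant/from_variant helpers. Batches of 4 are split across 2
# replicas, so the output batch size is 4 // 2 = 2.
def _example_rebatch_dataset():
  import tensorflow as tf
  ds = tf.data.Dataset.range(8).batch(4, drop_remainder=True)
  variant = tf.data.experimental.to_variant(ds)
  rebatched = tf.raw_ops.RebatchDataset(
      input_dataset=variant,
      num_replicas=2,
      output_types=[tf.int64],
      output_shapes=[tf.TensorShape([None])])
  out = tf.data.experimental.from_variant(
      rebatched, tf.TensorSpec([None], tf.int64))
  return [batch.tolist() for batch in out.as_numpy_iterator()]
  # [[0, 1], [2, 3], [4, 5], [6, 7]]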
8058def rebatch_dataset_v2(input_dataset, batch_sizes, drop_remainder, output_types, output_shapes, name=None):
8059 r"""Creates a dataset that changes the batch size.
8061 Creates a dataset that rebatches elements from `input_dataset` into new batch
8062 sizes.
8064 Args:
8065 input_dataset: A `Tensor` of type `variant`.
8066 A variant tensor representing the input dataset.
8067 batch_sizes: A `Tensor` of type `int64`.
8068 A vector of integers representing the size of batches to produce. These values
8069 are cycled through in order.
8070 drop_remainder: A `Tensor` of type `bool`.
8071 output_types: A list of `tf.DTypes` that has length `>= 1`.
8072 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
8073 name: A name for the operation (optional).
8075 Returns:
8076 A `Tensor` of type `variant`.
8077 """
8078 _ctx = _context._context or _context.context()
8079 tld = _ctx._thread_local_data
8080 if tld.is_eager:
8081 try:
8082 _result = pywrap_tfe.TFE_Py_FastPathExecute(
8083 _ctx, "RebatchDatasetV2", name, input_dataset, batch_sizes,
8084 drop_remainder, "output_types", output_types, "output_shapes",
8085 output_shapes)
8086 return _result
8087 except _core._NotOkStatusException as e:
8088 _ops.raise_from_not_ok_status(e, name)
8089 except _core._FallbackException:
8090 pass
8091 try:
8092 return rebatch_dataset_v2_eager_fallback(
8093 input_dataset, batch_sizes, drop_remainder,
8094 output_types=output_types, output_shapes=output_shapes, name=name,
8095 ctx=_ctx)
8096 except _core._SymbolicException:
8097 pass # Add nodes to the TensorFlow graph.
8098 # Add nodes to the TensorFlow graph.
8099 if not isinstance(output_types, (list, tuple)):
8100 raise TypeError(
8101 "Expected list for 'output_types' argument to "
8102 "'rebatch_dataset_v2' Op, not %r." % output_types)
8103 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
8104 if not isinstance(output_shapes, (list, tuple)):
8105 raise TypeError(
8106 "Expected list for 'output_shapes' argument to "
8107 "'rebatch_dataset_v2' Op, not %r." % output_shapes)
8108 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
8109 _, _, _op, _outputs = _op_def_library._apply_op_helper(
8110 "RebatchDatasetV2", input_dataset=input_dataset,
8111 batch_sizes=batch_sizes,
8112 drop_remainder=drop_remainder,
8113 output_types=output_types,
8114 output_shapes=output_shapes, name=name)
8115 _result = _outputs[:]
8116 if _execute.must_record_gradient():
8117 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
8118 _op.get_attr("output_shapes"))
8119 _inputs_flat = _op.inputs
8120 _execute.record_gradient(
8121 "RebatchDatasetV2", _inputs_flat, _attrs, _result)
8122 _result, = _result
8123 return _result
8125RebatchDatasetV2 = tf_export("raw_ops.RebatchDatasetV2")(_ops.to_raw_op(rebatch_dataset_v2))
8128def rebatch_dataset_v2_eager_fallback(input_dataset, batch_sizes, drop_remainder, output_types, output_shapes, name, ctx):
8129 if not isinstance(output_types, (list, tuple)):
8130 raise TypeError(
8131 "Expected list for 'output_types' argument to "
8132 "'rebatch_dataset_v2' Op, not %r." % output_types)
8133 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
8134 if not isinstance(output_shapes, (list, tuple)):
8135 raise TypeError(
8136 "Expected list for 'output_shapes' argument to "
8137 "'rebatch_dataset_v2' Op, not %r." % output_shapes)
8138 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
8139 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
8140 batch_sizes = _ops.convert_to_tensor(batch_sizes, _dtypes.int64)
8141 drop_remainder = _ops.convert_to_tensor(drop_remainder, _dtypes.bool)
8142 _inputs_flat = [input_dataset, batch_sizes, drop_remainder]
8143 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
8144 _result = _execute.execute(b"RebatchDatasetV2", 1, inputs=_inputs_flat,
8145 attrs=_attrs, ctx=ctx, name=name)
8146 if _execute.must_record_gradient():
8147 _execute.record_gradient(
8148 "RebatchDatasetV2", _inputs_flat, _attrs, _result)
8149 _result, = _result
8150 return _result
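# Illustrative usage (hand-written sketch, not part of the generated code):
# unlike RebatchDataset, V2 takes an explicit vector of output batch sizes
# that is cycled through in order instead of a replica count.
def _example_rebatch_dataset_v2():
  import tensorflow as tf
  ds = tf.data.Dataset.range(6).batch(6, drop_remainder=True)
  variant = tf.data.experimental.to_variant(ds)
  rebatched = tf.raw_ops.RebatchDatasetV2(
      input_dataset=variant,
      batch_sizes=tf.constant([4, 2], dtype=tf.int64),
      drop_remainder=tf.constant(False),
      output_types=[tf.int64],
      output_shapes=[tf.TensorShape([None])])
  out = tf.data.experimental.from_variant(
      rebatched, tf.TensorSpec([None], tf.int64))
  return [batch.tolist() for batch in out.as_numpy_iterator()]
  # [[0, 1, 2, 3], [4, 5]]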
8153def register_dataset(dataset, address, protocol, external_state_policy, element_spec="", metadata="", name=None):
8154 r"""Registers a dataset with the tf.data service.
8156 Args:
8157 dataset: A `Tensor` of type `variant`.
8158 address: A `Tensor` of type `string`.
8159 protocol: A `Tensor` of type `string`.
8160 external_state_policy: An `int`.
8161 element_spec: An optional `string`. Defaults to `""`.
8162 metadata: An optional `string`. Defaults to `""`.
8163 name: A name for the operation (optional).
8165 Returns:
8166 A `Tensor` of type `int64`.
8167 """
8168 _ctx = _context._context or _context.context()
8169 tld = _ctx._thread_local_data
8170 if tld.is_eager:
8171 try:
8172 _result = pywrap_tfe.TFE_Py_FastPathExecute(
8173 _ctx, "RegisterDataset", name, dataset, address, protocol,
8174 "external_state_policy", external_state_policy, "element_spec",
8175 element_spec, "metadata", metadata)
8176 return _result
8177 except _core._NotOkStatusException as e:
8178 _ops.raise_from_not_ok_status(e, name)
8179 except _core._FallbackException:
8180 pass
8181 try:
8182 return register_dataset_eager_fallback(
8183 dataset, address, protocol,
8184 external_state_policy=external_state_policy,
8185 element_spec=element_spec, metadata=metadata, name=name, ctx=_ctx)
8186 except _core._SymbolicException:
8187 pass # Add nodes to the TensorFlow graph.
8188 # Add nodes to the TensorFlow graph.
8189 external_state_policy = _execute.make_int(external_state_policy, "external_state_policy")
8190 if element_spec is None:
8191 element_spec = ""
8192 element_spec = _execute.make_str(element_spec, "element_spec")
8193 if metadata is None:
8194 metadata = ""
8195 metadata = _execute.make_str(metadata, "metadata")
8196 _, _, _op, _outputs = _op_def_library._apply_op_helper(
8197 "RegisterDataset", dataset=dataset, address=address,
8198 protocol=protocol,
8199 external_state_policy=external_state_policy,
8200 element_spec=element_spec, metadata=metadata,
8201 name=name)
8202 _result = _outputs[:]
8203 if _execute.must_record_gradient():
8204 _attrs = ("external_state_policy",
8205 _op._get_attr_int("external_state_policy"), "element_spec",
8206 _op.get_attr("element_spec"), "metadata",
8207 _op.get_attr("metadata"))
8208 _inputs_flat = _op.inputs
8209 _execute.record_gradient(
8210 "RegisterDataset", _inputs_flat, _attrs, _result)
8211 _result, = _result
8212 return _result
8214RegisterDataset = tf_export("raw_ops.RegisterDataset")(_ops.to_raw_op(register_dataset))
8217def register_dataset_eager_fallback(dataset, address, protocol, external_state_policy, element_spec, metadata, name, ctx):
8218 external_state_policy = _execute.make_int(external_state_policy, "external_state_policy")
8219 if element_spec is None:
8220 element_spec = ""
8221 element_spec = _execute.make_str(element_spec, "element_spec")
8222 if metadata is None:
8223 metadata = ""
8224 metadata = _execute.make_str(metadata, "metadata")
8225 dataset = _ops.convert_to_tensor(dataset, _dtypes.variant)
8226 address = _ops.convert_to_tensor(address, _dtypes.string)
8227 protocol = _ops.convert_to_tensor(protocol, _dtypes.string)
8228 _inputs_flat = [dataset, address, protocol]
8229 _attrs = ("external_state_policy", external_state_policy, "element_spec",
8230 element_spec, "metadata", metadata)
8231 _result = _execute.execute(b"RegisterDataset", 1, inputs=_inputs_flat,
8232 attrs=_attrs, ctx=ctx, name=name)
8233 if _execute.must_record_gradient():
8234 _execute.record_gradient(
8235 "RegisterDataset", _inputs_flat, _attrs, _result)
8236 _result, = _result
8237 return _result
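# Illustrative usage (hand-written sketch, not part of the generated code):
# the public tf.data service API below lowers to this op. An in-process
# dispatcher keeps the example self-contained; real deployments point the
# service address at a running tf.data service cluster instead.
def _example_register_dataset():
  import tensorflow as tf
  dispatcher = tf.data.experimental.service.DispatchServer()
  dataset_id = tf.data.experimental.service.register_dataset(
      dispatcher.target, tf.data.Dataset.range(4))
  return dataset_id  # Opaque id, later consumed via from_dataset_id(...).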
8240def register_dataset_v2(dataset, address, protocol, external_state_policy, element_spec="", requested_dataset_id="", metadata="", name=None):
8241 r"""Registers a dataset with the tf.data service.
8243 Args:
8244 dataset: A `Tensor` of type `variant`.
8245 address: A `Tensor` of type `string`.
8246 protocol: A `Tensor` of type `string`.
8247 external_state_policy: An `int`.
8248 element_spec: An optional `string`. Defaults to `""`.
8249 requested_dataset_id: An optional `string`. Defaults to `""`.
8250 metadata: An optional `string`. Defaults to `""`.
8251 name: A name for the operation (optional).
8253 Returns:
8254 A `Tensor` of type `string`.
8255 """
8256 _ctx = _context._context or _context.context()
8257 tld = _ctx._thread_local_data
8258 if tld.is_eager:
8259 try:
8260 _result = pywrap_tfe.TFE_Py_FastPathExecute(
8261 _ctx, "RegisterDatasetV2", name, dataset, address, protocol,
8262 "external_state_policy", external_state_policy, "element_spec",
8263 element_spec, "requested_dataset_id", requested_dataset_id,
8264 "metadata", metadata)
8265 return _result
8266 except _core._NotOkStatusException as e:
8267 _ops.raise_from_not_ok_status(e, name)
8268 except _core._FallbackException:
8269 pass
8270 try:
8271 return register_dataset_v2_eager_fallback(
8272 dataset, address, protocol,
8273 external_state_policy=external_state_policy,
8274 element_spec=element_spec,
8275 requested_dataset_id=requested_dataset_id, metadata=metadata,
8276 name=name, ctx=_ctx)
8277 except _core._SymbolicException:
8278 pass # Add nodes to the TensorFlow graph.
8279 # Add nodes to the TensorFlow graph.
8280 external_state_policy = _execute.make_int(external_state_policy, "external_state_policy")
8281 if element_spec is None:
8282 element_spec = ""
8283 element_spec = _execute.make_str(element_spec, "element_spec")
8284 if requested_dataset_id is None:
8285 requested_dataset_id = ""
8286 requested_dataset_id = _execute.make_str(requested_dataset_id, "requested_dataset_id")
8287 if metadata is None:
8288 metadata = ""
8289 metadata = _execute.make_str(metadata, "metadata")
8290 _, _, _op, _outputs = _op_def_library._apply_op_helper(
8291 "RegisterDatasetV2", dataset=dataset, address=address,
8292 protocol=protocol,
8293 external_state_policy=external_state_policy,
8294 element_spec=element_spec,
8295 requested_dataset_id=requested_dataset_id,
8296 metadata=metadata, name=name)
8297 _result = _outputs[:]
8298 if _execute.must_record_gradient():
8299 _attrs = ("external_state_policy",
8300 _op._get_attr_int("external_state_policy"), "element_spec",
8301 _op.get_attr("element_spec"), "requested_dataset_id",
8302 _op.get_attr("requested_dataset_id"), "metadata",
8303 _op.get_attr("metadata"))
8304 _inputs_flat = _op.inputs
8305 _execute.record_gradient(
8306 "RegisterDatasetV2", _inputs_flat, _attrs, _result)
8307 _result, = _result
8308 return _result
8310RegisterDatasetV2 = tf_export("raw_ops.RegisterDatasetV2")(_ops.to_raw_op(register_dataset_v2))
8313def register_dataset_v2_eager_fallback(dataset, address, protocol, external_state_policy, element_spec, requested_dataset_id, metadata, name, ctx):
8314 external_state_policy = _execute.make_int(external_state_policy, "external_state_policy")
8315 if element_spec is None:
8316 element_spec = ""
8317 element_spec = _execute.make_str(element_spec, "element_spec")
8318 if requested_dataset_id is None:
8319 requested_dataset_id = ""
8320 requested_dataset_id = _execute.make_str(requested_dataset_id, "requested_dataset_id")
8321 if metadata is None:
8322 metadata = ""
8323 metadata = _execute.make_str(metadata, "metadata")
8324 dataset = _ops.convert_to_tensor(dataset, _dtypes.variant)
8325 address = _ops.convert_to_tensor(address, _dtypes.string)
8326 protocol = _ops.convert_to_tensor(protocol, _dtypes.string)
8327 _inputs_flat = [dataset, address, protocol]
8328 _attrs = ("external_state_policy", external_state_policy, "element_spec",
8329 element_spec, "requested_dataset_id", requested_dataset_id, "metadata",
8330 metadata)
8331 _result = _execute.execute(b"RegisterDatasetV2", 1, inputs=_inputs_flat,
8332 attrs=_attrs, ctx=ctx, name=name)
8333 if _execute.must_record_gradient():
8334 _execute.record_gradient(
8335 "RegisterDatasetV2", _inputs_flat, _attrs, _result)
8336 _result, = _result
8337 return _result
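# Illustrative usage (hand-written sketch, not part of the generated code):
# assumes a TF release whose register_dataset accepts `dataset_id`, which maps
# to this op's `requested_dataset_id` and makes registration idempotent by key.
def _example_register_dataset_v2():
  import tensorflow as tf
  dispatcher = tf.data.experimental.service.DispatchServer()
  # Keep a reference so the worker stays alive while reading.
  worker = tf.data.experimental.service.WorkerServer(
      tf.data.experimental.service.WorkerConfig(
          dispatcher_address=dispatcher.target.split("://")[1]))
  dataset_id = tf.data.experimental.service.register_dataset(
      dispatcher.target, tf.data.Dataset.range(3), dataset_id="range3")
  ds = tf.data.experimental.service.from_dataset_id(
      "parallel_epochs", dispatcher.target, dataset_id,
      element_spec=tf.TensorSpec([], tf.int64))
  return sorted(ds.as_numpy_iterator())  # [0, 1, 2]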
8340def sampling_dataset(input_dataset, rate, seed, seed2, output_types, output_shapes, name=None):
8341 r"""Creates a dataset that takes a Bernoulli sample of the contents of another dataset.
8343 There is no transformation in the `tf.data` Python API for creating this dataset.
8344 Instead, it is created as a result of the `filter_with_random_uniform_fusion`
8345 static optimization. Whether this optimization is performed is determined by the
8346 `experimental_optimization.filter_with_random_uniform_fusion` option of
8347 `tf.data.Options`.
8349 Args:
8350 input_dataset: A `Tensor` of type `variant`.
8351 rate: A `Tensor` of type `float32`.
8352 A scalar representing the sample rate. Each element of `input_dataset` is
8353 retained with this probability, independent of all other elements.
8354 seed: A `Tensor` of type `int64`.
8355 A scalar representing the seed of the random number generator.
8356 seed2: A `Tensor` of type `int64`.
8357 A second scalar seed used to avoid seed collision.
8358 output_types: A list of `tf.DTypes` that has length `>= 1`.
8359 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
8360 name: A name for the operation (optional).
8362 Returns:
8363 A `Tensor` of type `variant`.
8364 """
8365 _ctx = _context._context or _context.context()
8366 tld = _ctx._thread_local_data
8367 if tld.is_eager:
8368 try:
8369 _result = pywrap_tfe.TFE_Py_FastPathExecute(
8370 _ctx, "SamplingDataset", name, input_dataset, rate, seed, seed2,
8371 "output_types", output_types, "output_shapes", output_shapes)
8372 return _result
8373 except _core._NotOkStatusException as e:
8374 _ops.raise_from_not_ok_status(e, name)
8375 except _core._FallbackException:
8376 pass
8377 try:
8378 return sampling_dataset_eager_fallback(
8379 input_dataset, rate, seed, seed2, output_types=output_types,
8380 output_shapes=output_shapes, name=name, ctx=_ctx)
8381 except _core._SymbolicException:
8382 pass # Add nodes to the TensorFlow graph.
8383 # Add nodes to the TensorFlow graph.
8384 if not isinstance(output_types, (list, tuple)):
8385 raise TypeError(
8386 "Expected list for 'output_types' argument to "
8387 "'sampling_dataset' Op, not %r." % output_types)
8388 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
8389 if not isinstance(output_shapes, (list, tuple)):
8390 raise TypeError(
8391 "Expected list for 'output_shapes' argument to "
8392 "'sampling_dataset' Op, not %r." % output_shapes)
8393 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
8394 _, _, _op, _outputs = _op_def_library._apply_op_helper(
8395 "SamplingDataset", input_dataset=input_dataset, rate=rate, seed=seed,
8396 seed2=seed2, output_types=output_types,
8397 output_shapes=output_shapes, name=name)
8398 _result = _outputs[:]
8399 if _execute.must_record_gradient():
8400 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
8401 _op.get_attr("output_shapes"))
8402 _inputs_flat = _op.inputs
8403 _execute.record_gradient(
8404 "SamplingDataset", _inputs_flat, _attrs, _result)
8405 _result, = _result
8406 return _result
8408SamplingDataset = tf_export("raw_ops.SamplingDataset")(_ops.to_raw_op(sampling_dataset))
8411def sampling_dataset_eager_fallback(input_dataset, rate, seed, seed2, output_types, output_shapes, name, ctx):
8412 if not isinstance(output_types, (list, tuple)):
8413 raise TypeError(
8414 "Expected list for 'output_types' argument to "
8415 "'sampling_dataset' Op, not %r." % output_types)
8416 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
8417 if not isinstance(output_shapes, (list, tuple)):
8418 raise TypeError(
8419 "Expected list for 'output_shapes' argument to "
8420 "'sampling_dataset' Op, not %r." % output_shapes)
8421 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
8422 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
8423 rate = _ops.convert_to_tensor(rate, _dtypes.float32)
8424 seed = _ops.convert_to_tensor(seed, _dtypes.int64)
8425 seed2 = _ops.convert_to_tensor(seed2, _dtypes.int64)
8426 _inputs_flat = [input_dataset, rate, seed, seed2]
8427 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
8428 _result = _execute.execute(b"SamplingDataset", 1, inputs=_inputs_flat,
8429 attrs=_attrs, ctx=ctx, name=name)
8430 if _execute.must_record_gradient():
8431 _execute.record_gradient(
8432 "SamplingDataset", _inputs_flat, _attrs, _result)
8433 _result, = _result
8434 return _result
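# Illustrative usage (hand-written sketch, not part of the generated code): as
# the docstring notes, there is no public transformation for this op, but it
# can be driven directly via the public to_variant/from_variant helpers. Each
# element is kept independently with probability `rate`.
def _example_sampling_dataset():
  import tensorflow as tf
  variant = tf.data.experimental.to_variant(tf.data.Dataset.range(100))
  sampled = tf.raw_ops.SamplingDataset(
      input_dataset=variant,
      rate=0.1,
      seed=1,
      seed2=2,
      output_types=[tf.int64],
      output_shapes=[tf.TensorShape([])])
  ds = tf.data.experimental.from_variant(sampled, tf.TensorSpec([], tf.int64))
  return len(list(ds))  # Roughly 10 of the 100 elements survive.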
8437def save_dataset(input_dataset, path, shard_func_other_args, shard_func, compression="", use_shard_func=True, name=None):
8438 r"""TODO: add doc.
8440 Args:
8441 input_dataset: A `Tensor` of type `variant`.
8442 path: A `Tensor` of type `string`.
8443 shard_func_other_args: A list of `Tensor` objects.
8444 shard_func: A function decorated with @Defun.
8445 compression: An optional `string`. Defaults to `""`.
8446 use_shard_func: An optional `bool`. Defaults to `True`.
8447 name: A name for the operation (optional).
8449 Returns:
8450 The created Operation.
8451 """
8452 _ctx = _context._context or _context.context()
8453 tld = _ctx._thread_local_data
8454 if tld.is_eager:
8455 try:
8456 _result = pywrap_tfe.TFE_Py_FastPathExecute(
8457 _ctx, "SaveDataset", name, input_dataset, path, shard_func_other_args,
8458 "compression", compression, "shard_func", shard_func,
8459 "use_shard_func", use_shard_func)
8460 return _result
8461 except _core._NotOkStatusException as e:
8462 _ops.raise_from_not_ok_status(e, name)
8463 except _core._FallbackException:
8464 pass
8465 try:
8466 return save_dataset_eager_fallback(
8467 input_dataset, path, shard_func_other_args, compression=compression,
8468 shard_func=shard_func, use_shard_func=use_shard_func, name=name,
8469 ctx=_ctx)
8470 except _core._SymbolicException:
8471 pass # Add nodes to the TensorFlow graph.
8472 # Add nodes to the TensorFlow graph.
8473 if compression is None:
8474 compression = ""
8475 compression = _execute.make_str(compression, "compression")
8476 if use_shard_func is None:
8477 use_shard_func = True
8478 use_shard_func = _execute.make_bool(use_shard_func, "use_shard_func")
8479 _, _, _op, _outputs = _op_def_library._apply_op_helper(
8480 "SaveDataset", input_dataset=input_dataset, path=path,
8481 shard_func_other_args=shard_func_other_args,
8482 shard_func=shard_func, compression=compression,
8483 use_shard_func=use_shard_func, name=name)
8484 return _op
8485SaveDataset = tf_export("raw_ops.SaveDataset")(_ops.to_raw_op(save_dataset))
8488def save_dataset_eager_fallback(input_dataset, path, shard_func_other_args, shard_func, compression, use_shard_func, name, ctx):
8489 if compression is None:
8490 compression = ""
8491 compression = _execute.make_str(compression, "compression")
8492 if use_shard_func is None:
8493 use_shard_func = True
8494 use_shard_func = _execute.make_bool(use_shard_func, "use_shard_func")
8495 _attr_Tshard_func_args, shard_func_other_args = _execute.convert_to_mixed_eager_tensors(shard_func_other_args, ctx)
8496 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
8497 path = _ops.convert_to_tensor(path, _dtypes.string)
8498 _inputs_flat = [input_dataset, path] + list(shard_func_other_args)
8499 _attrs = ("compression", compression, "shard_func", shard_func,
8500 "use_shard_func", use_shard_func, "Tshard_func_args",
8501 _attr_Tshard_func_args)
8502 _result = _execute.execute(b"SaveDataset", 0, inputs=_inputs_flat,
8503 attrs=_attrs, ctx=ctx, name=name)
8504 _result = None
8505 return _result
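# Usage sketch: because `shard_func` is a function attr, `SaveDataset` is awkward
# to call directly; in practice it is reached through `tf.data.experimental.save`
# (available since roughly TF 2.3). The path below is illustrative.
def _save_dataset_example():  # hypothetical helper; never called in this file
  import tensorflow as tf
  ds = tf.data.Dataset.range(10)
  tf.data.experimental.save(ds, "/tmp/example_saved_dataset")
  return tf.data.experimental.load(
      "/tmp/example_saved_dataset",
      element_spec=tf.TensorSpec([], tf.int64))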
8508def save_dataset_v2(input_dataset, path, shard_func_other_args, shard_func, output_types, output_shapes, compression="", use_shard_func=True, name=None):
8509 r"""TODO: add doc.
8511 Args:
8512 input_dataset: A `Tensor` of type `variant`.
8513 path: A `Tensor` of type `string`.
8514 shard_func_other_args: A list of `Tensor` objects.
8515 shard_func: A function decorated with @Defun.
8516 output_types: A list of `tf.DTypes` that has length `>= 1`.
8517 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
8518 compression: An optional `string`. Defaults to `""`.
8519 use_shard_func: An optional `bool`. Defaults to `True`.
8520 name: A name for the operation (optional).
8522 Returns:
8523 A `Tensor` of type `variant`.
8524 """
8525 _ctx = _context._context or _context.context()
8526 tld = _ctx._thread_local_data
8527 if tld.is_eager:
8528 try:
8529 _result = pywrap_tfe.TFE_Py_FastPathExecute(
8530 _ctx, "SaveDatasetV2", name, input_dataset, path,
8531 shard_func_other_args, "compression", compression, "shard_func",
8532 shard_func, "use_shard_func", use_shard_func, "output_types",
8533 output_types, "output_shapes", output_shapes)
8534 return _result
8535 except _core._NotOkStatusException as e:
8536 _ops.raise_from_not_ok_status(e, name)
8537 except _core._FallbackException:
8538 pass
8539 try:
8540 return save_dataset_v2_eager_fallback(
8541 input_dataset, path, shard_func_other_args, compression=compression,
8542 shard_func=shard_func, use_shard_func=use_shard_func,
8543 output_types=output_types, output_shapes=output_shapes, name=name,
8544 ctx=_ctx)
8545 except _core._SymbolicException:
8546 pass # Add nodes to the TensorFlow graph.
8547 # Add nodes to the TensorFlow graph.
8548 if not isinstance(output_types, (list, tuple)):
8549 raise TypeError(
8550 "Expected list for 'output_types' argument to "
8551 "'save_dataset_v2' Op, not %r." % output_types)
8552 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
8553 if not isinstance(output_shapes, (list, tuple)):
8554 raise TypeError(
8555 "Expected list for 'output_shapes' argument to "
8556 "'save_dataset_v2' Op, not %r." % output_shapes)
8557 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
8558 if compression is None:
8559 compression = ""
8560 compression = _execute.make_str(compression, "compression")
8561 if use_shard_func is None:
8562 use_shard_func = True
8563 use_shard_func = _execute.make_bool(use_shard_func, "use_shard_func")
8564 _, _, _op, _outputs = _op_def_library._apply_op_helper(
8565 "SaveDatasetV2", input_dataset=input_dataset, path=path,
8566 shard_func_other_args=shard_func_other_args,
8567 shard_func=shard_func, output_types=output_types,
8568 output_shapes=output_shapes, compression=compression,
8569 use_shard_func=use_shard_func, name=name)
8570 _result = _outputs[:]
8571 if _execute.must_record_gradient():
8572 _attrs = ("compression", _op.get_attr("compression"), "shard_func",
8573 _op.get_attr("shard_func"), "use_shard_func",
8574 _op._get_attr_bool("use_shard_func"), "Tshard_func_args",
8575 _op.get_attr("Tshard_func_args"), "output_types",
8576 _op.get_attr("output_types"), "output_shapes",
8577 _op.get_attr("output_shapes"))
8578 _inputs_flat = _op.inputs
8579 _execute.record_gradient(
8580 "SaveDatasetV2", _inputs_flat, _attrs, _result)
8581 _result, = _result
8582 return _result
8584SaveDatasetV2 = tf_export("raw_ops.SaveDatasetV2")(_ops.to_raw_op(save_dataset_v2))
8587def save_dataset_v2_eager_fallback(input_dataset, path, shard_func_other_args, shard_func, output_types, output_shapes, compression, use_shard_func, name, ctx):
8588 if not isinstance(output_types, (list, tuple)):
8589 raise TypeError(
8590 "Expected list for 'output_types' argument to "
8591 "'save_dataset_v2' Op, not %r." % output_types)
8592 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
8593 if not isinstance(output_shapes, (list, tuple)):
8594 raise TypeError(
8595 "Expected list for 'output_shapes' argument to "
8596 "'save_dataset_v2' Op, not %r." % output_shapes)
8597 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
8598 if compression is None:
8599 compression = ""
8600 compression = _execute.make_str(compression, "compression")
8601 if use_shard_func is None:
8602 use_shard_func = True
8603 use_shard_func = _execute.make_bool(use_shard_func, "use_shard_func")
8604 _attr_Tshard_func_args, shard_func_other_args = _execute.convert_to_mixed_eager_tensors(shard_func_other_args, ctx)
8605 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
8606 path = _ops.convert_to_tensor(path, _dtypes.string)
8607 _inputs_flat = [input_dataset, path] + list(shard_func_other_args)
8608 _attrs = ("compression", compression, "shard_func", shard_func,
8609 "use_shard_func", use_shard_func, "Tshard_func_args",
8610 _attr_Tshard_func_args, "output_types", output_types, "output_shapes",
8611 output_shapes)
8612 _result = _execute.execute(b"SaveDatasetV2", 1, inputs=_inputs_flat,
8613 attrs=_attrs, ctx=ctx, name=name)
8614 if _execute.must_record_gradient():
8615 _execute.record_gradient(
8616 "SaveDatasetV2", _inputs_flat, _attrs, _result)
8617 _result, = _result
8618 return _result
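# Usage sketch: `SaveDatasetV2` differs from `SaveDataset` by returning the dataset
# as a `variant`, which lets saving compose with further transformations; the
# public `tf.data.Dataset.save` method (recent TF versions) is built on it. The
# path below is illustrative.
def _save_dataset_v2_example():  # hypothetical helper; never called in this file
  import tensorflow as tf
  ds = tf.data.Dataset.range(10)
  ds.save("/tmp/example_saved_dataset_v2")
  return tf.data.experimental.load(
      "/tmp/example_saved_dataset_v2",
      element_spec=tf.TensorSpec([], tf.int64))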
8621def scan_dataset(input_dataset, initial_state, other_arguments, f, output_types, output_shapes, preserve_cardinality=False, use_default_device=True, metadata="", name=None):
8622 r"""Creates a dataset successively reduces `f` over the elements of `input_dataset`.
8624 Args:
8625 input_dataset: A `Tensor` of type `variant`.
8626 initial_state: A list of `Tensor` objects.
8627 other_arguments: A list of `Tensor` objects.
8628 f: A function decorated with @Defun.
8629 output_types: A list of `tf.DTypes` that has length `>= 1`.
8630 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
8631 preserve_cardinality: An optional `bool`. Defaults to `False`.
8632 use_default_device: An optional `bool`. Defaults to `True`.
8633 metadata: An optional `string`. Defaults to `""`.
8634 name: A name for the operation (optional).
8636 Returns:
8637 A `Tensor` of type `variant`.
8638 """
8639 _ctx = _context._context or _context.context()
8640 tld = _ctx._thread_local_data
8641 if tld.is_eager:
8642 try:
8643 _result = pywrap_tfe.TFE_Py_FastPathExecute(
8644 _ctx, "ScanDataset", name, input_dataset, initial_state,
8645 other_arguments, "f", f, "output_types", output_types,
8646 "output_shapes", output_shapes, "preserve_cardinality",
8647 preserve_cardinality, "use_default_device", use_default_device,
8648 "metadata", metadata)
8649 return _result
8650 except _core._NotOkStatusException as e:
8651 _ops.raise_from_not_ok_status(e, name)
8652 except _core._FallbackException:
8653 pass
8654 try:
8655 return scan_dataset_eager_fallback(
8656 input_dataset, initial_state, other_arguments, f=f,
8657 output_types=output_types, output_shapes=output_shapes,
8658 preserve_cardinality=preserve_cardinality,
8659 use_default_device=use_default_device, metadata=metadata, name=name,
8660 ctx=_ctx)
8661 except _core._SymbolicException:
8662 pass # Add nodes to the TensorFlow graph.
8663 # Add nodes to the TensorFlow graph.
8664 if not isinstance(output_types, (list, tuple)):
8665 raise TypeError(
8666 "Expected list for 'output_types' argument to "
8667 "'scan_dataset' Op, not %r." % output_types)
8668 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
8669 if not isinstance(output_shapes, (list, tuple)):
8670 raise TypeError(
8671 "Expected list for 'output_shapes' argument to "
8672 "'scan_dataset' Op, not %r." % output_shapes)
8673 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
8674 if preserve_cardinality is None:
8675 preserve_cardinality = False
8676 preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality")
8677 if use_default_device is None:
8678 use_default_device = True
8679 use_default_device = _execute.make_bool(use_default_device, "use_default_device")
8680 if metadata is None:
8681 metadata = ""
8682 metadata = _execute.make_str(metadata, "metadata")
8683 _, _, _op, _outputs = _op_def_library._apply_op_helper(
8684 "ScanDataset", input_dataset=input_dataset,
8685 initial_state=initial_state,
8686 other_arguments=other_arguments, f=f,
8687 output_types=output_types, output_shapes=output_shapes,
8688 preserve_cardinality=preserve_cardinality,
8689 use_default_device=use_default_device,
8690 metadata=metadata, name=name)
8691 _result = _outputs[:]
8692 if _execute.must_record_gradient():
8693 _attrs = ("f", _op.get_attr("f"), "Tstate", _op.get_attr("Tstate"),
8694 "Targuments", _op.get_attr("Targuments"), "output_types",
8695 _op.get_attr("output_types"), "output_shapes",
8696 _op.get_attr("output_shapes"), "preserve_cardinality",
8697 _op._get_attr_bool("preserve_cardinality"),
8698 "use_default_device", _op._get_attr_bool("use_default_device"),
8699 "metadata", _op.get_attr("metadata"))
8700 _inputs_flat = _op.inputs
8701 _execute.record_gradient(
8702 "ScanDataset", _inputs_flat, _attrs, _result)
8703 _result, = _result
8704 return _result
8706ScanDataset = tf_export("raw_ops.ScanDataset")(_ops.to_raw_op(scan_dataset))
8709def scan_dataset_eager_fallback(input_dataset, initial_state, other_arguments, f, output_types, output_shapes, preserve_cardinality, use_default_device, metadata, name, ctx):
8710 if not isinstance(output_types, (list, tuple)):
8711 raise TypeError(
8712 "Expected list for 'output_types' argument to "
8713 "'scan_dataset' Op, not %r." % output_types)
8714 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
8715 if not isinstance(output_shapes, (list, tuple)):
8716 raise TypeError(
8717 "Expected list for 'output_shapes' argument to "
8718 "'scan_dataset' Op, not %r." % output_shapes)
8719 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
8720 if preserve_cardinality is None:
8721 preserve_cardinality = False
8722 preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality")
8723 if use_default_device is None:
8724 use_default_device = True
8725 use_default_device = _execute.make_bool(use_default_device, "use_default_device")
8726 if metadata is None:
8727 metadata = ""
8728 metadata = _execute.make_str(metadata, "metadata")
8729 _attr_Tstate, initial_state = _execute.convert_to_mixed_eager_tensors(initial_state, ctx)
8730 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx)
8731 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
8732 _inputs_flat = [input_dataset] + list(initial_state) + list(other_arguments)
8733 _attrs = ("f", f, "Tstate", _attr_Tstate, "Targuments", _attr_Targuments,
8734 "output_types", output_types, "output_shapes", output_shapes,
8735 "preserve_cardinality", preserve_cardinality, "use_default_device",
8736 use_default_device, "metadata", metadata)
8737 _result = _execute.execute(b"ScanDataset", 1, inputs=_inputs_flat,
8738 attrs=_attrs, ctx=ctx, name=name)
8739 if _execute.must_record_gradient():
8740 _execute.record_gradient(
8741 "ScanDataset", _inputs_flat, _attrs, _result)
8742 _result, = _result
8743 return _result
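# Usage sketch: `ScanDataset` backs `tf.data.experimental.scan`, which threads a
# state through `f` and emits one output per input element. A running-sum example:
def _scan_dataset_example():  # hypothetical helper; never called in this file
  import tensorflow as tf
  ds = tf.data.Dataset.range(5)
  # State and output are both the cumulative sum: yields 0, 1, 3, 6, 10.
  return ds.apply(tf.data.experimental.scan(
      tf.constant(0, tf.int64),
      lambda state, x: (state + x, state + x)))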
8746def set_stats_aggregator_dataset(input_dataset, stats_aggregator, tag, counter_prefix, output_types, output_shapes, name=None):
8747 r"""TODO: add doc.
8749 Args:
8750 input_dataset: A `Tensor` of type `variant`.
8751 stats_aggregator: A `Tensor` of type `resource`.
8752 tag: A `Tensor` of type `string`.
8753 counter_prefix: A `Tensor` of type `string`.
8754 output_types: A list of `tf.DTypes` that has length `>= 1`.
8755 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
8756 name: A name for the operation (optional).
8758 Returns:
8759 A `Tensor` of type `variant`.
8760 """
8761 _ctx = _context._context or _context.context()
8762 tld = _ctx._thread_local_data
8763 if tld.is_eager:
8764 try:
8765 _result = pywrap_tfe.TFE_Py_FastPathExecute(
8766 _ctx, "SetStatsAggregatorDataset", name, input_dataset,
8767 stats_aggregator, tag, counter_prefix, "output_types", output_types,
8768 "output_shapes", output_shapes)
8769 return _result
8770 except _core._NotOkStatusException as e:
8771 _ops.raise_from_not_ok_status(e, name)
8772 except _core._FallbackException:
8773 pass
8774 try:
8775 return set_stats_aggregator_dataset_eager_fallback(
8776 input_dataset, stats_aggregator, tag, counter_prefix,
8777 output_types=output_types, output_shapes=output_shapes, name=name,
8778 ctx=_ctx)
8779 except _core._SymbolicException:
8780 pass # Add nodes to the TensorFlow graph.
8781 # Add nodes to the TensorFlow graph.
8782 if not isinstance(output_types, (list, tuple)):
8783 raise TypeError(
8784 "Expected list for 'output_types' argument to "
8785 "'set_stats_aggregator_dataset' Op, not %r." % output_types)
8786 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
8787 if not isinstance(output_shapes, (list, tuple)):
8788 raise TypeError(
8789 "Expected list for 'output_shapes' argument to "
8790 "'set_stats_aggregator_dataset' Op, not %r." % output_shapes)
8791 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
8792 _, _, _op, _outputs = _op_def_library._apply_op_helper(
8793 "SetStatsAggregatorDataset", input_dataset=input_dataset,
8794 stats_aggregator=stats_aggregator,
8795 tag=tag, counter_prefix=counter_prefix,
8796 output_types=output_types,
8797 output_shapes=output_shapes, name=name)
8798 _result = _outputs[:]
8799 if _execute.must_record_gradient():
8800 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
8801 _op.get_attr("output_shapes"))
8802 _inputs_flat = _op.inputs
8803 _execute.record_gradient(
8804 "SetStatsAggregatorDataset", _inputs_flat, _attrs, _result)
8805 _result, = _result
8806 return _result
8808SetStatsAggregatorDataset = tf_export("raw_ops.SetStatsAggregatorDataset")(_ops.to_raw_op(set_stats_aggregator_dataset))
8811def set_stats_aggregator_dataset_eager_fallback(input_dataset, stats_aggregator, tag, counter_prefix, output_types, output_shapes, name, ctx):
8812 if not isinstance(output_types, (list, tuple)):
8813 raise TypeError(
8814 "Expected list for 'output_types' argument to "
8815 "'set_stats_aggregator_dataset' Op, not %r." % output_types)
8816 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
8817 if not isinstance(output_shapes, (list, tuple)):
8818 raise TypeError(
8819 "Expected list for 'output_shapes' argument to "
8820 "'set_stats_aggregator_dataset' Op, not %r." % output_shapes)
8821 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
8822 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
8823 stats_aggregator = _ops.convert_to_tensor(stats_aggregator, _dtypes.resource)
8824 tag = _ops.convert_to_tensor(tag, _dtypes.string)
8825 counter_prefix = _ops.convert_to_tensor(counter_prefix, _dtypes.string)
8826 _inputs_flat = [input_dataset, stats_aggregator, tag, counter_prefix]
8827 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
8828 _result = _execute.execute(b"SetStatsAggregatorDataset", 1,
8829 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
8830 name=name)
8831 if _execute.must_record_gradient():
8832 _execute.record_gradient(
8833 "SetStatsAggregatorDataset", _inputs_flat, _attrs, _result)
8834 _result, = _result
8835 return _result
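# Usage sketch: `SetStatsAggregatorDataset` expects a `resource` handle to a stats
# aggregator. The handle argument below is hypothetical; historically it came from
# the `tf.data.experimental.StatsAggregator` API.
def _set_stats_aggregator_example(aggregator_handle):  # hypothetical helper
  import tensorflow as tf
  ds = tf.data.Dataset.range(10)
  return set_stats_aggregator_dataset(
      ds._variant_tensor, aggregator_handle, tag="example_tag",
      counter_prefix="example", output_types=[tf.int64],
      output_shapes=[tf.TensorShape([])])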
8838def sleep_dataset(input_dataset, sleep_microseconds, output_types, output_shapes, name=None):
8839 r"""TODO: add doc.
8841 Args:
8842 input_dataset: A `Tensor` of type `variant`.
8843 sleep_microseconds: A `Tensor` of type `int64`.
8844 output_types: A list of `tf.DTypes` that has length `>= 1`.
8845 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
8846 name: A name for the operation (optional).
8848 Returns:
8849 A `Tensor` of type `variant`.
8850 """
8851 _ctx = _context._context or _context.context()
8852 tld = _ctx._thread_local_data
8853 if tld.is_eager:
8854 try:
8855 _result = pywrap_tfe.TFE_Py_FastPathExecute(
8856 _ctx, "SleepDataset", name, input_dataset, sleep_microseconds,
8857 "output_types", output_types, "output_shapes", output_shapes)
8858 return _result
8859 except _core._NotOkStatusException as e:
8860 _ops.raise_from_not_ok_status(e, name)
8861 except _core._FallbackException:
8862 pass
8863 try:
8864 return sleep_dataset_eager_fallback(
8865 input_dataset, sleep_microseconds, output_types=output_types,
8866 output_shapes=output_shapes, name=name, ctx=_ctx)
8867 except _core._SymbolicException:
8868 pass # Add nodes to the TensorFlow graph.
8869 # Add nodes to the TensorFlow graph.
8870 if not isinstance(output_types, (list, tuple)):
8871 raise TypeError(
8872 "Expected list for 'output_types' argument to "
8873 "'sleep_dataset' Op, not %r." % output_types)
8874 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
8875 if not isinstance(output_shapes, (list, tuple)):
8876 raise TypeError(
8877 "Expected list for 'output_shapes' argument to "
8878 "'sleep_dataset' Op, not %r." % output_shapes)
8879 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
8880 _, _, _op, _outputs = _op_def_library._apply_op_helper(
8881 "SleepDataset", input_dataset=input_dataset,
8882 sleep_microseconds=sleep_microseconds,
8883 output_types=output_types,
8884 output_shapes=output_shapes, name=name)
8885 _result = _outputs[:]
8886 if _execute.must_record_gradient():
8887 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
8888 _op.get_attr("output_shapes"))
8889 _inputs_flat = _op.inputs
8890 _execute.record_gradient(
8891 "SleepDataset", _inputs_flat, _attrs, _result)
8892 _result, = _result
8893 return _result
8895SleepDataset = tf_export("raw_ops.SleepDataset")(_ops.to_raw_op(sleep_dataset))
8898def sleep_dataset_eager_fallback(input_dataset, sleep_microseconds, output_types, output_shapes, name, ctx):
8899 if not isinstance(output_types, (list, tuple)):
8900 raise TypeError(
8901 "Expected list for 'output_types' argument to "
8902 "'sleep_dataset' Op, not %r." % output_types)
8903 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
8904 if not isinstance(output_shapes, (list, tuple)):
8905 raise TypeError(
8906 "Expected list for 'output_shapes' argument to "
8907 "'sleep_dataset' Op, not %r." % output_shapes)
8908 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
8909 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
8910 sleep_microseconds = _ops.convert_to_tensor(sleep_microseconds, _dtypes.int64)
8911 _inputs_flat = [input_dataset, sleep_microseconds]
8912 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
8913 _result = _execute.execute(b"SleepDataset", 1, inputs=_inputs_flat,
8914 attrs=_attrs, ctx=ctx, name=name)
8915 if _execute.must_record_gradient():
8916 _execute.record_gradient(
8917 "SleepDataset", _inputs_flat, _attrs, _result)
8918 _result, = _result
8919 return _result
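# Usage sketch: `SleepDataset` delays each element by `sleep_microseconds`; it is
# used mainly by tf.data's own tests and benchmarks. Assumes eager TF2 and uses
# the internal `_variant_tensor` attribute for illustration.
def _sleep_dataset_example():  # hypothetical helper; never called in this file
  import tensorflow as tf
  ds = tf.data.Dataset.range(3)
  return sleep_dataset(
      ds._variant_tensor, sleep_microseconds=1000,
      output_types=[tf.int64], output_shapes=[tf.TensorShape([])])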
8922def sliding_window_dataset(input_dataset, window_size, window_shift, window_stride, output_types, output_shapes, drop_remainder=True, name=None):
8923 r"""Creates a dataset that passes a sliding window over `input_dataset`.
8925 Args:
8926 input_dataset: A `Tensor` of type `variant`.
8927 window_size: A `Tensor` of type `int64`.
8928 A scalar representing the number of elements in the
8929 sliding window.
8930 window_shift: A `Tensor` of type `int64`.
8931 A scalar representing the steps moving the sliding window
8932 forward in one iteration. It must be positive.
8933 window_stride: A `Tensor` of type `int64`.
8934 A scalar representing the stride of the input elements of the sliding window.
8935 It must be positive.
8936 output_types: A list of `tf.DTypes` that has length `>= 1`.
8937 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
8938 drop_remainder: An optional `bool`. Defaults to `True`.
8939 name: A name for the operation (optional).
8941 Returns:
8942 A `Tensor` of type `variant`.
8943 """
8944 _ctx = _context._context or _context.context()
8945 tld = _ctx._thread_local_data
8946 if tld.is_eager:
8947 try:
8948 _result = pywrap_tfe.TFE_Py_FastPathExecute(
8949 _ctx, "SlidingWindowDataset", name, input_dataset, window_size,
8950 window_shift, window_stride, "drop_remainder", drop_remainder,
8951 "output_types", output_types, "output_shapes", output_shapes)
8952 return _result
8953 except _core._NotOkStatusException as e:
8954 _ops.raise_from_not_ok_status(e, name)
8955 except _core._FallbackException:
8956 pass
8957 try:
8958 return sliding_window_dataset_eager_fallback(
8959 input_dataset, window_size, window_shift, window_stride,
8960 drop_remainder=drop_remainder, output_types=output_types,
8961 output_shapes=output_shapes, name=name, ctx=_ctx)
8962 except _core._SymbolicException:
8963 pass # Add nodes to the TensorFlow graph.
8964 # Add nodes to the TensorFlow graph.
8965 if not isinstance(output_types, (list, tuple)):
8966 raise TypeError(
8967 "Expected list for 'output_types' argument to "
8968 "'sliding_window_dataset' Op, not %r." % output_types)
8969 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
8970 if not isinstance(output_shapes, (list, tuple)):
8971 raise TypeError(
8972 "Expected list for 'output_shapes' argument to "
8973 "'sliding_window_dataset' Op, not %r." % output_shapes)
8974 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
8975 if drop_remainder is None:
8976 drop_remainder = True
8977 drop_remainder = _execute.make_bool(drop_remainder, "drop_remainder")
8978 _, _, _op, _outputs = _op_def_library._apply_op_helper(
8979 "SlidingWindowDataset", input_dataset=input_dataset,
8980 window_size=window_size,
8981 window_shift=window_shift,
8982 window_stride=window_stride,
8983 output_types=output_types,
8984 output_shapes=output_shapes,
8985 drop_remainder=drop_remainder, name=name)
8986 _result = _outputs[:]
8987 if _execute.must_record_gradient():
8988 _attrs = ("drop_remainder", _op._get_attr_bool("drop_remainder"),
8989 "output_types", _op.get_attr("output_types"), "output_shapes",
8990 _op.get_attr("output_shapes"))
8991 _inputs_flat = _op.inputs
8992 _execute.record_gradient(
8993 "SlidingWindowDataset", _inputs_flat, _attrs, _result)
8994 _result, = _result
8995 return _result
8997SlidingWindowDataset = tf_export("raw_ops.SlidingWindowDataset")(_ops.to_raw_op(sliding_window_dataset))
9000def sliding_window_dataset_eager_fallback(input_dataset, window_size, window_shift, window_stride, output_types, output_shapes, drop_remainder, name, ctx):
9001 if not isinstance(output_types, (list, tuple)):
9002 raise TypeError(
9003 "Expected list for 'output_types' argument to "
9004 "'sliding_window_dataset' Op, not %r." % output_types)
9005 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
9006 if not isinstance(output_shapes, (list, tuple)):
9007 raise TypeError(
9008 "Expected list for 'output_shapes' argument to "
9009 "'sliding_window_dataset' Op, not %r." % output_shapes)
9010 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
9011 if drop_remainder is None:
9012 drop_remainder = True
9013 drop_remainder = _execute.make_bool(drop_remainder, "drop_remainder")
9014 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
9015 window_size = _ops.convert_to_tensor(window_size, _dtypes.int64)
9016 window_shift = _ops.convert_to_tensor(window_shift, _dtypes.int64)
9017 window_stride = _ops.convert_to_tensor(window_stride, _dtypes.int64)
9018 _inputs_flat = [input_dataset, window_size, window_shift, window_stride]
9019 _attrs = ("drop_remainder", drop_remainder, "output_types", output_types,
9020 "output_shapes", output_shapes)
9021 _result = _execute.execute(b"SlidingWindowDataset", 1, inputs=_inputs_flat,
9022 attrs=_attrs, ctx=ctx, name=name)
9023 if _execute.must_record_gradient():
9024 _execute.record_gradient(
9025 "SlidingWindowDataset", _inputs_flat, _attrs, _result)
9026 _result, = _result
9027 return _result
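# Usage sketch: with window_size=3, window_shift=1 and window_stride=1 over
# range(5), `SlidingWindowDataset` should yield the stacked windows [0,1,2],
# [1,2,3] and [2,3,4]; each output element gains a leading window dimension,
# hence the `[None]` shape below. Internal `_variant_tensor` is used for
# illustration only.
def _sliding_window_example():  # hypothetical helper; never called in this file
  import tensorflow as tf
  ds = tf.data.Dataset.range(5)
  return sliding_window_dataset(
      ds._variant_tensor, window_size=3, window_shift=1, window_stride=1,
      output_types=[tf.int64], output_shapes=[tf.TensorShape([None])])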
9030def snapshot_chunk_dataset(chunk_file, output_types, output_shapes, compression="", name=None):
9031 r"""TODO: add doc.
9033 Args:
9034 chunk_file: A `Tensor` of type `string`.
9035 output_types: A list of `tf.DTypes` that has length `>= 1`.
9036 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
9037 compression: An optional `string`. Defaults to `""`.
9038 name: A name for the operation (optional).
9040 Returns:
9041 A `Tensor` of type `variant`.
9042 """
9043 _ctx = _context._context or _context.context()
9044 tld = _ctx._thread_local_data
9045 if tld.is_eager:
9046 try:
9047 _result = pywrap_tfe.TFE_Py_FastPathExecute(
9048 _ctx, "SnapshotChunkDataset", name, chunk_file, "output_types",
9049 output_types, "output_shapes", output_shapes, "compression",
9050 compression)
9051 return _result
9052 except _core._NotOkStatusException as e:
9053 _ops.raise_from_not_ok_status(e, name)
9054 except _core._FallbackException:
9055 pass
9056 try:
9057 return snapshot_chunk_dataset_eager_fallback(
9058 chunk_file, output_types=output_types, output_shapes=output_shapes,
9059 compression=compression, name=name, ctx=_ctx)
9060 except _core._SymbolicException:
9061 pass # Add nodes to the TensorFlow graph.
9062 # Add nodes to the TensorFlow graph.
9063 if not isinstance(output_types, (list, tuple)):
9064 raise TypeError(
9065 "Expected list for 'output_types' argument to "
9066 "'snapshot_chunk_dataset' Op, not %r." % output_types)
9067 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
9068 if not isinstance(output_shapes, (list, tuple)):
9069 raise TypeError(
9070 "Expected list for 'output_shapes' argument to "
9071 "'snapshot_chunk_dataset' Op, not %r." % output_shapes)
9072 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
9073 if compression is None:
9074 compression = ""
9075 compression = _execute.make_str(compression, "compression")
9076 _, _, _op, _outputs = _op_def_library._apply_op_helper(
9077 "SnapshotChunkDataset", chunk_file=chunk_file,
9078 output_types=output_types,
9079 output_shapes=output_shapes,
9080 compression=compression, name=name)
9081 _result = _outputs[:]
9082 if _execute.must_record_gradient():
9083 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
9084 _op.get_attr("output_shapes"), "compression",
9085 _op.get_attr("compression"))
9086 _inputs_flat = _op.inputs
9087 _execute.record_gradient(
9088 "SnapshotChunkDataset", _inputs_flat, _attrs, _result)
9089 _result, = _result
9090 return _result
9092SnapshotChunkDataset = tf_export("raw_ops.SnapshotChunkDataset")(_ops.to_raw_op(snapshot_chunk_dataset))
9095def snapshot_chunk_dataset_eager_fallback(chunk_file, output_types, output_shapes, compression, name, ctx):
9096 if not isinstance(output_types, (list, tuple)):
9097 raise TypeError(
9098 "Expected list for 'output_types' argument to "
9099 "'snapshot_chunk_dataset' Op, not %r." % output_types)
9100 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
9101 if not isinstance(output_shapes, (list, tuple)):
9102 raise TypeError(
9103 "Expected list for 'output_shapes' argument to "
9104 "'snapshot_chunk_dataset' Op, not %r." % output_shapes)
9105 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
9106 if compression is None:
9107 compression = ""
9108 compression = _execute.make_str(compression, "compression")
9109 chunk_file = _ops.convert_to_tensor(chunk_file, _dtypes.string)
9110 _inputs_flat = [chunk_file]
9111 _attrs = ("output_types", output_types, "output_shapes", output_shapes,
9112 "compression", compression)
9113 _result = _execute.execute(b"SnapshotChunkDataset", 1, inputs=_inputs_flat,
9114 attrs=_attrs, ctx=ctx, name=name)
9115 if _execute.must_record_gradient():
9116 _execute.record_gradient(
9117 "SnapshotChunkDataset", _inputs_flat, _attrs, _result)
9118 _result, = _result
9119 return _result
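# Usage sketch: `SnapshotChunkDataset` reads a single chunk file produced by the
# tf.data snapshot writer. The chunk path below is hypothetical, and
# `output_types` / `output_shapes` / `compression` must match what the writer
# produced.
def _snapshot_chunk_example():  # hypothetical helper; never called in this file
  import tensorflow as tf
  return snapshot_chunk_dataset(
      "/tmp/snapshot/chunks/chunk_0",  # hypothetical chunk file
      output_types=[tf.int64], output_shapes=[tf.TensorShape([])],
      compression="")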
9122def snapshot_dataset(input_dataset, path, output_types, output_shapes, compression="", reader_path_prefix="", writer_path_prefix="", shard_size_bytes=10737418240, pending_snapshot_expiry_seconds=86400, num_reader_threads=1, reader_buffer_size=1, num_writer_threads=1, writer_buffer_size=1, shuffle_on_read=False, seed=0, seed2=0, mode="auto", snapshot_name="", name=None):
9123 r"""Creates a dataset that will write to / read from a snapshot.
9125 This dataset attempts to determine whether a valid snapshot exists at the
9126 `path`, and reads from the snapshot in lieu of using `input_dataset`.
9127 If not, it will run the preprocessing pipeline as usual, and write out a
9128 snapshot of the data processed for future use.
9130 Args:
9131 input_dataset: A `Tensor` of type `variant`.
9132 A variant tensor representing the input dataset.
9133 path: A `Tensor` of type `string`.
9134 The path we should write snapshots to / read snapshots from.
9135 output_types: A list of `tf.DTypes` that has length `>= 1`.
9136 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
9137 compression: An optional `string`. Defaults to `""`.
9138 reader_path_prefix: An optional `string`. Defaults to `""`.
9139 writer_path_prefix: An optional `string`. Defaults to `""`.
9140 shard_size_bytes: An optional `int`. Defaults to `10737418240`.
9141 pending_snapshot_expiry_seconds: An optional `int`. Defaults to `86400`.
9142 num_reader_threads: An optional `int`. Defaults to `1`.
9143 reader_buffer_size: An optional `int`. Defaults to `1`.
9144 num_writer_threads: An optional `int`. Defaults to `1`.
9145 writer_buffer_size: An optional `int`. Defaults to `1`.
9146 shuffle_on_read: An optional `bool`. Defaults to `False`.
9147 seed: An optional `int`. Defaults to `0`.
9148 seed2: An optional `int`. Defaults to `0`.
9149 mode: An optional `string`. Defaults to `"auto"`.
9150 snapshot_name: An optional `string`. Defaults to `""`.
9151 name: A name for the operation (optional).
9153 Returns:
9154 A `Tensor` of type `variant`.
9155 """
9156 _ctx = _context._context or _context.context()
9157 tld = _ctx._thread_local_data
9158 if tld.is_eager:
9159 try:
9160 _result = pywrap_tfe.TFE_Py_FastPathExecute(
9161 _ctx, "SnapshotDataset", name, input_dataset, path, "output_types",
9162 output_types, "output_shapes", output_shapes, "compression",
9163 compression, "reader_path_prefix", reader_path_prefix,
9164 "writer_path_prefix", writer_path_prefix, "shard_size_bytes",
9165 shard_size_bytes, "pending_snapshot_expiry_seconds",
9166 pending_snapshot_expiry_seconds, "num_reader_threads",
9167 num_reader_threads, "reader_buffer_size", reader_buffer_size,
9168 "num_writer_threads", num_writer_threads, "writer_buffer_size",
9169 writer_buffer_size, "shuffle_on_read", shuffle_on_read, "seed", seed,
9170 "seed2", seed2, "mode", mode, "snapshot_name", snapshot_name)
9171 return _result
9172 except _core._NotOkStatusException as e:
9173 _ops.raise_from_not_ok_status(e, name)
9174 except _core._FallbackException:
9175 pass
9176 try:
9177 return snapshot_dataset_eager_fallback(
9178 input_dataset, path, output_types=output_types,
9179 output_shapes=output_shapes, compression=compression,
9180 reader_path_prefix=reader_path_prefix,
9181 writer_path_prefix=writer_path_prefix,
9182 shard_size_bytes=shard_size_bytes,
9183 pending_snapshot_expiry_seconds=pending_snapshot_expiry_seconds,
9184 num_reader_threads=num_reader_threads,
9185 reader_buffer_size=reader_buffer_size,
9186 num_writer_threads=num_writer_threads,
9187 writer_buffer_size=writer_buffer_size,
9188 shuffle_on_read=shuffle_on_read, seed=seed, seed2=seed2, mode=mode,
9189 snapshot_name=snapshot_name, name=name, ctx=_ctx)
9190 except _core._SymbolicException:
9191 pass # Add nodes to the TensorFlow graph.
9192 # Add nodes to the TensorFlow graph.
9193 if not isinstance(output_types, (list, tuple)):
9194 raise TypeError(
9195 "Expected list for 'output_types' argument to "
9196 "'snapshot_dataset' Op, not %r." % output_types)
9197 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
9198 if not isinstance(output_shapes, (list, tuple)):
9199 raise TypeError(
9200 "Expected list for 'output_shapes' argument to "
9201 "'snapshot_dataset' Op, not %r." % output_shapes)
9202 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
9203 if compression is None:
9204 compression = ""
9205 compression = _execute.make_str(compression, "compression")
9206 if reader_path_prefix is None:
9207 reader_path_prefix = ""
9208 reader_path_prefix = _execute.make_str(reader_path_prefix, "reader_path_prefix")
9209 if writer_path_prefix is None:
9210 writer_path_prefix = ""
9211 writer_path_prefix = _execute.make_str(writer_path_prefix, "writer_path_prefix")
9212 if shard_size_bytes is None:
9213 shard_size_bytes = 10737418240
9214 shard_size_bytes = _execute.make_int(shard_size_bytes, "shard_size_bytes")
9215 if pending_snapshot_expiry_seconds is None:
9216 pending_snapshot_expiry_seconds = 86400
9217 pending_snapshot_expiry_seconds = _execute.make_int(pending_snapshot_expiry_seconds, "pending_snapshot_expiry_seconds")
9218 if num_reader_threads is None:
9219 num_reader_threads = 1
9220 num_reader_threads = _execute.make_int(num_reader_threads, "num_reader_threads")
9221 if reader_buffer_size is None:
9222 reader_buffer_size = 1
9223 reader_buffer_size = _execute.make_int(reader_buffer_size, "reader_buffer_size")
9224 if num_writer_threads is None:
9225 num_writer_threads = 1
9226 num_writer_threads = _execute.make_int(num_writer_threads, "num_writer_threads")
9227 if writer_buffer_size is None:
9228 writer_buffer_size = 1
9229 writer_buffer_size = _execute.make_int(writer_buffer_size, "writer_buffer_size")
9230 if shuffle_on_read is None:
9231 shuffle_on_read = False
9232 shuffle_on_read = _execute.make_bool(shuffle_on_read, "shuffle_on_read")
9233 if seed is None:
9234 seed = 0
9235 seed = _execute.make_int(seed, "seed")
9236 if seed2 is None:
9237 seed2 = 0
9238 seed2 = _execute.make_int(seed2, "seed2")
9239 if mode is None:
9240 mode = "auto"
9241 mode = _execute.make_str(mode, "mode")
9242 if snapshot_name is None:
9243 snapshot_name = ""
9244 snapshot_name = _execute.make_str(snapshot_name, "snapshot_name")
9245 _, _, _op, _outputs = _op_def_library._apply_op_helper(
9246 "SnapshotDataset", input_dataset=input_dataset, path=path,
9247 output_types=output_types,
9248 output_shapes=output_shapes,
9249 compression=compression,
9250 reader_path_prefix=reader_path_prefix,
9251 writer_path_prefix=writer_path_prefix,
9252 shard_size_bytes=shard_size_bytes,
9253 pending_snapshot_expiry_seconds=pending_snapshot_expiry_seconds,
9254 num_reader_threads=num_reader_threads,
9255 reader_buffer_size=reader_buffer_size,
9256 num_writer_threads=num_writer_threads,
9257 writer_buffer_size=writer_buffer_size,
9258 shuffle_on_read=shuffle_on_read, seed=seed,
9259 seed2=seed2, mode=mode,
9260 snapshot_name=snapshot_name, name=name)
9261 _result = _outputs[:]
9262 if _execute.must_record_gradient():
9263 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
9264 _op.get_attr("output_shapes"), "compression",
9265 _op.get_attr("compression"), "reader_path_prefix",
9266 _op.get_attr("reader_path_prefix"), "writer_path_prefix",
9267 _op.get_attr("writer_path_prefix"), "shard_size_bytes",
9268 _op._get_attr_int("shard_size_bytes"),
9269 "pending_snapshot_expiry_seconds",
9270 _op._get_attr_int("pending_snapshot_expiry_seconds"),
9271 "num_reader_threads", _op._get_attr_int("num_reader_threads"),
9272 "reader_buffer_size", _op._get_attr_int("reader_buffer_size"),
9273 "num_writer_threads", _op._get_attr_int("num_writer_threads"),
9274 "writer_buffer_size", _op._get_attr_int("writer_buffer_size"),
9275 "shuffle_on_read", _op._get_attr_bool("shuffle_on_read"),
9276 "seed", _op._get_attr_int("seed"), "seed2",
9277 _op._get_attr_int("seed2"), "mode", _op.get_attr("mode"),
9278 "snapshot_name", _op.get_attr("snapshot_name"))
9279 _inputs_flat = _op.inputs
9280 _execute.record_gradient(
9281 "SnapshotDataset", _inputs_flat, _attrs, _result)
9282 _result, = _result
9283 return _result
9285SnapshotDataset = tf_export("raw_ops.SnapshotDataset")(_ops.to_raw_op(snapshot_dataset))
9288def snapshot_dataset_eager_fallback(input_dataset, path, output_types, output_shapes, compression, reader_path_prefix, writer_path_prefix, shard_size_bytes, pending_snapshot_expiry_seconds, num_reader_threads, reader_buffer_size, num_writer_threads, writer_buffer_size, shuffle_on_read, seed, seed2, mode, snapshot_name, name, ctx):
9289 if not isinstance(output_types, (list, tuple)):
9290 raise TypeError(
9291 "Expected list for 'output_types' argument to "
9292 "'snapshot_dataset' Op, not %r." % output_types)
9293 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
9294 if not isinstance(output_shapes, (list, tuple)):
9295 raise TypeError(
9296 "Expected list for 'output_shapes' argument to "
9297 "'snapshot_dataset' Op, not %r." % output_shapes)
9298 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
9299 if compression is None:
9300 compression = ""
9301 compression = _execute.make_str(compression, "compression")
9302 if reader_path_prefix is None:
9303 reader_path_prefix = ""
9304 reader_path_prefix = _execute.make_str(reader_path_prefix, "reader_path_prefix")
9305 if writer_path_prefix is None:
9306 writer_path_prefix = ""
9307 writer_path_prefix = _execute.make_str(writer_path_prefix, "writer_path_prefix")
9308 if shard_size_bytes is None:
9309 shard_size_bytes = 10737418240
9310 shard_size_bytes = _execute.make_int(shard_size_bytes, "shard_size_bytes")
9311 if pending_snapshot_expiry_seconds is None:
9312 pending_snapshot_expiry_seconds = 86400
9313 pending_snapshot_expiry_seconds = _execute.make_int(pending_snapshot_expiry_seconds, "pending_snapshot_expiry_seconds")
9314 if num_reader_threads is None:
9315 num_reader_threads = 1
9316 num_reader_threads = _execute.make_int(num_reader_threads, "num_reader_threads")
9317 if reader_buffer_size is None:
9318 reader_buffer_size = 1
9319 reader_buffer_size = _execute.make_int(reader_buffer_size, "reader_buffer_size")
9320 if num_writer_threads is None:
9321 num_writer_threads = 1
9322 num_writer_threads = _execute.make_int(num_writer_threads, "num_writer_threads")
9323 if writer_buffer_size is None:
9324 writer_buffer_size = 1
9325 writer_buffer_size = _execute.make_int(writer_buffer_size, "writer_buffer_size")
9326 if shuffle_on_read is None:
9327 shuffle_on_read = False
9328 shuffle_on_read = _execute.make_bool(shuffle_on_read, "shuffle_on_read")
9329 if seed is None:
9330 seed = 0
9331 seed = _execute.make_int(seed, "seed")
9332 if seed2 is None:
9333 seed2 = 0
9334 seed2 = _execute.make_int(seed2, "seed2")
9335 if mode is None:
9336 mode = "auto"
9337 mode = _execute.make_str(mode, "mode")
9338 if snapshot_name is None:
9339 snapshot_name = ""
9340 snapshot_name = _execute.make_str(snapshot_name, "snapshot_name")
9341 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
9342 path = _ops.convert_to_tensor(path, _dtypes.string)
9343 _inputs_flat = [input_dataset, path]
9344 _attrs = ("output_types", output_types, "output_shapes", output_shapes,
9345 "compression", compression, "reader_path_prefix", reader_path_prefix,
9346 "writer_path_prefix", writer_path_prefix, "shard_size_bytes",
9347 shard_size_bytes, "pending_snapshot_expiry_seconds",
9348 pending_snapshot_expiry_seconds, "num_reader_threads", num_reader_threads,
9349 "reader_buffer_size", reader_buffer_size, "num_writer_threads",
9350 num_writer_threads, "writer_buffer_size", writer_buffer_size,
9351 "shuffle_on_read", shuffle_on_read, "seed", seed, "seed2", seed2, "mode",
9352 mode, "snapshot_name", snapshot_name)
9353 _result = _execute.execute(b"SnapshotDataset", 1, inputs=_inputs_flat,
9354 attrs=_attrs, ctx=ctx, name=name)
9355 if _execute.must_record_gradient():
9356 _execute.record_gradient(
9357 "SnapshotDataset", _inputs_flat, _attrs, _result)
9358 _result, = _result
9359 return _result
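# Usage sketch for the legacy `SnapshotDataset` (superseded by
# `SnapshotDatasetV2`): wraps `input_dataset` so that a materialized copy under
# `path` is written once and reused by later runs. All tuning attrs are left at
# their defaults; the path is illustrative.
def _snapshot_dataset_example():  # hypothetical helper; never called in this file
  import tensorflow as tf
  ds = tf.data.Dataset.range(10)
  return snapshot_dataset(
      ds._variant_tensor, "/tmp/example_snapshot",
      output_types=[tf.int64], output_shapes=[tf.TensorShape([])])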
9362def snapshot_dataset_reader(shard_dir, start_index, output_types, output_shapes, version, compression="", name=None):
9363 r"""TODO: add doc.
9365 Args:
9366 shard_dir: A `Tensor` of type `string`.
9367 start_index: A `Tensor` of type `int64`.
9368 output_types: A list of `tf.DTypes` that has length `>= 1`.
9369 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
9370 version: An `int`.
9371 compression: An optional `string`. Defaults to `""`.
9372 name: A name for the operation (optional).
9374 Returns:
9375 A `Tensor` of type `variant`.
9376 """
9377 _ctx = _context._context or _context.context()
9378 tld = _ctx._thread_local_data
9379 if tld.is_eager:
9380 try:
9381 _result = pywrap_tfe.TFE_Py_FastPathExecute(
9382 _ctx, "SnapshotDatasetReader", name, shard_dir, start_index,
9383 "output_types", output_types, "output_shapes", output_shapes,
9384 "compression", compression, "version", version)
9385 return _result
9386 except _core._NotOkStatusException as e:
9387 _ops.raise_from_not_ok_status(e, name)
9388 except _core._FallbackException:
9389 pass
9390 try:
9391 return snapshot_dataset_reader_eager_fallback(
9392 shard_dir, start_index, output_types=output_types,
9393 output_shapes=output_shapes, compression=compression,
9394 version=version, name=name, ctx=_ctx)
9395 except _core._SymbolicException:
9396 pass # Add nodes to the TensorFlow graph.
9397 # Add nodes to the TensorFlow graph.
9398 if not isinstance(output_types, (list, tuple)):
9399 raise TypeError(
9400 "Expected list for 'output_types' argument to "
9401 "'snapshot_dataset_reader' Op, not %r." % output_types)
9402 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
9403 if not isinstance(output_shapes, (list, tuple)):
9404 raise TypeError(
9405 "Expected list for 'output_shapes' argument to "
9406 "'snapshot_dataset_reader' Op, not %r." % output_shapes)
9407 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
9408 version = _execute.make_int(version, "version")
9409 if compression is None:
9410 compression = ""
9411 compression = _execute.make_str(compression, "compression")
9412 _, _, _op, _outputs = _op_def_library._apply_op_helper(
9413 "SnapshotDatasetReader", shard_dir=shard_dir, start_index=start_index,
9414 output_types=output_types,
9415 output_shapes=output_shapes, version=version,
9416 compression=compression, name=name)
9417 _result = _outputs[:]
9418 if _execute.must_record_gradient():
9419 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
9420 _op.get_attr("output_shapes"), "compression",
9421 _op.get_attr("compression"), "version",
9422 _op._get_attr_int("version"))
9423 _inputs_flat = _op.inputs
9424 _execute.record_gradient(
9425 "SnapshotDatasetReader", _inputs_flat, _attrs, _result)
9426 _result, = _result
9427 return _result
9429SnapshotDatasetReader = tf_export("raw_ops.SnapshotDatasetReader")(_ops.to_raw_op(snapshot_dataset_reader))
9432def snapshot_dataset_reader_eager_fallback(shard_dir, start_index, output_types, output_shapes, version, compression, name, ctx):
9433 if not isinstance(output_types, (list, tuple)):
9434 raise TypeError(
9435 "Expected list for 'output_types' argument to "
9436 "'snapshot_dataset_reader' Op, not %r." % output_types)
9437 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
9438 if not isinstance(output_shapes, (list, tuple)):
9439 raise TypeError(
9440 "Expected list for 'output_shapes' argument to "
9441 "'snapshot_dataset_reader' Op, not %r." % output_shapes)
9442 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
9443 version = _execute.make_int(version, "version")
9444 if compression is None:
9445 compression = ""
9446 compression = _execute.make_str(compression, "compression")
9447 shard_dir = _ops.convert_to_tensor(shard_dir, _dtypes.string)
9448 start_index = _ops.convert_to_tensor(start_index, _dtypes.int64)
9449 _inputs_flat = [shard_dir, start_index]
9450 _attrs = ("output_types", output_types, "output_shapes", output_shapes,
9451 "compression", compression, "version", version)
9452 _result = _execute.execute(b"SnapshotDatasetReader", 1, inputs=_inputs_flat,
9453 attrs=_attrs, ctx=ctx, name=name)
9454 if _execute.must_record_gradient():
9455 _execute.record_gradient(
9456 "SnapshotDatasetReader", _inputs_flat, _attrs, _result)
9457 _result, = _result
9458 return _result
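# Usage sketch: `SnapshotDatasetReader` reads one shard of a written snapshot,
# starting at element `start_index`. The shard directory and `version` below are
# hypothetical and must match the snapshot layout on disk.
def _snapshot_reader_example():  # hypothetical helper; never called in this file
  import tensorflow as tf
  return snapshot_dataset_reader(
      "/tmp/example_snapshot/run_0/shard_0",  # hypothetical shard dir
      start_index=0, output_types=[tf.int64],
      output_shapes=[tf.TensorShape([])], version=2, compression="")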
9461def snapshot_dataset_v2(input_dataset, path, reader_func_other_args, shard_func_other_args, output_types, output_shapes, reader_func, shard_func, compression="", reader_prefix="", writer_prefix="", hash_valid=False, hash=0, metadata="", name=None):
9462 r"""Creates a dataset that will write to / read from a snapshot.
9464 This dataset attempts to determine whether a valid snapshot exists at the
9465 `path`, and reads from the snapshot in lieu of using `input_dataset`.
9466 If not, it will run the preprocessing pipeline as usual, and write out a
9467 snapshot of the data processed for future use.
9469 Args:
9470 input_dataset: A `Tensor` of type `variant`.
9471 A variant tensor representing the input dataset.
9472 path: A `Tensor` of type `string`.
9473 The path we should write snapshots to / read snapshots from.
9474 reader_func_other_args: A list of `Tensor` objects.
9475 shard_func_other_args: A list of `Tensor` objects.
9476 output_types: A list of `tf.DTypes` that has length `>= 1`.
9477 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
9478 reader_func: A function decorated with @Defun.
9479 Optional. A function to control how to read data from snapshot shards.
9480 shard_func: A function decorated with @Defun.
9481 Optional. A function to control how to shard data when writing a snapshot.
9482 compression: An optional `string`. Defaults to `""`.
9483 The type of compression to be applied to the saved snapshot files.
9484 reader_prefix: An optional `string`. Defaults to `""`.
9485 writer_prefix: An optional `string`. Defaults to `""`.
9486 hash_valid: An optional `bool`. Defaults to `False`.
9487 hash: An optional `int`. Defaults to `0`.
9488 metadata: An optional `string`. Defaults to `""`.
9489 name: A name for the operation (optional).
9491 Returns:
9492 A `Tensor` of type `variant`.
9493 """
9494 _ctx = _context._context or _context.context()
9495 tld = _ctx._thread_local_data
9496 if tld.is_eager:
9497 try:
9498 _result = pywrap_tfe.TFE_Py_FastPathExecute(
9499 _ctx, "SnapshotDatasetV2", name, input_dataset, path,
9500 reader_func_other_args, shard_func_other_args, "output_types",
9501 output_types, "output_shapes", output_shapes, "compression",
9502 compression, "reader_prefix", reader_prefix, "writer_prefix",
9503 writer_prefix, "hash_valid", hash_valid, "hash", hash, "reader_func",
9504 reader_func, "shard_func", shard_func, "metadata", metadata)
9505 return _result
9506 except _core._NotOkStatusException as e:
9507 _ops.raise_from_not_ok_status(e, name)
9508 except _core._FallbackException:
9509 pass
9510 try:
9511 return snapshot_dataset_v2_eager_fallback(
9512 input_dataset, path, reader_func_other_args, shard_func_other_args,
9513 output_types=output_types, output_shapes=output_shapes,
9514 compression=compression, reader_prefix=reader_prefix,
9515 writer_prefix=writer_prefix, hash_valid=hash_valid, hash=hash,
9516 reader_func=reader_func, shard_func=shard_func, metadata=metadata,
9517 name=name, ctx=_ctx)
9518 except _core._SymbolicException:
9519 pass # Add nodes to the TensorFlow graph.
9520 # Add nodes to the TensorFlow graph.
9521 if not isinstance(output_types, (list, tuple)):
9522 raise TypeError(
9523 "Expected list for 'output_types' argument to "
9524 "'snapshot_dataset_v2' Op, not %r." % output_types)
9525 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
9526 if not isinstance(output_shapes, (list, tuple)):
9527 raise TypeError(
9528 "Expected list for 'output_shapes' argument to "
9529 "'snapshot_dataset_v2' Op, not %r." % output_shapes)
9530 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
9531 if compression is None:
9532 compression = ""
9533 compression = _execute.make_str(compression, "compression")
9534 if reader_prefix is None:
9535 reader_prefix = ""
9536 reader_prefix = _execute.make_str(reader_prefix, "reader_prefix")
9537 if writer_prefix is None:
9538 writer_prefix = ""
9539 writer_prefix = _execute.make_str(writer_prefix, "writer_prefix")
9540 if hash_valid is None:
9541 hash_valid = False
9542 hash_valid = _execute.make_bool(hash_valid, "hash_valid")
9543 if hash is None:
9544 hash = 0
9545 hash = _execute.make_int(hash, "hash")
9546 if metadata is None:
9547 metadata = ""
9548 metadata = _execute.make_str(metadata, "metadata")
9549 _, _, _op, _outputs = _op_def_library._apply_op_helper(
9550 "SnapshotDatasetV2", input_dataset=input_dataset, path=path,
9551 reader_func_other_args=reader_func_other_args,
9552 shard_func_other_args=shard_func_other_args,
9553 output_types=output_types,
9554 output_shapes=output_shapes,
9555 reader_func=reader_func, shard_func=shard_func,
9556 compression=compression,
9557 reader_prefix=reader_prefix,
9558 writer_prefix=writer_prefix,
9559 hash_valid=hash_valid, hash=hash,
9560 metadata=metadata, name=name)
9561 _result = _outputs[:]
9562 if _execute.must_record_gradient():
9563 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
9564 _op.get_attr("output_shapes"), "compression",
9565 _op.get_attr("compression"), "reader_prefix",
9566 _op.get_attr("reader_prefix"), "writer_prefix",
9567 _op.get_attr("writer_prefix"), "hash_valid",
9568 _op._get_attr_bool("hash_valid"), "hash",
9569 _op._get_attr_int("hash"), "reader_func",
9570 _op.get_attr("reader_func"), "shard_func",
9571 _op.get_attr("shard_func"), "Treader_func_args",
9572 _op.get_attr("Treader_func_args"), "Tshard_func_args",
9573 _op.get_attr("Tshard_func_args"), "metadata",
9574 _op.get_attr("metadata"))
9575 _inputs_flat = _op.inputs
9576 _execute.record_gradient(
9577 "SnapshotDatasetV2", _inputs_flat, _attrs, _result)
9578 _result, = _result
9579 return _result
9581SnapshotDatasetV2 = tf_export("raw_ops.SnapshotDatasetV2")(_ops.to_raw_op(snapshot_dataset_v2))
9584def snapshot_dataset_v2_eager_fallback(input_dataset, path, reader_func_other_args, shard_func_other_args, output_types, output_shapes, reader_func, shard_func, compression, reader_prefix, writer_prefix, hash_valid, hash, metadata, name, ctx):
9585 if not isinstance(output_types, (list, tuple)):
9586 raise TypeError(
9587 "Expected list for 'output_types' argument to "
9588 "'snapshot_dataset_v2' Op, not %r." % output_types)
9589 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
9590 if not isinstance(output_shapes, (list, tuple)):
9591 raise TypeError(
9592 "Expected list for 'output_shapes' argument to "
9593 "'snapshot_dataset_v2' Op, not %r." % output_shapes)
9594 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
9595 if compression is None:
9596 compression = ""
9597 compression = _execute.make_str(compression, "compression")
9598 if reader_prefix is None:
9599 reader_prefix = ""
9600 reader_prefix = _execute.make_str(reader_prefix, "reader_prefix")
9601 if writer_prefix is None:
9602 writer_prefix = ""
9603 writer_prefix = _execute.make_str(writer_prefix, "writer_prefix")
9604 if hash_valid is None:
9605 hash_valid = False
9606 hash_valid = _execute.make_bool(hash_valid, "hash_valid")
9607 if hash is None:
9608 hash = 0
9609 hash = _execute.make_int(hash, "hash")
9610 if metadata is None:
9611 metadata = ""
9612 metadata = _execute.make_str(metadata, "metadata")
9613 _attr_Treader_func_args, reader_func_other_args = _execute.convert_to_mixed_eager_tensors(reader_func_other_args, ctx)
9614 _attr_Tshard_func_args, shard_func_other_args = _execute.convert_to_mixed_eager_tensors(shard_func_other_args, ctx)
9615 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
9616 path = _ops.convert_to_tensor(path, _dtypes.string)
9617 _inputs_flat = [input_dataset, path] + list(reader_func_other_args) + list(shard_func_other_args)
9618 _attrs = ("output_types", output_types, "output_shapes", output_shapes,
9619 "compression", compression, "reader_prefix", reader_prefix, "writer_prefix",
9620 writer_prefix, "hash_valid", hash_valid, "hash", hash, "reader_func",
9621 reader_func, "shard_func", shard_func, "Treader_func_args",
9622 _attr_Treader_func_args, "Tshard_func_args", _attr_Tshard_func_args,
9623 "metadata", metadata)
9624 _result = _execute.execute(b"SnapshotDatasetV2", 1, inputs=_inputs_flat,
9625 attrs=_attrs, ctx=ctx, name=name)
9626 if _execute.must_record_gradient():
9627 _execute.record_gradient(
9628 "SnapshotDatasetV2", _inputs_flat, _attrs, _result)
9629 _result, = _result
9630 return _result
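# Usage sketch: `SnapshotDatasetV2` is normally reached through
# `tf.data.experimental.snapshot` (since roughly TF 2.3), which supplies the
# `reader_func` / `shard_func` function attrs for you. The path is illustrative.
def _snapshot_v2_example():  # hypothetical helper; never called in this file
  import tensorflow as tf
  ds = tf.data.Dataset.range(100)
  return ds.apply(tf.data.experimental.snapshot(
      "/tmp/example_snapshot_v2", compression="AUTO"))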
9633def snapshot_nested_dataset_reader(inputs, output_types, output_shapes, name=None):
9634 r"""TODO: add doc.
9636 Args:
9637 inputs: A list of at least 1 `Tensor` objects with type `variant`.
9638 output_types: A list of `tf.DTypes` that has length `>= 1`.
9639 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
9640 name: A name for the operation (optional).
9642 Returns:
9643 A `Tensor` of type `variant`.
9644 """
9645 _ctx = _context._context or _context.context()
9646 tld = _ctx._thread_local_data
9647 if tld.is_eager:
9648 try:
9649 _result = pywrap_tfe.TFE_Py_FastPathExecute(
9650 _ctx, "SnapshotNestedDatasetReader", name, inputs, "output_types",
9651 output_types, "output_shapes", output_shapes)
9652 return _result
9653 except _core._NotOkStatusException as e:
9654 _ops.raise_from_not_ok_status(e, name)
9655 except _core._FallbackException:
9656 pass
9657 try:
9658 return snapshot_nested_dataset_reader_eager_fallback(
9659 inputs, output_types=output_types, output_shapes=output_shapes,
9660 name=name, ctx=_ctx)
9661 except _core._SymbolicException:
9662 pass # Add nodes to the TensorFlow graph.
9663 # Add nodes to the TensorFlow graph.
9664 if not isinstance(inputs, (list, tuple)):
9665 raise TypeError(
9666 "Expected list for 'inputs' argument to "
9667 "'snapshot_nested_dataset_reader' Op, not %r." % inputs)
9668 _attr_N = len(inputs)
9669 if not isinstance(output_types, (list, tuple)):
9670 raise TypeError(
9671 "Expected list for 'output_types' argument to "
9672 "'snapshot_nested_dataset_reader' Op, not %r." % output_types)
9673 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
9674 if not isinstance(output_shapes, (list, tuple)):
9675 raise TypeError(
9676 "Expected list for 'output_shapes' argument to "
9677 "'snapshot_nested_dataset_reader' Op, not %r." % output_shapes)
9678 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
9679 _, _, _op, _outputs = _op_def_library._apply_op_helper(
9680 "SnapshotNestedDatasetReader", inputs=inputs,
9681 output_types=output_types,
9682 output_shapes=output_shapes, name=name)
9683 _result = _outputs[:]
9684 if _execute.must_record_gradient():
9685 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
9686 _op.get_attr("output_shapes"), "N", _op._get_attr_int("N"))
9687 _inputs_flat = _op.inputs
9688 _execute.record_gradient(
9689 "SnapshotNestedDatasetReader", _inputs_flat, _attrs, _result)
9690 _result, = _result
9691 return _result
9693SnapshotNestedDatasetReader = tf_export("raw_ops.SnapshotNestedDatasetReader")(_ops.to_raw_op(snapshot_nested_dataset_reader))
9696def snapshot_nested_dataset_reader_eager_fallback(inputs, output_types, output_shapes, name, ctx):
9697 if not isinstance(inputs, (list, tuple)):
9698 raise TypeError(
9699 "Expected list for 'inputs' argument to "
9700 "'snapshot_nested_dataset_reader' Op, not %r." % inputs)
9701 _attr_N = len(inputs)
9702 if not isinstance(output_types, (list, tuple)):
9703 raise TypeError(
9704 "Expected list for 'output_types' argument to "
9705 "'snapshot_nested_dataset_reader' Op, not %r." % output_types)
9706 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
9707 if not isinstance(output_shapes, (list, tuple)):
9708 raise TypeError(
9709 "Expected list for 'output_shapes' argument to "
9710 "'snapshot_nested_dataset_reader' Op, not %r." % output_shapes)
9711 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
9712 inputs = _ops.convert_n_to_tensor(inputs, _dtypes.variant)
9713 _inputs_flat = list(inputs)
9714 _attrs = ("output_types", output_types, "output_shapes", output_shapes, "N",
9715 _attr_N)
9716 _result = _execute.execute(b"SnapshotNestedDatasetReader", 1,
9717 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
9718 name=name)
9719 if _execute.must_record_gradient():
9720 _execute.record_gradient(
9721 "SnapshotNestedDatasetReader", _inputs_flat, _attrs, _result)
9722 _result, = _result
9723 return _result
9726def sql_dataset(driver_name, data_source_name, query, output_types, output_shapes, name=None):
9727 r"""Creates a dataset that executes a SQL query and emits rows of the result set.
9729 Args:
9730 driver_name: A `Tensor` of type `string`.
9731 The database type. Currently, the only supported type is 'sqlite'.
9732 data_source_name: A `Tensor` of type `string`.
9733 A connection string to connect to the database.
9734 query: A `Tensor` of type `string`. A SQL query to execute.
9735 output_types: A list of `tf.DTypes` that has length `>= 1`.
9736 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
9737 name: A name for the operation (optional).
9739 Returns:
9740 A `Tensor` of type `variant`.
9741 """
9742 _ctx = _context._context or _context.context()
9743 tld = _ctx._thread_local_data
9744 if tld.is_eager:
9745 try:
9746 _result = pywrap_tfe.TFE_Py_FastPathExecute(
9747 _ctx, "SqlDataset", name, driver_name, data_source_name, query,
9748 "output_types", output_types, "output_shapes", output_shapes)
9749 return _result
9750 except _core._NotOkStatusException as e:
9751 _ops.raise_from_not_ok_status(e, name)
9752 except _core._FallbackException:
9753 pass
9754 try:
9755 return sql_dataset_eager_fallback(
9756 driver_name, data_source_name, query, output_types=output_types,
9757 output_shapes=output_shapes, name=name, ctx=_ctx)
9758 except _core._SymbolicException:
9759 pass # Add nodes to the TensorFlow graph.
9760 # Add nodes to the TensorFlow graph.
9761 if not isinstance(output_types, (list, tuple)):
9762 raise TypeError(
9763 "Expected list for 'output_types' argument to "
9764 "'sql_dataset' Op, not %r." % output_types)
9765 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
9766 if not isinstance(output_shapes, (list, tuple)):
9767 raise TypeError(
9768 "Expected list for 'output_shapes' argument to "
9769 "'sql_dataset' Op, not %r." % output_shapes)
9770 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
9771 _, _, _op, _outputs = _op_def_library._apply_op_helper(
9772 "SqlDataset", driver_name=driver_name,
9773 data_source_name=data_source_name, query=query,
9774 output_types=output_types, output_shapes=output_shapes,
9775 name=name)
9776 _result = _outputs[:]
9777 if _execute.must_record_gradient():
9778 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
9779 _op.get_attr("output_shapes"))
9780 _inputs_flat = _op.inputs
9781 _execute.record_gradient(
9782 "SqlDataset", _inputs_flat, _attrs, _result)
9783 _result, = _result
9784 return _result
9786SqlDataset = tf_export("raw_ops.SqlDataset")(_ops.to_raw_op(sql_dataset))
9789def sql_dataset_eager_fallback(driver_name, data_source_name, query, output_types, output_shapes, name, ctx):
9790 if not isinstance(output_types, (list, tuple)):
9791 raise TypeError(
9792 "Expected list for 'output_types' argument to "
9793 "'sql_dataset' Op, not %r." % output_types)
9794 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
9795 if not isinstance(output_shapes, (list, tuple)):
9796 raise TypeError(
9797 "Expected list for 'output_shapes' argument to "
9798 "'sql_dataset' Op, not %r." % output_shapes)
9799 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
9800 driver_name = _ops.convert_to_tensor(driver_name, _dtypes.string)
9801 data_source_name = _ops.convert_to_tensor(data_source_name, _dtypes.string)
9802 query = _ops.convert_to_tensor(query, _dtypes.string)
9803 _inputs_flat = [driver_name, data_source_name, query]
9804 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
9805 _result = _execute.execute(b"SqlDataset", 1, inputs=_inputs_flat,
9806 attrs=_attrs, ctx=ctx, name=name)
9807 if _execute.must_record_gradient():
9808 _execute.record_gradient(
9809 "SqlDataset", _inputs_flat, _attrs, _result)
9810 _result, = _result
9811 return _result
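# Illustrative sketch: the public wrapper for this op is
# tf.data.experimental.SqlDataset. The database path and the `users`
# table below are hypothetical.
def _example_sql_dataset_usage():
  import tensorflow as tf
  ds = tf.data.experimental.SqlDataset(
      "sqlite", "/tmp/example.db",
      "SELECT name, age FROM users", (tf.string, tf.int32))
  for name, age in ds:
    print(name.numpy(), age.numpy())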
9814def stats_aggregator_handle(container="", shared_name="", name=None):
9815 r"""Creates a statistics manager resource.
9817 Args:
9818 container: An optional `string`. Defaults to `""`.
9819 shared_name: An optional `string`. Defaults to `""`.
9820 name: A name for the operation (optional).
9822 Returns:
9823 A `Tensor` of type `resource`.
9824 """
9825 _ctx = _context._context or _context.context()
9826 tld = _ctx._thread_local_data
9827 if tld.is_eager:
9828 try:
9829 _result = pywrap_tfe.TFE_Py_FastPathExecute(
9830 _ctx, "StatsAggregatorHandle", name, "container", container,
9831 "shared_name", shared_name)
9832 return _result
9833 except _core._NotOkStatusException as e:
9834 _ops.raise_from_not_ok_status(e, name)
9835 except _core._FallbackException:
9836 pass
9837 try:
9838 return stats_aggregator_handle_eager_fallback(
9839 container=container, shared_name=shared_name, name=name, ctx=_ctx)
9840 except _core._SymbolicException:
9841 pass # Add nodes to the TensorFlow graph.
9842 # Add nodes to the TensorFlow graph.
9843 if container is None:
9844 container = ""
9845 container = _execute.make_str(container, "container")
9846 if shared_name is None:
9847 shared_name = ""
9848 shared_name = _execute.make_str(shared_name, "shared_name")
9849 _, _, _op, _outputs = _op_def_library._apply_op_helper(
9850 "StatsAggregatorHandle", container=container, shared_name=shared_name,
9851 name=name)
9852 _result = _outputs[:]
9853 if _execute.must_record_gradient():
9854 _attrs = ("container", _op.get_attr("container"), "shared_name",
9855 _op.get_attr("shared_name"))
9856 _inputs_flat = _op.inputs
9857 _execute.record_gradient(
9858 "StatsAggregatorHandle", _inputs_flat, _attrs, _result)
9859 _result, = _result
9860 return _result
9862StatsAggregatorHandle = tf_export("raw_ops.StatsAggregatorHandle")(_ops.to_raw_op(stats_aggregator_handle))
9865def stats_aggregator_handle_eager_fallback(container, shared_name, name, ctx):
9866 if container is None:
9867 container = ""
9868 container = _execute.make_str(container, "container")
9869 if shared_name is None:
9870 shared_name = ""
9871 shared_name = _execute.make_str(shared_name, "shared_name")
9872 _inputs_flat = []
9873 _attrs = ("container", container, "shared_name", shared_name)
9874 _result = _execute.execute(b"StatsAggregatorHandle", 1, inputs=_inputs_flat,
9875 attrs=_attrs, ctx=ctx, name=name)
9876 if _execute.must_record_gradient():
9877 _execute.record_gradient(
9878 "StatsAggregatorHandle", _inputs_flat, _attrs, _result)
9879 _result, = _result
9880 return _result
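# Minimal eager sketch: the returned handle is a `resource` tensor; user
# code usually reaches it through tf.data.experimental.StatsAggregator
# rather than calling this wrapper directly. `shared_name` is arbitrary.
def _example_stats_aggregator_handle_usage():
  handle = stats_aggregator_handle(shared_name="my_stats")
  return handle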
9883def stats_aggregator_handle_v2(container="", shared_name="", name=None):
9884 r"""TODO: add doc.
9886 Args:
9887 container: An optional `string`. Defaults to `""`.
9888 shared_name: An optional `string`. Defaults to `""`.
9889 name: A name for the operation (optional).
9891 Returns:
9892 A `Tensor` of type `resource`.
9893 """
9894 _ctx = _context._context or _context.context()
9895 tld = _ctx._thread_local_data
9896 if tld.is_eager:
9897 try:
9898 _result = pywrap_tfe.TFE_Py_FastPathExecute(
9899 _ctx, "StatsAggregatorHandleV2", name, "container", container,
9900 "shared_name", shared_name)
9901 return _result
9902 except _core._NotOkStatusException as e:
9903 _ops.raise_from_not_ok_status(e, name)
9904 except _core._FallbackException:
9905 pass
9906 try:
9907 return stats_aggregator_handle_v2_eager_fallback(
9908 container=container, shared_name=shared_name, name=name, ctx=_ctx)
9909 except _core._SymbolicException:
9910 pass # Add nodes to the TensorFlow graph.
9911 # Add nodes to the TensorFlow graph.
9912 if container is None:
9913 container = ""
9914 container = _execute.make_str(container, "container")
9915 if shared_name is None:
9916 shared_name = ""
9917 shared_name = _execute.make_str(shared_name, "shared_name")
9918 _, _, _op, _outputs = _op_def_library._apply_op_helper(
9919 "StatsAggregatorHandleV2", container=container,
9920 shared_name=shared_name, name=name)
9921 _result = _outputs[:]
9922 if _execute.must_record_gradient():
9923 _attrs = ("container", _op.get_attr("container"), "shared_name",
9924 _op.get_attr("shared_name"))
9925 _inputs_flat = _op.inputs
9926 _execute.record_gradient(
9927 "StatsAggregatorHandleV2", _inputs_flat, _attrs, _result)
9928 _result, = _result
9929 return _result
9931StatsAggregatorHandleV2 = tf_export("raw_ops.StatsAggregatorHandleV2")(_ops.to_raw_op(stats_aggregator_handle_v2))
9934def stats_aggregator_handle_v2_eager_fallback(container, shared_name, name, ctx):
9935 if container is None:
9936 container = ""
9937 container = _execute.make_str(container, "container")
9938 if shared_name is None:
9939 shared_name = ""
9940 shared_name = _execute.make_str(shared_name, "shared_name")
9941 _inputs_flat = []
9942 _attrs = ("container", container, "shared_name", shared_name)
9943 _result = _execute.execute(b"StatsAggregatorHandleV2", 1,
9944 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
9945 name=name)
9946 if _execute.must_record_gradient():
9947 _execute.record_gradient(
9948 "StatsAggregatorHandleV2", _inputs_flat, _attrs, _result)
9949 _result, = _result
9950 return _result
9953def stats_aggregator_set_summary_writer(stats_aggregator, summary, name=None):
9954 r"""Set a summary_writer_interface to record statistics using given stats_aggregator.
9956 Args:
9957 stats_aggregator: A `Tensor` of type `resource`.
9958 summary: A `Tensor` of type `resource`.
9959 name: A name for the operation (optional).
9961 Returns:
9962 The created Operation.
9963 """
9964 _ctx = _context._context or _context.context()
9965 tld = _ctx._thread_local_data
9966 if tld.is_eager:
9967 try:
9968 _result = pywrap_tfe.TFE_Py_FastPathExecute(
9969 _ctx, "StatsAggregatorSetSummaryWriter", name, stats_aggregator,
9970 summary)
9971 return _result
9972 except _core._NotOkStatusException as e:
9973 _ops.raise_from_not_ok_status(e, name)
9974 except _core._FallbackException:
9975 pass
9976 try:
9977 return stats_aggregator_set_summary_writer_eager_fallback(
9978 stats_aggregator, summary, name=name, ctx=_ctx)
9979 except _core._SymbolicException:
9980 pass # Add nodes to the TensorFlow graph.
9981 # Add nodes to the TensorFlow graph.
9982 _, _, _op, _outputs = _op_def_library._apply_op_helper(
9983 "StatsAggregatorSetSummaryWriter", stats_aggregator=stats_aggregator,
9984 summary=summary, name=name)
9985 return _op
9986StatsAggregatorSetSummaryWriter = tf_export("raw_ops.StatsAggregatorSetSummaryWriter")(_ops.to_raw_op(stats_aggregator_set_summary_writer))
9989def stats_aggregator_set_summary_writer_eager_fallback(stats_aggregator, summary, name, ctx):
9990 stats_aggregator = _ops.convert_to_tensor(stats_aggregator, _dtypes.resource)
9991 summary = _ops.convert_to_tensor(summary, _dtypes.resource)
9992 _inputs_flat = [stats_aggregator, summary]
9993 _attrs = None
9994 _result = _execute.execute(b"StatsAggregatorSetSummaryWriter", 0,
9995 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
9996 name=name)
9997 _result = None
9998 return _result
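# Sketch of the wiring this op performs: attach a summary writer to an
# aggregator so recorded statistics flow to its event files. Pulling the
# writer's underlying resource via `_resource` is private API and an
# assumption here.
def _example_set_summary_writer_usage():
  import tensorflow as tf
  agg = stats_aggregator_handle_v2(shared_name="my_stats")
  writer = tf.summary.create_file_writer("/tmp/logs")  # hypothetical log dir
  stats_aggregator_set_summary_writer(agg, writer._resource)  # private attr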
10001def stats_aggregator_summary(iterator, name=None):
10002 r"""Produces a summary of any statistics recorded by the given statistics manager.
10004 Args:
10005 iterator: A `Tensor` of type `resource`.
10006 name: A name for the operation (optional).
10008 Returns:
10009 A `Tensor` of type `string`.
10010 """
10011 _ctx = _context._context or _context.context()
10012 tld = _ctx._thread_local_data
10013 if tld.is_eager:
10014 try:
10015 _result = pywrap_tfe.TFE_Py_FastPathExecute(
10016 _ctx, "StatsAggregatorSummary", name, iterator)
10017 return _result
10018 except _core._NotOkStatusException as e:
10019 _ops.raise_from_not_ok_status(e, name)
10020 except _core._FallbackException:
10021 pass
10022 try:
10023 return stats_aggregator_summary_eager_fallback(
10024 iterator, name=name, ctx=_ctx)
10025 except _core._SymbolicException:
10026 pass # Add nodes to the TensorFlow graph.
10027 # Add nodes to the TensorFlow graph.
10028 _, _, _op, _outputs = _op_def_library._apply_op_helper(
10029 "StatsAggregatorSummary", iterator=iterator, name=name)
10030 _result = _outputs[:]
10031 if _execute.must_record_gradient():
10032 _attrs = ()
10033 _inputs_flat = _op.inputs
10034 _execute.record_gradient(
10035 "StatsAggregatorSummary", _inputs_flat, _attrs, _result)
10036 _result, = _result
10037 return _result
10039StatsAggregatorSummary = tf_export("raw_ops.StatsAggregatorSummary")(_ops.to_raw_op(stats_aggregator_summary))
10042def stats_aggregator_summary_eager_fallback(iterator, name, ctx):
10043 iterator = _ops.convert_to_tensor(iterator, _dtypes.resource)
10044 _inputs_flat = [iterator]
10045 _attrs = None
10046 _result = _execute.execute(b"StatsAggregatorSummary", 1,
10047 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
10048 name=name)
10049 if _execute.must_record_gradient():
10050 _execute.record_gradient(
10051 "StatsAggregatorSummary", _inputs_flat, _attrs, _result)
10052 _result, = _result
10053 return _result
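# Sketch: serializing recorded statistics. Despite the parameter name
# `iterator`, the input appears to be the aggregator resource created by
# stats_aggregator_handle above (inferred from the docstring).
def _example_stats_summary_usage():
  handle = stats_aggregator_handle(shared_name="my_stats")
  return stats_aggregator_summary(handle)  # scalar `string` Summary proto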
10056def take_while_dataset(input_dataset, other_arguments, predicate, output_types, output_shapes, metadata="", name=None):
10057 r"""Creates a dataset that stops iteration when predicate` is false.
10059 The `predicate` function must return a scalar boolean and accept the
10060 following arguments:
10062 * One tensor for each component of an element of `input_dataset`.
10063 * One tensor for each value in `other_arguments`.
10065 Args:
10066 input_dataset: A `Tensor` of type `variant`.
10067 other_arguments: A list of `Tensor` objects.
10068 A list of tensors, typically values that were captured when
10069 building a closure for `predicate`.
10070 predicate: A function decorated with @Defun.
10071 A function returning a scalar boolean.
10072 output_types: A list of `tf.DTypes` that has length `>= 1`.
10073 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
10074 metadata: An optional `string`. Defaults to `""`.
10075 name: A name for the operation (optional).
10077 Returns:
10078 A `Tensor` of type `variant`.
10079 """
10080 _ctx = _context._context or _context.context()
10081 tld = _ctx._thread_local_data
10082 if tld.is_eager:
10083 try:
10084 _result = pywrap_tfe.TFE_Py_FastPathExecute(
10085 _ctx, "TakeWhileDataset", name, input_dataset, other_arguments,
10086 "predicate", predicate, "output_types", output_types, "output_shapes",
10087 output_shapes, "metadata", metadata)
10088 return _result
10089 except _core._NotOkStatusException as e:
10090 _ops.raise_from_not_ok_status(e, name)
10091 except _core._FallbackException:
10092 pass
10093 try:
10094 return take_while_dataset_eager_fallback(
10095 input_dataset, other_arguments, predicate=predicate,
10096 output_types=output_types, output_shapes=output_shapes,
10097 metadata=metadata, name=name, ctx=_ctx)
10098 except _core._SymbolicException:
10099 pass # Add nodes to the TensorFlow graph.
10100 # Add nodes to the TensorFlow graph.
10101 if not isinstance(output_types, (list, tuple)):
10102 raise TypeError(
10103 "Expected list for 'output_types' argument to "
10104 "'take_while_dataset' Op, not %r." % output_types)
10105 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
10106 if not isinstance(output_shapes, (list, tuple)):
10107 raise TypeError(
10108 "Expected list for 'output_shapes' argument to "
10109 "'take_while_dataset' Op, not %r." % output_shapes)
10110 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
10111 if metadata is None:
10112 metadata = ""
10113 metadata = _execute.make_str(metadata, "metadata")
10114 _, _, _op, _outputs = _op_def_library._apply_op_helper(
10115 "TakeWhileDataset", input_dataset=input_dataset,
10116 other_arguments=other_arguments,
10117 predicate=predicate, output_types=output_types,
10118 output_shapes=output_shapes, metadata=metadata,
10119 name=name)
10120 _result = _outputs[:]
10121 if _execute.must_record_gradient():
10122 _attrs = ("predicate", _op.get_attr("predicate"), "Targuments",
10123 _op.get_attr("Targuments"), "output_types",
10124 _op.get_attr("output_types"), "output_shapes",
10125 _op.get_attr("output_shapes"), "metadata",
10126 _op.get_attr("metadata"))
10127 _inputs_flat = _op.inputs
10128 _execute.record_gradient(
10129 "TakeWhileDataset", _inputs_flat, _attrs, _result)
10130 _result, = _result
10131 return _result
10133TakeWhileDataset = tf_export("raw_ops.TakeWhileDataset")(_ops.to_raw_op(take_while_dataset))
10136def take_while_dataset_eager_fallback(input_dataset, other_arguments, predicate, output_types, output_shapes, metadata, name, ctx):
10137 if not isinstance(output_types, (list, tuple)):
10138 raise TypeError(
10139 "Expected list for 'output_types' argument to "
10140 "'take_while_dataset' Op, not %r." % output_types)
10141 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
10142 if not isinstance(output_shapes, (list, tuple)):
10143 raise TypeError(
10144 "Expected list for 'output_shapes' argument to "
10145 "'take_while_dataset' Op, not %r." % output_shapes)
10146 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
10147 if metadata is None:
10148 metadata = ""
10149 metadata = _execute.make_str(metadata, "metadata")
10150 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx)
10151 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
10152 _inputs_flat = [input_dataset] + list(other_arguments)
10153 _attrs = ("predicate", predicate, "Targuments", _attr_Targuments,
10154 "output_types", output_types, "output_shapes", output_shapes, "metadata",
10155 metadata)
10156 _result = _execute.execute(b"TakeWhileDataset", 1, inputs=_inputs_flat,
10157 attrs=_attrs, ctx=ctx, name=name)
10158 if _execute.must_record_gradient():
10159 _execute.record_gradient(
10160 "TakeWhileDataset", _inputs_flat, _attrs, _result)
10161 _result, = _result
10162 return _result
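# Illustrative sketch: the public route to TakeWhileDataset is
# tf.data.experimental.take_while; iteration stops at the first element
# for which the predicate returns False.
def _example_take_while_usage():
  import tensorflow as tf
  ds = tf.data.Dataset.range(10)
  ds = ds.apply(tf.data.experimental.take_while(lambda x: x < 5))
  return list(ds.as_numpy_iterator())  # [0, 1, 2, 3, 4]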
10165def thread_pool_dataset(input_dataset, thread_pool, output_types, output_shapes, name=None):
10166 r"""Creates a dataset that uses a custom thread pool to compute `input_dataset`.
10168 Args:
10169 input_dataset: A `Tensor` of type `variant`.
10170 thread_pool: A `Tensor` of type `resource`.
10171 A resource produced by the ThreadPoolHandle op.
10172 output_types: A list of `tf.DTypes` that has length `>= 1`.
10173 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
10174 name: A name for the operation (optional).
10176 Returns:
10177 A `Tensor` of type `variant`.
10178 """
10179 _ctx = _context._context or _context.context()
10180 tld = _ctx._thread_local_data
10181 if tld.is_eager:
10182 try:
10183 _result = pywrap_tfe.TFE_Py_FastPathExecute(
10184 _ctx, "ThreadPoolDataset", name, input_dataset, thread_pool,
10185 "output_types", output_types, "output_shapes", output_shapes)
10186 return _result
10187 except _core._NotOkStatusException as e:
10188 _ops.raise_from_not_ok_status(e, name)
10189 except _core._FallbackException:
10190 pass
10191 try:
10192 return thread_pool_dataset_eager_fallback(
10193 input_dataset, thread_pool, output_types=output_types,
10194 output_shapes=output_shapes, name=name, ctx=_ctx)
10195 except _core._SymbolicException:
10196 pass # Add nodes to the TensorFlow graph.
10197 # Add nodes to the TensorFlow graph.
10198 if not isinstance(output_types, (list, tuple)):
10199 raise TypeError(
10200 "Expected list for 'output_types' argument to "
10201 "'thread_pool_dataset' Op, not %r." % output_types)
10202 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
10203 if not isinstance(output_shapes, (list, tuple)):
10204 raise TypeError(
10205 "Expected list for 'output_shapes' argument to "
10206 "'thread_pool_dataset' Op, not %r." % output_shapes)
10207 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
10208 _, _, _op, _outputs = _op_def_library._apply_op_helper(
10209 "ThreadPoolDataset", input_dataset=input_dataset,
10210 thread_pool=thread_pool,
10211 output_types=output_types,
10212 output_shapes=output_shapes, name=name)
10213 _result = _outputs[:]
10214 if _execute.must_record_gradient():
10215 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
10216 _op.get_attr("output_shapes"))
10217 _inputs_flat = _op.inputs
10218 _execute.record_gradient(
10219 "ThreadPoolDataset", _inputs_flat, _attrs, _result)
10220 _result, = _result
10221 return _result
10223ThreadPoolDataset = tf_export("raw_ops.ThreadPoolDataset")(_ops.to_raw_op(thread_pool_dataset))
10226def thread_pool_dataset_eager_fallback(input_dataset, thread_pool, output_types, output_shapes, name, ctx):
10227 if not isinstance(output_types, (list, tuple)):
10228 raise TypeError(
10229 "Expected list for 'output_types' argument to "
10230 "'thread_pool_dataset' Op, not %r." % output_types)
10231 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
10232 if not isinstance(output_shapes, (list, tuple)):
10233 raise TypeError(
10234 "Expected list for 'output_shapes' argument to "
10235 "'thread_pool_dataset' Op, not %r." % output_shapes)
10236 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
10237 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
10238 thread_pool = _ops.convert_to_tensor(thread_pool, _dtypes.resource)
10239 _inputs_flat = [input_dataset, thread_pool]
10240 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
10241 _result = _execute.execute(b"ThreadPoolDataset", 1, inputs=_inputs_flat,
10242 attrs=_attrs, ctx=ctx, name=name)
10243 if _execute.must_record_gradient():
10244 _execute.record_gradient(
10245 "ThreadPoolDataset", _inputs_flat, _attrs, _result)
10246 _result, = _result
10247 return _result
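# Sketch: routing a dataset variant through a custom pool. The handle
# comes from thread_pool_handle (defined next in this file); accessing
# `_variant_tensor` is private Dataset API and an assumption here.
def _example_thread_pool_dataset_usage():
  import tensorflow as tf
  pool = thread_pool_handle(num_threads=4, display_name="my_pool")
  return thread_pool_dataset(
      tf.data.Dataset.range(10)._variant_tensor, pool,  # private attr
      output_types=[tf.int64], output_shapes=[[]])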
10250def thread_pool_handle(num_threads, display_name, max_intra_op_parallelism=1, container="", shared_name="", name=None):
10251 r"""Creates a dataset that uses a custom thread pool to compute `input_dataset`.
10253 Args:
10254 num_threads: An `int`. The number of threads in the thread pool.
10255 display_name: A `string`.
10256 A human-readable name for the threads that may be visible in some
10257 visualizations.
10259 max_intra_op_parallelism: An optional `int`. Defaults to `1`.
10260 The maximum degree of parallelism to use within operations that execute on this
10261 threadpool.
10262 container: An optional `string`. Defaults to `""`.
10263 shared_name: An optional `string`. Defaults to `""`.
10264 name: A name for the operation (optional).
10266 Returns:
10267 A `Tensor` of type `resource`.
10268 """
10269 _ctx = _context._context or _context.context()
10270 tld = _ctx._thread_local_data
10271 if tld.is_eager:
10272 try:
10273 _result = pywrap_tfe.TFE_Py_FastPathExecute(
10274 _ctx, "ThreadPoolHandle", name, "num_threads", num_threads,
10275 "max_intra_op_parallelism", max_intra_op_parallelism, "display_name",
10276 display_name, "container", container, "shared_name", shared_name)
10277 return _result
10278 except _core._NotOkStatusException as e:
10279 _ops.raise_from_not_ok_status(e, name)
10280 except _core._FallbackException:
10281 pass
10282 try:
10283 return thread_pool_handle_eager_fallback(
10284 num_threads=num_threads,
10285 max_intra_op_parallelism=max_intra_op_parallelism,
10286 display_name=display_name, container=container,
10287 shared_name=shared_name, name=name, ctx=_ctx)
10288 except _core._SymbolicException:
10289 pass # Add nodes to the TensorFlow graph.
10290 # Add nodes to the TensorFlow graph.
10291 num_threads = _execute.make_int(num_threads, "num_threads")
10292 display_name = _execute.make_str(display_name, "display_name")
10293 if max_intra_op_parallelism is None:
10294 max_intra_op_parallelism = 1
10295 max_intra_op_parallelism = _execute.make_int(max_intra_op_parallelism, "max_intra_op_parallelism")
10296 if container is None:
10297 container = ""
10298 container = _execute.make_str(container, "container")
10299 if shared_name is None:
10300 shared_name = ""
10301 shared_name = _execute.make_str(shared_name, "shared_name")
10302 _, _, _op, _outputs = _op_def_library._apply_op_helper(
10303 "ThreadPoolHandle", num_threads=num_threads,
10304 display_name=display_name,
10305 max_intra_op_parallelism=max_intra_op_parallelism,
10306 container=container, shared_name=shared_name,
10307 name=name)
10308 _result = _outputs[:]
10309 if _execute.must_record_gradient():
10310 _attrs = ("num_threads", _op._get_attr_int("num_threads"),
10311 "max_intra_op_parallelism",
10312 _op._get_attr_int("max_intra_op_parallelism"), "display_name",
10313 _op.get_attr("display_name"), "container",
10314 _op.get_attr("container"), "shared_name",
10315 _op.get_attr("shared_name"))
10316 _inputs_flat = _op.inputs
10317 _execute.record_gradient(
10318 "ThreadPoolHandle", _inputs_flat, _attrs, _result)
10319 _result, = _result
10320 return _result
10322ThreadPoolHandle = tf_export("raw_ops.ThreadPoolHandle")(_ops.to_raw_op(thread_pool_handle))
10325def thread_pool_handle_eager_fallback(num_threads, display_name, max_intra_op_parallelism, container, shared_name, name, ctx):
10326 num_threads = _execute.make_int(num_threads, "num_threads")
10327 display_name = _execute.make_str(display_name, "display_name")
10328 if max_intra_op_parallelism is None:
10329 max_intra_op_parallelism = 1
10330 max_intra_op_parallelism = _execute.make_int(max_intra_op_parallelism, "max_intra_op_parallelism")
10331 if container is None:
10332 container = ""
10333 container = _execute.make_str(container, "container")
10334 if shared_name is None:
10335 shared_name = ""
10336 shared_name = _execute.make_str(shared_name, "shared_name")
10337 _inputs_flat = []
10338 _attrs = ("num_threads", num_threads, "max_intra_op_parallelism",
10339 max_intra_op_parallelism, "display_name", display_name, "container",
10340 container, "shared_name", shared_name)
10341 _result = _execute.execute(b"ThreadPoolHandle", 1, inputs=_inputs_flat,
10342 attrs=_attrs, ctx=ctx, name=name)
10343 if _execute.must_record_gradient():
10344 _execute.record_gradient(
10345 "ThreadPoolHandle", _inputs_flat, _attrs, _result)
10346 _result, = _result
10347 return _result
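# Sketch of handle creation on its own: only `num_threads` and
# `display_name` are required; `shared_name` lets separate calls share
# one pool (the values below are arbitrary).
def _example_thread_pool_handle_usage():
  return thread_pool_handle(
      num_threads=8, display_name="io_pool",
      max_intra_op_parallelism=1, shared_name="io_pool")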
10350def unbatch_dataset(input_dataset, output_types, output_shapes, metadata="", name=None):
10351 r"""A dataset that splits the elements of its input into multiple elements.
10353 Args:
10354 input_dataset: A `Tensor` of type `variant`.
10355 output_types: A list of `tf.DTypes` that has length `>= 1`.
10356 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
10357 metadata: An optional `string`. Defaults to `""`.
10358 name: A name for the operation (optional).
10360 Returns:
10361 A `Tensor` of type `variant`.
10362 """
10363 _ctx = _context._context or _context.context()
10364 tld = _ctx._thread_local_data
10365 if tld.is_eager:
10366 try:
10367 _result = pywrap_tfe.TFE_Py_FastPathExecute(
10368 _ctx, "UnbatchDataset", name, input_dataset, "output_types",
10369 output_types, "output_shapes", output_shapes, "metadata", metadata)
10370 return _result
10371 except _core._NotOkStatusException as e:
10372 _ops.raise_from_not_ok_status(e, name)
10373 except _core._FallbackException:
10374 pass
10375 try:
10376 return unbatch_dataset_eager_fallback(
10377 input_dataset, output_types=output_types,
10378 output_shapes=output_shapes, metadata=metadata, name=name, ctx=_ctx)
10379 except _core._SymbolicException:
10380 pass # Add nodes to the TensorFlow graph.
10381 # Add nodes to the TensorFlow graph.
10382 if not isinstance(output_types, (list, tuple)):
10383 raise TypeError(
10384 "Expected list for 'output_types' argument to "
10385 "'unbatch_dataset' Op, not %r." % output_types)
10386 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
10387 if not isinstance(output_shapes, (list, tuple)):
10388 raise TypeError(
10389 "Expected list for 'output_shapes' argument to "
10390 "'unbatch_dataset' Op, not %r." % output_shapes)
10391 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
10392 if metadata is None:
10393 metadata = ""
10394 metadata = _execute.make_str(metadata, "metadata")
10395 _, _, _op, _outputs = _op_def_library._apply_op_helper(
10396 "UnbatchDataset", input_dataset=input_dataset,
10397 output_types=output_types,
10398 output_shapes=output_shapes, metadata=metadata,
10399 name=name)
10400 _result = _outputs[:]
10401 if _execute.must_record_gradient():
10402 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
10403 _op.get_attr("output_shapes"), "metadata",
10404 _op.get_attr("metadata"))
10405 _inputs_flat = _op.inputs
10406 _execute.record_gradient(
10407 "UnbatchDataset", _inputs_flat, _attrs, _result)
10408 _result, = _result
10409 return _result
10411UnbatchDataset = tf_export("raw_ops.UnbatchDataset")(_ops.to_raw_op(unbatch_dataset))
10414def unbatch_dataset_eager_fallback(input_dataset, output_types, output_shapes, metadata, name, ctx):
10415 if not isinstance(output_types, (list, tuple)):
10416 raise TypeError(
10417 "Expected list for 'output_types' argument to "
10418 "'unbatch_dataset' Op, not %r." % output_types)
10419 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
10420 if not isinstance(output_shapes, (list, tuple)):
10421 raise TypeError(
10422 "Expected list for 'output_shapes' argument to "
10423 "'unbatch_dataset' Op, not %r." % output_shapes)
10424 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
10425 if metadata is None:
10426 metadata = ""
10427 metadata = _execute.make_str(metadata, "metadata")
10428 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
10429 _inputs_flat = [input_dataset]
10430 _attrs = ("output_types", output_types, "output_shapes", output_shapes,
10431 "metadata", metadata)
10432 _result = _execute.execute(b"UnbatchDataset", 1, inputs=_inputs_flat,
10433 attrs=_attrs, ctx=ctx, name=name)
10434 if _execute.must_record_gradient():
10435 _execute.record_gradient(
10436 "UnbatchDataset", _inputs_flat, _attrs, _result)
10437 _result, = _result
10438 return _result
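# Illustrative sketch: the public counterpart is Dataset.unbatch(),
# which splits each element along its first dimension.
def _example_unbatch_usage():
  import tensorflow as tf
  ds = tf.data.Dataset.from_tensor_slices([[1, 2], [3, 4]])
  return list(ds.unbatch().as_numpy_iterator())  # [1, 2, 3, 4]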
10441def uncompress_element(compressed, output_types, output_shapes, name=None):
10442 r"""Uncompresses a compressed dataset element.
10444 Args:
10445 compressed: A `Tensor` of type `variant`.
10446 output_types: A list of `tf.DTypes` that has length `>= 1`.
10447 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
10448 name: A name for the operation (optional).
10450 Returns:
10451 A list of `Tensor` objects of type `output_types`.
10452 """
10453 _ctx = _context._context or _context.context()
10454 tld = _ctx._thread_local_data
10455 if tld.is_eager:
10456 try:
10457 _result = pywrap_tfe.TFE_Py_FastPathExecute(
10458 _ctx, "UncompressElement", name, compressed, "output_types",
10459 output_types, "output_shapes", output_shapes)
10460 return _result
10461 except _core._NotOkStatusException as e:
10462 _ops.raise_from_not_ok_status(e, name)
10463 except _core._FallbackException:
10464 pass
10465 try:
10466 return uncompress_element_eager_fallback(
10467 compressed, output_types=output_types, output_shapes=output_shapes,
10468 name=name, ctx=_ctx)
10469 except _core._SymbolicException:
10470 pass # Add nodes to the TensorFlow graph.
10471 # Add nodes to the TensorFlow graph.
10472 if not isinstance(output_types, (list, tuple)):
10473 raise TypeError(
10474 "Expected list for 'output_types' argument to "
10475 "'uncompress_element' Op, not %r." % output_types)
10476 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
10477 if not isinstance(output_shapes, (list, tuple)):
10478 raise TypeError(
10479 "Expected list for 'output_shapes' argument to "
10480 "'uncompress_element' Op, not %r." % output_shapes)
10481 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
10482 _, _, _op, _outputs = _op_def_library._apply_op_helper(
10483 "UncompressElement", compressed=compressed, output_types=output_types,
10484 output_shapes=output_shapes, name=name)
10485 _result = _outputs[:]
10486 if _execute.must_record_gradient():
10487 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
10488 _op.get_attr("output_shapes"))
10489 _inputs_flat = _op.inputs
10490 _execute.record_gradient(
10491 "UncompressElement", _inputs_flat, _attrs, _result)
10492 return _result
10494UncompressElement = tf_export("raw_ops.UncompressElement")(_ops.to_raw_op(uncompress_element))
10497def uncompress_element_eager_fallback(compressed, output_types, output_shapes, name, ctx):
10498 if not isinstance(output_types, (list, tuple)):
10499 raise TypeError(
10500 "Expected list for 'output_types' argument to "
10501 "'uncompress_element' Op, not %r." % output_types)
10502 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
10503 if not isinstance(output_shapes, (list, tuple)):
10504 raise TypeError(
10505 "Expected list for 'output_shapes' argument to "
10506 "'uncompress_element' Op, not %r." % output_shapes)
10507 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
10508 compressed = _ops.convert_to_tensor(compressed, _dtypes.variant)
10509 _inputs_flat = [compressed]
10510 _attrs = ("output_types", output_types, "output_shapes", output_shapes)
10511 _result = _execute.execute(b"UncompressElement", len(output_types),
10512 inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
10513 name=name)
10514 if _execute.must_record_gradient():
10515 _execute.record_gradient(
10516 "UncompressElement", _inputs_flat, _attrs, _result)
10517 return _result
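# Round-trip sketch, assuming the companion CompressElement op (its
# wrapper, compress_element, appears earlier in this file):
def _example_uncompress_element_usage():
  import tensorflow as tf
  blob = compress_element([tf.constant([1, 2, 3])])  # variant scalar
  (restored,) = uncompress_element(
      blob, output_types=[tf.int32], output_shapes=[[3]])
  return restored  # == [1, 2, 3]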
10520def unique_dataset(input_dataset, output_types, output_shapes, metadata="", name=None):
10521 r"""Creates a dataset that contains the unique elements of `input_dataset`.
10523 Args:
10524 input_dataset: A `Tensor` of type `variant`.
10525 output_types: A list of `tf.DTypes` that has length `>= 1`.
10526 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
10527 metadata: An optional `string`. Defaults to `""`.
10528 name: A name for the operation (optional).
10530 Returns:
10531 A `Tensor` of type `variant`.
10532 """
10533 _ctx = _context._context or _context.context()
10534 tld = _ctx._thread_local_data
10535 if tld.is_eager:
10536 try:
10537 _result = pywrap_tfe.TFE_Py_FastPathExecute(
10538 _ctx, "UniqueDataset", name, input_dataset, "output_types",
10539 output_types, "output_shapes", output_shapes, "metadata", metadata)
10540 return _result
10541 except _core._NotOkStatusException as e:
10542 _ops.raise_from_not_ok_status(e, name)
10543 except _core._FallbackException:
10544 pass
10545 try:
10546 return unique_dataset_eager_fallback(
10547 input_dataset, output_types=output_types,
10548 output_shapes=output_shapes, metadata=metadata, name=name, ctx=_ctx)
10549 except _core._SymbolicException:
10550 pass # Add nodes to the TensorFlow graph.
10551 # Add nodes to the TensorFlow graph.
10552 if not isinstance(output_types, (list, tuple)):
10553 raise TypeError(
10554 "Expected list for 'output_types' argument to "
10555 "'unique_dataset' Op, not %r." % output_types)
10556 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
10557 if not isinstance(output_shapes, (list, tuple)):
10558 raise TypeError(
10559 "Expected list for 'output_shapes' argument to "
10560 "'unique_dataset' Op, not %r." % output_shapes)
10561 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
10562 if metadata is None:
10563 metadata = ""
10564 metadata = _execute.make_str(metadata, "metadata")
10565 _, _, _op, _outputs = _op_def_library._apply_op_helper(
10566 "UniqueDataset", input_dataset=input_dataset,
10567 output_types=output_types,
10568 output_shapes=output_shapes, metadata=metadata,
10569 name=name)
10570 _result = _outputs[:]
10571 if _execute.must_record_gradient():
10572 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
10573 _op.get_attr("output_shapes"), "metadata",
10574 _op.get_attr("metadata"))
10575 _inputs_flat = _op.inputs
10576 _execute.record_gradient(
10577 "UniqueDataset", _inputs_flat, _attrs, _result)
10578 _result, = _result
10579 return _result
10581UniqueDataset = tf_export("raw_ops.UniqueDataset")(_ops.to_raw_op(unique_dataset))
10584def unique_dataset_eager_fallback(input_dataset, output_types, output_shapes, metadata, name, ctx):
10585 if not isinstance(output_types, (list, tuple)):
10586 raise TypeError(
10587 "Expected list for 'output_types' argument to "
10588 "'unique_dataset' Op, not %r." % output_types)
10589 output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
10590 if not isinstance(output_shapes, (list, tuple)):
10591 raise TypeError(
10592 "Expected list for 'output_shapes' argument to "
10593 "'unique_dataset' Op, not %r." % output_shapes)
10594 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
10595 if metadata is None:
10596 metadata = ""
10597 metadata = _execute.make_str(metadata, "metadata")
10598 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
10599 _inputs_flat = [input_dataset]
10600 _attrs = ("output_types", output_types, "output_shapes", output_shapes,
10601 "metadata", metadata)
10602 _result = _execute.execute(b"UniqueDataset", 1, inputs=_inputs_flat,
10603 attrs=_attrs, ctx=ctx, name=name)
10604 if _execute.must_record_gradient():
10605 _execute.record_gradient(
10606 "UniqueDataset", _inputs_flat, _attrs, _result)
10607 _result, = _result
10608 return _result
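# Illustrative sketch: reachable through tf.data.experimental.unique();
# duplicate elements are dropped, keeping first occurrences.
def _example_unique_usage():
  import tensorflow as tf
  ds = tf.data.Dataset.from_tensor_slices([1, 1, 2, 3, 2])
  ds = ds.apply(tf.data.experimental.unique())
  return list(ds.as_numpy_iterator())  # [1, 2, 3]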