1"""Python wrappers around TensorFlow ops.
3This file is MACHINE GENERATED! Do not edit.
4"""
6import collections
8from tensorflow.python import pywrap_tfe as pywrap_tfe
9from tensorflow.python.eager import context as _context
10from tensorflow.python.eager import core as _core
11from tensorflow.python.eager import execute as _execute
12from tensorflow.python.framework import dtypes as _dtypes
13from tensorflow.security.fuzzing.py import annotation_types as _atypes
15from tensorflow.python.framework import op_def_registry as _op_def_registry
16from tensorflow.python.framework import ops as _ops
17from tensorflow.python.framework import op_def_library as _op_def_library
18from tensorflow.python.util.deprecation import deprecated_endpoints
19from tensorflow.python.util import dispatch as _dispatch
20from tensorflow.python.util.tf_export import tf_export
22from typing import TypeVar
def fixed_length_record_reader(record_bytes, header_bytes=0, footer_bytes=0, hop_bytes=0, container="", shared_name="", name=None):
  r"""A Reader that outputs fixed-length records from a file.

  Args:
    record_bytes: An `int`. Number of bytes in the record.
    header_bytes: An optional `int`. Defaults to `0`.
      Number of bytes in the header, defaults to 0.
    footer_bytes: An optional `int`. Defaults to `0`.
      Number of bytes in the footer, defaults to 0.
    hop_bytes: An optional `int`. Defaults to `0`.
      Number of bytes to hop before each read. Default of 0 means using
      record_bytes.
    container: An optional `string`. Defaults to `""`.
      If non-empty, this reader is placed in the given container.
      Otherwise, a default container is used.
    shared_name: An optional `string`. Defaults to `""`.
      If non-empty, this reader is named in the given bucket
      with this shared_name. Otherwise, the node name is used instead.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type mutable `string`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    raise RuntimeError("fixed_length_record_reader op does not support eager execution. Arg 'reader_handle' is a ref.")
  # Add nodes to the TensorFlow graph.
  record_bytes = _execute.make_int(record_bytes, "record_bytes")
  if header_bytes is None:
    header_bytes = 0
  header_bytes = _execute.make_int(header_bytes, "header_bytes")
  if footer_bytes is None:
    footer_bytes = 0
  footer_bytes = _execute.make_int(footer_bytes, "footer_bytes")
  if hop_bytes is None:
    hop_bytes = 0
  hop_bytes = _execute.make_int(hop_bytes, "hop_bytes")
  if container is None:
    container = ""
  container = _execute.make_str(container, "container")
  if shared_name is None:
    shared_name = ""
  shared_name = _execute.make_str(shared_name, "shared_name")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "FixedLengthRecordReader", record_bytes=record_bytes,
                                   header_bytes=header_bytes,
                                   footer_bytes=footer_bytes,
                                   hop_bytes=hop_bytes, container=container,
                                   shared_name=shared_name, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("header_bytes", _op._get_attr_int("header_bytes"),
              "record_bytes", _op._get_attr_int("record_bytes"),
              "footer_bytes", _op._get_attr_int("footer_bytes"), "hop_bytes",
              _op._get_attr_int("hop_bytes"), "container",
              _op.get_attr("container"), "shared_name",
              _op.get_attr("shared_name"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "FixedLengthRecordReader", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

FixedLengthRecordReader = tf_export("raw_ops.FixedLengthRecordReader")(_ops.to_raw_op(fixed_length_record_reader))


def fixed_length_record_reader_eager_fallback(record_bytes, header_bytes, footer_bytes, hop_bytes, container, shared_name, name, ctx):
  raise RuntimeError("fixed_length_record_reader op does not support eager execution. Arg 'reader_handle' is a ref.")

def fixed_length_record_reader_v2(record_bytes, header_bytes=0, footer_bytes=0, hop_bytes=0, container="", shared_name="", encoding="", name=None):
  r"""A Reader that outputs fixed-length records from a file.

  Args:
    record_bytes: An `int`. Number of bytes in the record.
    header_bytes: An optional `int`. Defaults to `0`.
      Number of bytes in the header, defaults to 0.
    footer_bytes: An optional `int`. Defaults to `0`.
      Number of bytes in the footer, defaults to 0.
    hop_bytes: An optional `int`. Defaults to `0`.
      Number of bytes to hop before each read. Default of 0 means using
      record_bytes.
    container: An optional `string`. Defaults to `""`.
      If non-empty, this reader is placed in the given container.
      Otherwise, a default container is used.
    shared_name: An optional `string`. Defaults to `""`.
      If non-empty, this reader is named in the given bucket
      with this shared_name. Otherwise, the node name is used instead.
    encoding: An optional `string`. Defaults to `""`.
      The type of encoding for the file. Currently ZLIB and GZIP
      are supported. Defaults to none.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "FixedLengthRecordReaderV2", name, "header_bytes", header_bytes,
        "record_bytes", record_bytes, "footer_bytes", footer_bytes,
        "hop_bytes", hop_bytes, "container", container, "shared_name",
        shared_name, "encoding", encoding)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return fixed_length_record_reader_v2_eager_fallback(
          header_bytes=header_bytes, record_bytes=record_bytes,
          footer_bytes=footer_bytes, hop_bytes=hop_bytes, container=container,
          shared_name=shared_name, encoding=encoding, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  record_bytes = _execute.make_int(record_bytes, "record_bytes")
  if header_bytes is None:
    header_bytes = 0
  header_bytes = _execute.make_int(header_bytes, "header_bytes")
  if footer_bytes is None:
    footer_bytes = 0
  footer_bytes = _execute.make_int(footer_bytes, "footer_bytes")
  if hop_bytes is None:
    hop_bytes = 0
  hop_bytes = _execute.make_int(hop_bytes, "hop_bytes")
  if container is None:
    container = ""
  container = _execute.make_str(container, "container")
  if shared_name is None:
    shared_name = ""
  shared_name = _execute.make_str(shared_name, "shared_name")
  if encoding is None:
    encoding = ""
  encoding = _execute.make_str(encoding, "encoding")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "FixedLengthRecordReaderV2", record_bytes=record_bytes,
                                     header_bytes=header_bytes,
                                     footer_bytes=footer_bytes,
                                     hop_bytes=hop_bytes, container=container,
                                     shared_name=shared_name,
                                     encoding=encoding, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("header_bytes", _op._get_attr_int("header_bytes"),
              "record_bytes", _op._get_attr_int("record_bytes"),
              "footer_bytes", _op._get_attr_int("footer_bytes"), "hop_bytes",
              _op._get_attr_int("hop_bytes"), "container",
              _op.get_attr("container"), "shared_name",
              _op.get_attr("shared_name"), "encoding",
              _op.get_attr("encoding"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "FixedLengthRecordReaderV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

FixedLengthRecordReaderV2 = tf_export("raw_ops.FixedLengthRecordReaderV2")(_ops.to_raw_op(fixed_length_record_reader_v2))

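# Usage sketch (editorial addition, not emitted by the op generator): the V2
# reader returns a `resource` handle that is driven with the ReaderReadV2
# family of ops below. The 16-byte record size and GZIP encoding here are
# illustrative assumptions.
#
#   reader = fixed_length_record_reader_v2(record_bytes=16, encoding="GZIP")
#   # Pair with reader_read_v2(reader, queue), where `queue` is the resource
#   # handle of a string queue holding filenames, to pull (key, value) records.
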
def fixed_length_record_reader_v2_eager_fallback(record_bytes, header_bytes, footer_bytes, hop_bytes, container, shared_name, encoding, name, ctx):
  record_bytes = _execute.make_int(record_bytes, "record_bytes")
  if header_bytes is None:
    header_bytes = 0
  header_bytes = _execute.make_int(header_bytes, "header_bytes")
  if footer_bytes is None:
    footer_bytes = 0
  footer_bytes = _execute.make_int(footer_bytes, "footer_bytes")
  if hop_bytes is None:
    hop_bytes = 0
  hop_bytes = _execute.make_int(hop_bytes, "hop_bytes")
  if container is None:
    container = ""
  container = _execute.make_str(container, "container")
  if shared_name is None:
    shared_name = ""
  shared_name = _execute.make_str(shared_name, "shared_name")
  if encoding is None:
    encoding = ""
  encoding = _execute.make_str(encoding, "encoding")
  _inputs_flat = []
  _attrs = ("header_bytes", header_bytes, "record_bytes", record_bytes,
            "footer_bytes", footer_bytes, "hop_bytes", hop_bytes, "container",
            container, "shared_name", shared_name, "encoding", encoding)
  _result = _execute.execute(b"FixedLengthRecordReaderV2", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
                             name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "FixedLengthRecordReaderV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

def identity_reader(container="", shared_name="", name=None):
  r"""A Reader that outputs the queued work as both the key and value.

  To use, enqueue strings in a Queue. ReaderRead will take the front
  work string and output (work, work).

  Args:
    container: An optional `string`. Defaults to `""`.
      If non-empty, this reader is placed in the given container.
      Otherwise, a default container is used.
    shared_name: An optional `string`. Defaults to `""`.
      If non-empty, this reader is named in the given bucket
      with this shared_name. Otherwise, the node name is used instead.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type mutable `string`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    raise RuntimeError("identity_reader op does not support eager execution. Arg 'reader_handle' is a ref.")
  # Add nodes to the TensorFlow graph.
  if container is None:
    container = ""
  container = _execute.make_str(container, "container")
  if shared_name is None:
    shared_name = ""
  shared_name = _execute.make_str(shared_name, "shared_name")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "IdentityReader", container=container, shared_name=shared_name,
                          name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("container", _op.get_attr("container"), "shared_name",
              _op.get_attr("shared_name"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "IdentityReader", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

IdentityReader = tf_export("raw_ops.IdentityReader")(_ops.to_raw_op(identity_reader))


def identity_reader_eager_fallback(container, shared_name, name, ctx):
  raise RuntimeError("identity_reader op does not support eager execution. Arg 'reader_handle' is a ref.")

def identity_reader_v2(container="", shared_name="", name=None):
  r"""A Reader that outputs the queued work as both the key and value.

  To use, enqueue strings in a Queue. ReaderRead will take the front
  work string and output (work, work).

  Args:
    container: An optional `string`. Defaults to `""`.
      If non-empty, this reader is placed in the given container.
      Otherwise, a default container is used.
    shared_name: An optional `string`. Defaults to `""`.
      If non-empty, this reader is named in the given bucket
      with this shared_name. Otherwise, the node name is used instead.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "IdentityReaderV2", name, "container", container, "shared_name",
        shared_name)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return identity_reader_v2_eager_fallback(
          container=container, shared_name=shared_name, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if container is None:
    container = ""
  container = _execute.make_str(container, "container")
  if shared_name is None:
    shared_name = ""
  shared_name = _execute.make_str(shared_name, "shared_name")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "IdentityReaderV2", container=container, shared_name=shared_name,
                            name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("container", _op.get_attr("container"), "shared_name",
              _op.get_attr("shared_name"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "IdentityReaderV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

IdentityReaderV2 = tf_export("raw_ops.IdentityReaderV2")(_ops.to_raw_op(identity_reader_v2))

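# Usage sketch (editorial addition): IdentityReaderV2 echoes each queued work
# item back as both key and value, which makes it convenient for testing a
# reader pipeline without touching the filesystem. The shared_name below is
# illustrative.
#
#   reader = identity_reader_v2(shared_name="test_reader")
#   # reader_read_v2(reader, queue) then yields (work_item, work_item) pairs.
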
def identity_reader_v2_eager_fallback(container, shared_name, name, ctx):
  if container is None:
    container = ""
  container = _execute.make_str(container, "container")
  if shared_name is None:
    shared_name = ""
  shared_name = _execute.make_str(shared_name, "shared_name")
  _inputs_flat = []
  _attrs = ("container", container, "shared_name", shared_name)
  _result = _execute.execute(b"IdentityReaderV2", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "IdentityReaderV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

def lmdb_reader(container="", shared_name="", name=None):
  r"""A Reader that outputs the records from an LMDB file.

  Args:
    container: An optional `string`. Defaults to `""`.
      If non-empty, this reader is placed in the given container.
      Otherwise, a default container is used.
    shared_name: An optional `string`. Defaults to `""`.
      If non-empty, this reader is named in the given bucket
      with this shared_name. Otherwise, the node name is used instead.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type mutable `string`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    raise RuntimeError("lmdb_reader op does not support eager execution. Arg 'reader_handle' is a ref.")
  # Add nodes to the TensorFlow graph.
  if container is None:
    container = ""
  container = _execute.make_str(container, "container")
  if shared_name is None:
    shared_name = ""
  shared_name = _execute.make_str(shared_name, "shared_name")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "LMDBReader", container=container, shared_name=shared_name, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("container", _op.get_attr("container"), "shared_name",
              _op.get_attr("shared_name"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "LMDBReader", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

LMDBReader = tf_export("raw_ops.LMDBReader")(_ops.to_raw_op(lmdb_reader))


def lmdb_reader_eager_fallback(container, shared_name, name, ctx):
  raise RuntimeError("lmdb_reader op does not support eager execution. Arg 'reader_handle' is a ref.")

@_dispatch.add_fallback_dispatch_list
@_dispatch.add_type_based_api_dispatcher
@tf_export('io.matching_files', v1=['io.matching_files', 'matching_files'])
@deprecated_endpoints('matching_files')
def matching_files(pattern, name=None):
  r"""Returns the set of files matching one or more glob patterns.

  Note that this routine only supports wildcard characters in the
  basename portion of the pattern, not in the directory portion.
  Note also that the order of filenames returned is deterministic.

  Args:
    pattern: A `Tensor` of type `string`.
      Shell wildcard pattern(s). Scalar or vector of type string.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `string`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "MatchingFiles", name, pattern)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      _result = _dispatcher_for_matching_files(
          (pattern, name,), None)
      if _result is not NotImplemented:
        return _result
      return matching_files_eager_fallback(
          pattern, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
    except (TypeError, ValueError):
      _result = _dispatch.dispatch(
            matching_files, (), dict(pattern=pattern, name=name)
          )
      if _result is not _dispatch.OpDispatcher.NOT_SUPPORTED:
        return _result
      raise
  else:
    _result = _dispatcher_for_matching_files(
        (pattern, name,), None)
    if _result is not NotImplemented:
      return _result
  # Add nodes to the TensorFlow graph.
  try:
    _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "MatchingFiles", pattern=pattern, name=name)
  except (TypeError, ValueError):
    _result = _dispatch.dispatch(
          matching_files, (), dict(pattern=pattern, name=name)
        )
    if _result is not _dispatch.OpDispatcher.NOT_SUPPORTED:
      return _result
    raise
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ()
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "MatchingFiles", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

MatchingFiles = tf_export("raw_ops.MatchingFiles")(_ops.to_raw_op(matching_files))
_dispatcher_for_matching_files = matching_files._tf_type_based_dispatcher.Dispatch

def matching_files_eager_fallback(pattern, name, ctx):
  pattern = _ops.convert_to_tensor(pattern, _dtypes.string)
  _inputs_flat = [pattern]
  _attrs = None
  _result = _execute.execute(b"MatchingFiles", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "MatchingFiles", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

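# Example (editorial sketch; the glob pattern is illustrative): this op is
# surfaced publicly as tf.io.matching_files and works eagerly. Per the
# docstring above, wildcards are only honored in the basename, so a pattern
# like "/tmp/*/part-*" would not match across directories.
#
#   import tensorflow as tf
#   filenames = tf.io.matching_files("/tmp/data/part-*")
#   # `filenames` is a 1-D string tensor in deterministic order.
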
def merge_v2_checkpoints(checkpoint_prefixes, destination_prefix, delete_old_dirs=True, allow_missing_files=False, name=None):
  r"""V2 format specific: merges the metadata files of sharded checkpoints.

  The result is one logical checkpoint, with one physical metadata file and
  renamed data files.

  Intended for "grouping" multiple checkpoints in a sharded checkpoint setup.

  If delete_old_dirs is true, attempts to delete recursively the dirname of each
  path in the input checkpoint_prefixes. This is useful when those paths are
  non-user-facing temporary locations.

  If allow_missing_files is true, merges the checkpoint prefixes as long as
  at least one file exists. Otherwise, if no files exist, an error will be thrown.
  The default value for allow_missing_files is false.

  Args:
    checkpoint_prefixes: A `Tensor` of type `string`.
      prefixes of V2 checkpoints to merge.
    destination_prefix: A `Tensor` of type `string`.
      scalar. The desired final prefix. Allowed to be the same
      as one of the checkpoint_prefixes.
    delete_old_dirs: An optional `bool`. Defaults to `True`. see above.
    allow_missing_files: An optional `bool`. Defaults to `False`. see above.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "MergeV2Checkpoints", name, checkpoint_prefixes,
        destination_prefix, "delete_old_dirs", delete_old_dirs,
        "allow_missing_files", allow_missing_files)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return merge_v2_checkpoints_eager_fallback(
          checkpoint_prefixes, destination_prefix,
          delete_old_dirs=delete_old_dirs,
          allow_missing_files=allow_missing_files, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if delete_old_dirs is None:
    delete_old_dirs = True
  delete_old_dirs = _execute.make_bool(delete_old_dirs, "delete_old_dirs")
  if allow_missing_files is None:
    allow_missing_files = False
  allow_missing_files = _execute.make_bool(allow_missing_files, "allow_missing_files")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "MergeV2Checkpoints", checkpoint_prefixes=checkpoint_prefixes,
                              destination_prefix=destination_prefix,
                              delete_old_dirs=delete_old_dirs,
                              allow_missing_files=allow_missing_files,
                              name=name)
  return _op
MergeV2Checkpoints = tf_export("raw_ops.MergeV2Checkpoints")(_ops.to_raw_op(merge_v2_checkpoints))

def merge_v2_checkpoints_eager_fallback(checkpoint_prefixes, destination_prefix, delete_old_dirs, allow_missing_files, name, ctx):
  if delete_old_dirs is None:
    delete_old_dirs = True
  delete_old_dirs = _execute.make_bool(delete_old_dirs, "delete_old_dirs")
  if allow_missing_files is None:
    allow_missing_files = False
  allow_missing_files = _execute.make_bool(allow_missing_files, "allow_missing_files")
  checkpoint_prefixes = _ops.convert_to_tensor(checkpoint_prefixes, _dtypes.string)
  destination_prefix = _ops.convert_to_tensor(destination_prefix, _dtypes.string)
  _inputs_flat = [checkpoint_prefixes, destination_prefix]
  _attrs = ("delete_old_dirs", delete_old_dirs, "allow_missing_files",
            allow_missing_files)
  _result = _execute.execute(b"MergeV2Checkpoints", 0, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  _result = None
  return _result

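# Usage sketch (editorial addition; the prefixes are illustrative): merging
# per-shard metadata written by parallel savers into one logical checkpoint,
# cleaning up the temporary shard directories afterwards.
#
#   merge_v2_checkpoints(
#       checkpoint_prefixes=["/tmp/ckpt_tmp/shard-0", "/tmp/ckpt_tmp/shard-1"],
#       destination_prefix="/tmp/ckpt/final",
#       delete_old_dirs=True)
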
def read_file(filename, name=None):
  r"""Reads and outputs the entire contents of the input filename.

  Args:
    filename: A `Tensor` of type `string`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `string`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "ReadFile", name, filename)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return read_file_eager_fallback(
          filename, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "ReadFile", filename=filename, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ()
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "ReadFile", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

ReadFile = tf_export("raw_ops.ReadFile")(_ops.to_raw_op(read_file))

def read_file_eager_fallback(filename, name, ctx):
  filename = _ops.convert_to_tensor(filename, _dtypes.string)
  _inputs_flat = [filename]
  _attrs = None
  _result = _execute.execute(b"ReadFile", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "ReadFile", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

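# Example (editorial sketch; the path is illustrative): ReadFile is surfaced
# publicly as tf.io.read_file and returns the whole file as a scalar string
# tensor.
#
#   import tensorflow as tf
#   contents = tf.io.read_file("/tmp/example.bin")
#   # Pair with tf.io.decode_raw, tf.io.decode_image, etc. to parse `contents`.
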
def reader_num_records_produced(reader_handle, name=None):
  r"""Returns the number of records this Reader has produced.

  This is the same as the number of ReaderRead executions that have
  succeeded.

  Args:
    reader_handle: A `Tensor` of type mutable `string`. Handle to a Reader.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `int64`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    raise RuntimeError("reader_num_records_produced op does not support eager execution. Arg 'reader_handle' is a ref.")
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "ReaderNumRecordsProduced", reader_handle=reader_handle, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ()
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "ReaderNumRecordsProduced", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

ReaderNumRecordsProduced = tf_export("raw_ops.ReaderNumRecordsProduced")(_ops.to_raw_op(reader_num_records_produced))


def reader_num_records_produced_eager_fallback(reader_handle, name, ctx):
  raise RuntimeError("reader_num_records_produced op does not support eager execution. Arg 'reader_handle' is a ref.")


def reader_num_records_produced_v2(reader_handle, name=None):
  r"""Returns the number of records this Reader has produced.

  This is the same as the number of ReaderRead executions that have
  succeeded.

  Args:
    reader_handle: A `Tensor` of type `resource`. Handle to a Reader.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `int64`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "ReaderNumRecordsProducedV2", name, reader_handle)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return reader_num_records_produced_v2_eager_fallback(
          reader_handle, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "ReaderNumRecordsProducedV2", reader_handle=reader_handle, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ()
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "ReaderNumRecordsProducedV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

ReaderNumRecordsProducedV2 = tf_export("raw_ops.ReaderNumRecordsProducedV2")(_ops.to_raw_op(reader_num_records_produced_v2))


def reader_num_records_produced_v2_eager_fallback(reader_handle, name, ctx):
  reader_handle = _ops.convert_to_tensor(reader_handle, _dtypes.resource)
  _inputs_flat = [reader_handle]
  _attrs = None
  _result = _execute.execute(b"ReaderNumRecordsProducedV2", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
                             name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "ReaderNumRecordsProducedV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

def reader_num_work_units_completed(reader_handle, name=None):
  r"""Returns the number of work units this Reader has finished processing.

  Args:
    reader_handle: A `Tensor` of type mutable `string`. Handle to a Reader.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `int64`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    raise RuntimeError("reader_num_work_units_completed op does not support eager execution. Arg 'reader_handle' is a ref.")
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "ReaderNumWorkUnitsCompleted", reader_handle=reader_handle, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ()
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "ReaderNumWorkUnitsCompleted", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

ReaderNumWorkUnitsCompleted = tf_export("raw_ops.ReaderNumWorkUnitsCompleted")(_ops.to_raw_op(reader_num_work_units_completed))


def reader_num_work_units_completed_eager_fallback(reader_handle, name, ctx):
  raise RuntimeError("reader_num_work_units_completed op does not support eager execution. Arg 'reader_handle' is a ref.")


def reader_num_work_units_completed_v2(reader_handle, name=None):
  r"""Returns the number of work units this Reader has finished processing.

  Args:
    reader_handle: A `Tensor` of type `resource`. Handle to a Reader.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `int64`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "ReaderNumWorkUnitsCompletedV2", name, reader_handle)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return reader_num_work_units_completed_v2_eager_fallback(
          reader_handle, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "ReaderNumWorkUnitsCompletedV2", reader_handle=reader_handle,
                                         name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ()
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "ReaderNumWorkUnitsCompletedV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

ReaderNumWorkUnitsCompletedV2 = tf_export("raw_ops.ReaderNumWorkUnitsCompletedV2")(_ops.to_raw_op(reader_num_work_units_completed_v2))


def reader_num_work_units_completed_v2_eager_fallback(reader_handle, name, ctx):
  reader_handle = _ops.convert_to_tensor(reader_handle, _dtypes.resource)
  _inputs_flat = [reader_handle]
  _attrs = None
  _result = _execute.execute(b"ReaderNumWorkUnitsCompletedV2", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
                             name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "ReaderNumWorkUnitsCompletedV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

_ReaderReadOutput = collections.namedtuple(
    "ReaderRead",
    ["key", "value"])


def reader_read(reader_handle, queue_handle, name=None):
  r"""Returns the next record (key, value pair) produced by a Reader.

  Will dequeue from the input queue if necessary (e.g. when the
  Reader needs to start reading from a new file since it has finished
  with the previous file).

  Args:
    reader_handle: A `Tensor` of type mutable `string`. Handle to a Reader.
    queue_handle: A `Tensor` of type mutable `string`.
      Handle to a Queue, with string work items.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (key, value).

    key: A `Tensor` of type `string`.
    value: A `Tensor` of type `string`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    raise RuntimeError("reader_read op does not support eager execution. Arg 'queue_handle' is a ref.")
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "ReaderRead", reader_handle=reader_handle, queue_handle=queue_handle,
                      name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ()
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "ReaderRead", _inputs_flat, _attrs, _result)
  _result = _ReaderReadOutput._make(_result)
  return _result

ReaderRead = tf_export("raw_ops.ReaderRead")(_ops.to_raw_op(reader_read))


def reader_read_eager_fallback(reader_handle, queue_handle, name, ctx):
  raise RuntimeError("reader_read op does not support eager execution. Arg 'queue_handle' is a ref.")

_ReaderReadUpToOutput = collections.namedtuple(
    "ReaderReadUpTo",
    ["keys", "values"])


def reader_read_up_to(reader_handle, queue_handle, num_records, name=None):
  r"""Returns up to `num_records` (key, value) pairs produced by a Reader.

  Will dequeue from the input queue if necessary (e.g. when the
  Reader needs to start reading from a new file since it has finished
  with the previous file).
  It may return fewer than `num_records` even before the last batch.

  Args:
    reader_handle: A `Tensor` of type mutable `string`. Handle to a `Reader`.
    queue_handle: A `Tensor` of type mutable `string`.
      Handle to a `Queue`, with string work items.
    num_records: A `Tensor` of type `int64`.
      number of records to read from `Reader`.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (keys, values).

    keys: A `Tensor` of type `string`.
    values: A `Tensor` of type `string`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    raise RuntimeError("reader_read_up_to op does not support eager execution. Arg 'queue_handle' is a ref.")
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "ReaderReadUpTo", reader_handle=reader_handle,
                          queue_handle=queue_handle, num_records=num_records,
                          name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ()
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "ReaderReadUpTo", _inputs_flat, _attrs, _result)
  _result = _ReaderReadUpToOutput._make(_result)
  return _result

ReaderReadUpTo = tf_export("raw_ops.ReaderReadUpTo")(_ops.to_raw_op(reader_read_up_to))


def reader_read_up_to_eager_fallback(reader_handle, queue_handle, num_records, name, ctx):
  raise RuntimeError("reader_read_up_to op does not support eager execution. Arg 'queue_handle' is a ref.")

_ReaderReadUpToV2Output = collections.namedtuple(
    "ReaderReadUpToV2",
    ["keys", "values"])


def reader_read_up_to_v2(reader_handle, queue_handle, num_records, name=None):
  r"""Returns up to `num_records` (key, value) pairs produced by a Reader.

  Will dequeue from the input queue if necessary (e.g. when the
  Reader needs to start reading from a new file since it has finished
  with the previous file).
  It may return fewer than `num_records` even before the last batch.

  Args:
    reader_handle: A `Tensor` of type `resource`. Handle to a `Reader`.
    queue_handle: A `Tensor` of type `resource`.
      Handle to a `Queue`, with string work items.
    num_records: A `Tensor` of type `int64`.
      number of records to read from `Reader`.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (keys, values).

    keys: A `Tensor` of type `string`.
    values: A `Tensor` of type `string`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "ReaderReadUpToV2", name, reader_handle, queue_handle,
        num_records)
      _result = _ReaderReadUpToV2Output._make(_result)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return reader_read_up_to_v2_eager_fallback(
          reader_handle, queue_handle, num_records, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "ReaderReadUpToV2", reader_handle=reader_handle,
                            queue_handle=queue_handle,
                            num_records=num_records, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ()
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "ReaderReadUpToV2", _inputs_flat, _attrs, _result)
  _result = _ReaderReadUpToV2Output._make(_result)
  return _result

ReaderReadUpToV2 = tf_export("raw_ops.ReaderReadUpToV2")(_ops.to_raw_op(reader_read_up_to_v2))


def reader_read_up_to_v2_eager_fallback(reader_handle, queue_handle, num_records, name, ctx):
  reader_handle = _ops.convert_to_tensor(reader_handle, _dtypes.resource)
  queue_handle = _ops.convert_to_tensor(queue_handle, _dtypes.resource)
  num_records = _ops.convert_to_tensor(num_records, _dtypes.int64)
  _inputs_flat = [reader_handle, queue_handle, num_records]
  _attrs = None
  _result = _execute.execute(b"ReaderReadUpToV2", 2, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "ReaderReadUpToV2", _inputs_flat, _attrs, _result)
  _result = _ReaderReadUpToV2Output._make(_result)
  return _result

_ReaderReadV2Output = collections.namedtuple(
    "ReaderReadV2",
    ["key", "value"])


def reader_read_v2(reader_handle, queue_handle, name=None):
  r"""Returns the next record (key, value pair) produced by a Reader.

  Will dequeue from the input queue if necessary (e.g. when the
  Reader needs to start reading from a new file since it has finished
  with the previous file).

  Args:
    reader_handle: A `Tensor` of type `resource`. Handle to a Reader.
    queue_handle: A `Tensor` of type `resource`.
      Handle to a Queue, with string work items.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (key, value).

    key: A `Tensor` of type `string`.
    value: A `Tensor` of type `string`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "ReaderReadV2", name, reader_handle, queue_handle)
      _result = _ReaderReadV2Output._make(_result)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return reader_read_v2_eager_fallback(
          reader_handle, queue_handle, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "ReaderReadV2", reader_handle=reader_handle,
                        queue_handle=queue_handle, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ()
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "ReaderReadV2", _inputs_flat, _attrs, _result)
  _result = _ReaderReadV2Output._make(_result)
  return _result

ReaderReadV2 = tf_export("raw_ops.ReaderReadV2")(_ops.to_raw_op(reader_read_v2))


def reader_read_v2_eager_fallback(reader_handle, queue_handle, name, ctx):
  reader_handle = _ops.convert_to_tensor(reader_handle, _dtypes.resource)
  queue_handle = _ops.convert_to_tensor(queue_handle, _dtypes.resource)
  _inputs_flat = [reader_handle, queue_handle]
  _attrs = None
  _result = _execute.execute(b"ReaderReadV2", 2, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "ReaderReadV2", _inputs_flat, _attrs, _result)
  _result = _ReaderReadV2Output._make(_result)
  return _result

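# Pipeline sketch (editorial addition, graph mode; queue construction is
# summarized because it lives outside this module): a V2 reader pulls work
# items from a string queue and emits one (key, value) record per call.
#
#   reader = fixed_length_record_reader_v2(record_bytes=16)
#   # `queue` is assumed to be the resource handle of a queue enqueued with
#   # filename strings.
#   key, value = reader_read_v2(reader, queue)
#   # reader_read_up_to_v2(reader, queue, num_records=128) returns batched
#   # `keys`/`values` instead, and may return fewer than 128 records.
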
def reader_reset(reader_handle, name=None):
  r"""Restore a Reader to its initial clean state.

  Args:
    reader_handle: A `Tensor` of type mutable `string`. Handle to a Reader.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    raise RuntimeError("reader_reset op does not support eager execution. Arg 'reader_handle' is a ref.")
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "ReaderReset", reader_handle=reader_handle, name=name)
  return _op
ReaderReset = tf_export("raw_ops.ReaderReset")(_ops.to_raw_op(reader_reset))


def reader_reset_eager_fallback(reader_handle, name, ctx):
  raise RuntimeError("reader_reset op does not support eager execution. Arg 'reader_handle' is a ref.")


def reader_reset_v2(reader_handle, name=None):
  r"""Restore a Reader to its initial clean state.

  Args:
    reader_handle: A `Tensor` of type `resource`. Handle to a Reader.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "ReaderResetV2", name, reader_handle)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return reader_reset_v2_eager_fallback(
          reader_handle, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "ReaderResetV2", reader_handle=reader_handle, name=name)
  return _op
ReaderResetV2 = tf_export("raw_ops.ReaderResetV2")(_ops.to_raw_op(reader_reset_v2))


def reader_reset_v2_eager_fallback(reader_handle, name, ctx):
  reader_handle = _ops.convert_to_tensor(reader_handle, _dtypes.resource)
  _inputs_flat = [reader_handle]
  _attrs = None
  _result = _execute.execute(b"ReaderResetV2", 0, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  _result = None
  return _result

def reader_restore_state(reader_handle, state, name=None):
  r"""Restore a reader to a previously saved state.

  Not all Readers support being restored, so this can produce an
  Unimplemented error.

  Args:
    reader_handle: A `Tensor` of type mutable `string`. Handle to a Reader.
    state: A `Tensor` of type `string`.
      Result of a ReaderSerializeState of a Reader with type
      matching reader_handle.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    raise RuntimeError("reader_restore_state op does not support eager execution. Arg 'reader_handle' is a ref.")
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "ReaderRestoreState", reader_handle=reader_handle, state=state,
                              name=name)
  return _op
ReaderRestoreState = tf_export("raw_ops.ReaderRestoreState")(_ops.to_raw_op(reader_restore_state))


def reader_restore_state_eager_fallback(reader_handle, state, name, ctx):
  raise RuntimeError("reader_restore_state op does not support eager execution. Arg 'reader_handle' is a ref.")


def reader_restore_state_v2(reader_handle, state, name=None):
  r"""Restore a reader to a previously saved state.

  Not all Readers support being restored, so this can produce an
  Unimplemented error.

  Args:
    reader_handle: A `Tensor` of type `resource`. Handle to a Reader.
    state: A `Tensor` of type `string`.
      Result of a ReaderSerializeState of a Reader with type
      matching reader_handle.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "ReaderRestoreStateV2", name, reader_handle, state)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return reader_restore_state_v2_eager_fallback(
          reader_handle, state, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "ReaderRestoreStateV2", reader_handle=reader_handle, state=state,
                                name=name)
  return _op
ReaderRestoreStateV2 = tf_export("raw_ops.ReaderRestoreStateV2")(_ops.to_raw_op(reader_restore_state_v2))


def reader_restore_state_v2_eager_fallback(reader_handle, state, name, ctx):
  reader_handle = _ops.convert_to_tensor(reader_handle, _dtypes.resource)
  state = _ops.convert_to_tensor(state, _dtypes.string)
  _inputs_flat = [reader_handle, state]
  _attrs = None
  _result = _execute.execute(b"ReaderRestoreStateV2", 0, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  _result = None
  return _result

def reader_serialize_state(reader_handle, name=None):
  r"""Produce a string tensor that encodes the state of a Reader.

  Not all Readers support being serialized, so this can produce an
  Unimplemented error.

  Args:
    reader_handle: A `Tensor` of type mutable `string`. Handle to a Reader.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `string`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    raise RuntimeError("reader_serialize_state op does not support eager execution. Arg 'reader_handle' is a ref.")
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "ReaderSerializeState", reader_handle=reader_handle, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ()
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "ReaderSerializeState", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

ReaderSerializeState = tf_export("raw_ops.ReaderSerializeState")(_ops.to_raw_op(reader_serialize_state))


def reader_serialize_state_eager_fallback(reader_handle, name, ctx):
  raise RuntimeError("reader_serialize_state op does not support eager execution. Arg 'reader_handle' is a ref.")


def reader_serialize_state_v2(reader_handle, name=None):
  r"""Produce a string tensor that encodes the state of a Reader.

  Not all Readers support being serialized, so this can produce an
  Unimplemented error.

  Args:
    reader_handle: A `Tensor` of type `resource`. Handle to a Reader.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `string`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "ReaderSerializeStateV2", name, reader_handle)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return reader_serialize_state_v2_eager_fallback(
          reader_handle, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "ReaderSerializeStateV2", reader_handle=reader_handle, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ()
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "ReaderSerializeStateV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

ReaderSerializeStateV2 = tf_export("raw_ops.ReaderSerializeStateV2")(_ops.to_raw_op(reader_serialize_state_v2))


def reader_serialize_state_v2_eager_fallback(reader_handle, name, ctx):
  reader_handle = _ops.convert_to_tensor(reader_handle, _dtypes.resource)
  _inputs_flat = [reader_handle]
  _attrs = None
  _result = _execute.execute(b"ReaderSerializeStateV2", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
                             name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "ReaderSerializeStateV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

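# Round-trip sketch (editorial addition): for readers that support it, the
# serialized state can be checkpointed and later pushed back with
# ReaderRestoreStateV2 to resume where the reader left off.
#
#   state = reader_serialize_state_v2(reader)   # scalar string tensor
#   reader_restore_state_v2(reader, state)
#   # Readers without this support raise an Unimplemented error instead.
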
def restore(file_pattern, tensor_name, dt, preferred_shard=-1, name=None):
  r"""Restores a tensor from checkpoint files.

  Reads a tensor stored in one or several files. If there are several files (for
  instance because a tensor was saved as slices), `file_pattern` may contain
  wildcard symbols (`*` and `?`) in the filename portion only, not in the
  directory portion.

  If a `file_pattern` matches several files, `preferred_shard` can be used to hint
  in which file the requested tensor is likely to be found. This op will first
  open the file at index `preferred_shard` in the list of matching files and try
  to restore tensors from that file. Only if some tensors or tensor slices are
  not found in that first file, then the Op opens all the files. Setting
  `preferred_shard` to match the value passed as the `shard` input
  of a matching `Save` Op may speed up Restore. This attribute only affects
  performance, not correctness. The default value -1 means files are processed in
  order.

  See also `RestoreSlice`.

  Args:
    file_pattern: A `Tensor` of type `string`.
      Must have a single element. The pattern of the files from
      which we read the tensor.
    tensor_name: A `Tensor` of type `string`.
      Must have a single element. The name of the tensor to be
      restored.
    dt: A `tf.DType`. The type of the tensor to be restored.
    preferred_shard: An optional `int`. Defaults to `-1`.
      Index of file to open first if multiple files match
      `file_pattern`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `dt`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "Restore", name, file_pattern, tensor_name, "dt", dt,
        "preferred_shard", preferred_shard)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return restore_eager_fallback(
          file_pattern, tensor_name, dt=dt, preferred_shard=preferred_shard,
          name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  dt = _execute.make_type(dt, "dt")
  if preferred_shard is None:
    preferred_shard = -1
  preferred_shard = _execute.make_int(preferred_shard, "preferred_shard")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "Restore", file_pattern=file_pattern, tensor_name=tensor_name, dt=dt,
                   preferred_shard=preferred_shard, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("dt", _op._get_attr_type("dt"), "preferred_shard",
              _op._get_attr_int("preferred_shard"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "Restore", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

Restore = tf_export("raw_ops.Restore")(_ops.to_raw_op(restore))


def restore_eager_fallback(file_pattern, tensor_name, dt, preferred_shard, name, ctx):
  dt = _execute.make_type(dt, "dt")
  if preferred_shard is None:
    preferred_shard = -1
  preferred_shard = _execute.make_int(preferred_shard, "preferred_shard")
  file_pattern = _ops.convert_to_tensor(file_pattern, _dtypes.string)
  tensor_name = _ops.convert_to_tensor(tensor_name, _dtypes.string)
  _inputs_flat = [file_pattern, tensor_name]
  _attrs = ("dt", dt, "preferred_shard", preferred_shard)
  _result = _execute.execute(b"Restore", 1, inputs=_inputs_flat, attrs=_attrs,
                             ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "Restore", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

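# Usage sketch (editorial addition; the paths and tensor name are
# illustrative, and `tf` is assumed to be an imported TensorFlow module):
# restoring a single tensor from a V1-format checkpoint, hinting at the shard
# the matching Save op wrote.
#
#   weights = restore(
#       file_pattern="/tmp/ckpt-?????-of-00002",
#       tensor_name="layer0/weights",
#       dt=tf.float32,
#       preferred_shard=0)   # a hint only; affects speed, not correctness
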
def restore_slice(file_pattern, tensor_name, shape_and_slice, dt, preferred_shard=-1, name=None):
  r"""Restores a tensor from checkpoint files.

  This is like `Restore` except that restored tensor can be listed as filling
  only a slice of a larger tensor. `shape_and_slice` specifies the shape of the
  larger tensor and the slice that the restored tensor covers.

  The `shape_and_slice` input has the same format as the
  elements of the `shapes_and_slices` input of the `SaveSlices` op.

  Args:
    file_pattern: A `Tensor` of type `string`.
      Must have a single element. The pattern of the files from
      which we read the tensor.
    tensor_name: A `Tensor` of type `string`.
      Must have a single element. The name of the tensor to be
      restored.
    shape_and_slice: A `Tensor` of type `string`.
      Scalar. The shapes and slice specifications to use when
      restoring a tensor.
    dt: A `tf.DType`. The type of the tensor to be restored.
    preferred_shard: An optional `int`. Defaults to `-1`.
      Index of file to open first if multiple files match
      `file_pattern`. See the documentation for `Restore`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `dt`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "RestoreSlice", name, file_pattern, tensor_name,
        shape_and_slice, "dt", dt, "preferred_shard", preferred_shard)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return restore_slice_eager_fallback(
          file_pattern, tensor_name, shape_and_slice, dt=dt,
          preferred_shard=preferred_shard, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  dt = _execute.make_type(dt, "dt")
  if preferred_shard is None:
    preferred_shard = -1
  preferred_shard = _execute.make_int(preferred_shard, "preferred_shard")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "RestoreSlice", file_pattern=file_pattern, tensor_name=tensor_name,
                        shape_and_slice=shape_and_slice, dt=dt,
                        preferred_shard=preferred_shard, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("dt", _op._get_attr_type("dt"), "preferred_shard",
              _op._get_attr_int("preferred_shard"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "RestoreSlice", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

RestoreSlice = tf_export("raw_ops.RestoreSlice")(_ops.to_raw_op(restore_slice))


def restore_slice_eager_fallback(file_pattern, tensor_name, shape_and_slice, dt, preferred_shard, name, ctx):
  dt = _execute.make_type(dt, "dt")
  if preferred_shard is None:
    preferred_shard = -1
  preferred_shard = _execute.make_int(preferred_shard, "preferred_shard")
  file_pattern = _ops.convert_to_tensor(file_pattern, _dtypes.string)
  tensor_name = _ops.convert_to_tensor(tensor_name, _dtypes.string)
  shape_and_slice = _ops.convert_to_tensor(shape_and_slice, _dtypes.string)
  _inputs_flat = [file_pattern, tensor_name, shape_and_slice]
  _attrs = ("dt", dt, "preferred_shard", preferred_shard)
  _result = _execute.execute(b"RestoreSlice", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "RestoreSlice", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

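# Slice-spec sketch (editorial addition; this reading of the SaveSlices spec
# format is an assumption, and the names are illustrative): the spec gives the
# full shape, then a per-dimension `start,length` (with `-` meaning the whole
# dimension), dimensions joined by `:`. E.g. rows 0..4 of a [10, 20] tensor:
#
#   rows = restore_slice(
#       file_pattern="/tmp/ckpt-*", tensor_name="embedding",
#       shape_and_slice="10 20 0,5:-", dt=tf.float32)
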
def restore_v2(prefix, tensor_names, shape_and_slices, dtypes, name=None):
  r"""Restores tensors from a V2 checkpoint.

  For backward compatibility with the V1 format, this Op currently allows
  restoring from a V1 checkpoint as well:
  - This Op first attempts to find the V2 index file pointed to by "prefix", and
    if found proceed to read it as a V2 checkpoint;
  - Otherwise the V1 read path is invoked.
  Relying on this behavior is not recommended, as the ability to fall back to read
  V1 might be deprecated and eventually removed.

  By default, restores the named tensors in full. If the caller wishes to restore
  specific slices of stored tensors, "shape_and_slices" should be non-empty
  strings and correspondingly well-formed.

  Callers must ensure all the named tensors are indeed stored in the checkpoint.

  Args:
    prefix: A `Tensor` of type `string`.
      Must have a single element. The prefix of a V2 checkpoint.
    tensor_names: A `Tensor` of type `string`.
      shape {N}. The names of the tensors to be restored.
    shape_and_slices: A `Tensor` of type `string`.
      shape {N}. The slice specs of the tensors to be restored.
      Empty strings indicate that they are non-partitioned tensors.
    dtypes: A list of `tf.DTypes` that has length `>= 1`.
      shape {N}. The list of expected dtype for the tensors. Must match
      those stored in the checkpoint.
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` objects of type `dtypes`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "RestoreV2", name, prefix, tensor_names, shape_and_slices,
        "dtypes", dtypes)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return restore_v2_eager_fallback(
          prefix, tensor_names, shape_and_slices, dtypes=dtypes, name=name,
          ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(dtypes, (list, tuple)):
    raise TypeError(
        "Expected list for 'dtypes' argument to "
        "'restore_v2' Op, not %r." % dtypes)
  dtypes = [_execute.make_type(_t, "dtypes") for _t in dtypes]
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "RestoreV2", prefix=prefix, tensor_names=tensor_names,
                     shape_and_slices=shape_and_slices, dtypes=dtypes,
                     name=name)
  _result = _outputs[:]
  if not _result:
    return _op
  if _execute.must_record_gradient():
    _attrs = ("dtypes", _op.get_attr("dtypes"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "RestoreV2", _inputs_flat, _attrs, _result)
  return _result

RestoreV2 = tf_export("raw_ops.RestoreV2")(_ops.to_raw_op(restore_v2))

1534def restore_v2_eager_fallback(prefix, tensor_names, shape_and_slices, dtypes, name, ctx):
1535 if not isinstance(dtypes, (list, tuple)):
1536 raise TypeError(
1537 "Expected list for 'dtypes' argument to "
1538 "'restore_v2' Op, not %r." % dtypes)
1539 dtypes = [_execute.make_type(_t, "dtypes") for _t in dtypes]
1540 prefix = _ops.convert_to_tensor(prefix, _dtypes.string)
1541 tensor_names = _ops.convert_to_tensor(tensor_names, _dtypes.string)
1542 shape_and_slices = _ops.convert_to_tensor(shape_and_slices, _dtypes.string)
1543 _inputs_flat = [prefix, tensor_names, shape_and_slices]
1544 _attrs = ("dtypes", dtypes)
1545 _result = _execute.execute(b"RestoreV2", len(dtypes), inputs=_inputs_flat,
1546 attrs=_attrs, ctx=ctx, name=name)
1547 if _execute.must_record_gradient():
1548 _execute.record_gradient(
1549 "RestoreV2", _inputs_flat, _attrs, _result)
1550 return _result
1553def save(filename, tensor_names, data, name=None):
1554 r"""Saves the input tensors to disk.
1556 The size of `tensor_names` must match the number of tensors in `data`. `data[i]`
1557 is written to `filename` with name `tensor_names[i]`.
1559 See also `SaveSlices`.
1561 Args:
1562 filename: A `Tensor` of type `string`.
1563 Must have a single element. The name of the file to which we write
1564 the tensor.
1565 tensor_names: A `Tensor` of type `string`.
1566 Shape `[N]`. The names of the tensors to be saved.
1567 data: A list of `Tensor` objects. `N` tensors to save.
1568 name: A name for the operation (optional).
1570 Returns:
1571 The created Operation.
1572 """
1573 _ctx = _context._context or _context.context()
1574 tld = _ctx._thread_local_data
1575 if tld.is_eager:
1576 try:
1577 _result = pywrap_tfe.TFE_Py_FastPathExecute(
1578 _ctx, "Save", name, filename, tensor_names, data)
1579 return _result
1580 except _core._NotOkStatusException as e:
1581 _ops.raise_from_not_ok_status(e, name)
1582 except _core._FallbackException:
1583 pass
1584 try:
1585 return save_eager_fallback(
1586 filename, tensor_names, data, name=name, ctx=_ctx)
1587 except _core._SymbolicException:
1588 pass # Add nodes to the TensorFlow graph.
1589 # Add nodes to the TensorFlow graph.
1590 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1591 "Save", filename=filename, tensor_names=tensor_names, data=data,
1592 name=name)
1593 return _op
1594Save = tf_export("raw_ops.Save")(_ops.to_raw_op(save))
1597def save_eager_fallback(filename, tensor_names, data, name, ctx):
1598 _attr_T, data = _execute.convert_to_mixed_eager_tensors(data, ctx)
1599 filename = _ops.convert_to_tensor(filename, _dtypes.string)
1600 tensor_names = _ops.convert_to_tensor(tensor_names, _dtypes.string)
1601 _inputs_flat = [filename, tensor_names] + list(data)
1602 _attrs = ("T", _attr_T)
1603 _result = _execute.execute(b"Save", 0, inputs=_inputs_flat, attrs=_attrs,
1604 ctx=ctx, name=name)
1605 _result = None
1606 return _result
1609def save_slices(filename, tensor_names, shapes_and_slices, data, name=None):
1610 r"""Saves input tensors slices to disk.
1612 This is like `Save` except that tensors can be listed in the saved file as being
1613 a slice of a larger tensor. `shapes_and_slices` specifies the shape of the
1614 larger tensor and the slice that this tensor covers. `shapes_and_slices` must
1615 have as many elements as `tensor_names`.
1617 Elements of the `shapes_and_slices` input must either be:
1619 * The empty string, in which case the corresponding tensor is
1620 saved normally.
1621 * A string of the form `dim0 dim1 ... dimN-1 slice-spec` where the
1622 `dimI` are the dimensions of the larger tensor and `slice-spec`
1623 specifies what part is covered by the tensor to save.
1625 `slice-spec` itself is a `:`-separated list: `slice0:slice1:...:sliceN-1`
1626 where each `sliceI` is either:
1628 * The string `-` meaning that the slice covers all indices of this dimension
1629 * `start,length` where `start` and `length` are integers. In that
1630 case the slice covers `length` indices starting at `start`.
1632 See also `Save`.
1634 Args:
1635 filename: A `Tensor` of type `string`.
1636 Must have a single element. The name of the file to which we write the
1637 tensor.
1638 tensor_names: A `Tensor` of type `string`.
1639 Shape `[N]`. The names of the tensors to be saved.
1640 shapes_and_slices: A `Tensor` of type `string`.
1641 Shape `[N]`. The shapes and slice specifications to use when
1642 saving the tensors.
1643 data: A list of `Tensor` objects. `N` tensors to save.
1644 name: A name for the operation (optional).
1646 Returns:
1647 The created Operation.
1648 """
1649 _ctx = _context._context or _context.context()
1650 tld = _ctx._thread_local_data
1651 if tld.is_eager:
1652 try:
1653 _result = pywrap_tfe.TFE_Py_FastPathExecute(
1654 _ctx, "SaveSlices", name, filename, tensor_names, shapes_and_slices,
1655 data)
1656 return _result
1657 except _core._NotOkStatusException as e:
1658 _ops.raise_from_not_ok_status(e, name)
1659 except _core._FallbackException:
1660 pass
1661 try:
1662 return save_slices_eager_fallback(
1663 filename, tensor_names, shapes_and_slices, data, name=name,
1664 ctx=_ctx)
1665 except _core._SymbolicException:
1666 pass # Add nodes to the TensorFlow graph.
1667 # Add nodes to the TensorFlow graph.
1668 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1669 "SaveSlices", filename=filename, tensor_names=tensor_names,
1670 shapes_and_slices=shapes_and_slices, data=data,
1671 name=name)
1672 return _op
1673SaveSlices = tf_export("raw_ops.SaveSlices")(_ops.to_raw_op(save_slices))
1676def save_slices_eager_fallback(filename, tensor_names, shapes_and_slices, data, name, ctx):
1677 _attr_T, data = _execute.convert_to_mixed_eager_tensors(data, ctx)
1678 filename = _ops.convert_to_tensor(filename, _dtypes.string)
1679 tensor_names = _ops.convert_to_tensor(tensor_names, _dtypes.string)
1680 shapes_and_slices = _ops.convert_to_tensor(shapes_and_slices, _dtypes.string)
1681 _inputs_flat = [filename, tensor_names, shapes_and_slices] + list(data)
1682 _attrs = ("T", _attr_T)
1683 _result = _execute.execute(b"SaveSlices", 0, inputs=_inputs_flat,
1684 attrs=_attrs, ctx=ctx, name=name)
1685 _result = None
1686 return _result
1689def save_v2(prefix, tensor_names, shape_and_slices, tensors, name=None):
1690 r"""Saves tensors in V2 checkpoint format.
1692 By default, saves the named tensors in full. If the caller wishes to save
1693 specific slices of full tensors, "shape_and_slices" should be non-empty strings
1694 and correspondingly well-formed.
1696 Args:
1697 prefix: A `Tensor` of type `string`.
1698 Must have a single element. The prefix of the V2 checkpoint to which we
1699 write the tensors.
1700 tensor_names: A `Tensor` of type `string`.
1701 shape {N}. The names of the tensors to be saved.
1702 shape_and_slices: A `Tensor` of type `string`.
1703 shape {N}. The slice specs of the tensors to be saved.
1704 Empty strings indicate that they are non-partitioned tensors.
1705 tensors: A list of `Tensor` objects. `N` tensors to save.
1706 name: A name for the operation (optional).
1708 Returns:
1709 The created Operation.
1710 """
1711 _ctx = _context._context or _context.context()
1712 tld = _ctx._thread_local_data
1713 if tld.is_eager:
1714 try:
1715 _result = pywrap_tfe.TFE_Py_FastPathExecute(
1716 _ctx, "SaveV2", name, prefix, tensor_names, shape_and_slices, tensors)
1717 return _result
1718 except _core._NotOkStatusException as e:
1719 _ops.raise_from_not_ok_status(e, name)
1720 except _core._FallbackException:
1721 pass
1722 try:
1723 return save_v2_eager_fallback(
1724 prefix, tensor_names, shape_and_slices, tensors, name=name,
1725 ctx=_ctx)
1726 except _core._SymbolicException:
1727 pass # Add nodes to the TensorFlow graph.
1728 # Add nodes to the TensorFlow graph.
1729 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1730 "SaveV2", prefix=prefix, tensor_names=tensor_names,
1731 shape_and_slices=shape_and_slices, tensors=tensors,
1732 name=name)
1733 return _op
1734SaveV2 = tf_export("raw_ops.SaveV2")(_ops.to_raw_op(save_v2))
1737def save_v2_eager_fallback(prefix, tensor_names, shape_and_slices, tensors, name, ctx):
1738 _attr_dtypes, tensors = _execute.convert_to_mixed_eager_tensors(tensors, ctx)
1739 prefix = _ops.convert_to_tensor(prefix, _dtypes.string)
1740 tensor_names = _ops.convert_to_tensor(tensor_names, _dtypes.string)
1741 shape_and_slices = _ops.convert_to_tensor(shape_and_slices, _dtypes.string)
1742 _inputs_flat = [prefix, tensor_names, shape_and_slices] + list(tensors)
1743 _attrs = ("dtypes", _attr_dtypes)
1744 _result = _execute.execute(b"SaveV2", 0, inputs=_inputs_flat, attrs=_attrs,
1745 ctx=ctx, name=name)
1746 _result = None
1747 return _result
1750def sharded_filename(basename, shard, num_shards, name=None):
1751 r"""Generate a sharded filename. The filename is printf formatted as
1753 %s-%05d-of-%05d, basename, shard, num_shards.
1755 Args:
1756 basename: A `Tensor` of type `string`.
1757 shard: A `Tensor` of type `int32`.
1758 num_shards: A `Tensor` of type `int32`.
1759 name: A name for the operation (optional).
1761 Returns:
1762 A `Tensor` of type `string`.
1763 """
1764 _ctx = _context._context or _context.context()
1765 tld = _ctx._thread_local_data
1766 if tld.is_eager:
1767 try:
1768 _result = pywrap_tfe.TFE_Py_FastPathExecute(
1769 _ctx, "ShardedFilename", name, basename, shard, num_shards)
1770 return _result
1771 except _core._NotOkStatusException as e:
1772 _ops.raise_from_not_ok_status(e, name)
1773 except _core._FallbackException:
1774 pass
1775 try:
1776 return sharded_filename_eager_fallback(
1777 basename, shard, num_shards, name=name, ctx=_ctx)
1778 except _core._SymbolicException:
1779 pass # Add nodes to the TensorFlow graph.
1780 # Add nodes to the TensorFlow graph.
1781 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1782 "ShardedFilename", basename=basename, shard=shard,
1783 num_shards=num_shards, name=name)
1784 _result = _outputs[:]
1785 if _execute.must_record_gradient():
1786 _attrs = ()
1787 _inputs_flat = _op.inputs
1788 _execute.record_gradient(
1789 "ShardedFilename", _inputs_flat, _attrs, _result)
1790 _result, = _result
1791 return _result
1793ShardedFilename = tf_export("raw_ops.ShardedFilename")(_ops.to_raw_op(sharded_filename))
1796def sharded_filename_eager_fallback(basename, shard, num_shards, name, ctx):
1797 basename = _ops.convert_to_tensor(basename, _dtypes.string)
1798 shard = _ops.convert_to_tensor(shard, _dtypes.int32)
1799 num_shards = _ops.convert_to_tensor(num_shards, _dtypes.int32)
1800 _inputs_flat = [basename, shard, num_shards]
1801 _attrs = None
1802 _result = _execute.execute(b"ShardedFilename", 1, inputs=_inputs_flat,
1803 attrs=_attrs, ctx=ctx, name=name)
1804 if _execute.must_record_gradient():
1805 _execute.record_gradient(
1806 "ShardedFilename", _inputs_flat, _attrs, _result)
1807 _result, = _result
1808 return _result
1811def sharded_filespec(basename, num_shards, name=None):
1812 r"""Generate a glob pattern matching all sharded file names.
1814 Args:
1815 basename: A `Tensor` of type `string`.
1816 num_shards: A `Tensor` of type `int32`.
1817 name: A name for the operation (optional).
1819 Returns:
1820 A `Tensor` of type `string`.
1821 """
1822 _ctx = _context._context or _context.context()
1823 tld = _ctx._thread_local_data
1824 if tld.is_eager:
1825 try:
1826 _result = pywrap_tfe.TFE_Py_FastPathExecute(
1827 _ctx, "ShardedFilespec", name, basename, num_shards)
1828 return _result
1829 except _core._NotOkStatusException as e:
1830 _ops.raise_from_not_ok_status(e, name)
1831 except _core._FallbackException:
1832 pass
1833 try:
1834 return sharded_filespec_eager_fallback(
1835 basename, num_shards, name=name, ctx=_ctx)
1836 except _core._SymbolicException:
1837 pass # Add nodes to the TensorFlow graph.
1838 # Add nodes to the TensorFlow graph.
1839 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1840 "ShardedFilespec", basename=basename, num_shards=num_shards,
1841 name=name)
1842 _result = _outputs[:]
1843 if _execute.must_record_gradient():
1844 _attrs = ()
1845 _inputs_flat = _op.inputs
1846 _execute.record_gradient(
1847 "ShardedFilespec", _inputs_flat, _attrs, _result)
1848 _result, = _result
1849 return _result
1851ShardedFilespec = tf_export("raw_ops.ShardedFilespec")(_ops.to_raw_op(sharded_filespec))
1854def sharded_filespec_eager_fallback(basename, num_shards, name, ctx):
1855 basename = _ops.convert_to_tensor(basename, _dtypes.string)
1856 num_shards = _ops.convert_to_tensor(num_shards, _dtypes.int32)
1857 _inputs_flat = [basename, num_shards]
1858 _attrs = None
1859 _result = _execute.execute(b"ShardedFilespec", 1, inputs=_inputs_flat,
1860 attrs=_attrs, ctx=ctx, name=name)
1861 if _execute.must_record_gradient():
1862 _execute.record_gradient(
1863 "ShardedFilespec", _inputs_flat, _attrs, _result)
1864 _result, = _result
1865 return _result
1868def tf_record_reader(container="", shared_name="", compression_type="", name=None):
1869 r"""A Reader that outputs the records from a TensorFlow Records file.
1871 Args:
1872 container: An optional `string`. Defaults to `""`.
1873 If non-empty, this reader is placed in the given container.
1874 Otherwise, a default container is used.
1875 shared_name: An optional `string`. Defaults to `""`.
1876 If non-empty, this reader is named in the given bucket
1877 with this shared_name. Otherwise, the node name is used instead.
1878 compression_type: An optional `string`. Defaults to `""`.
1879 name: A name for the operation (optional).
1881 Returns:
1882 A `Tensor` of type mutable `string`.
1883 """
1884 _ctx = _context._context or _context.context()
1885 tld = _ctx._thread_local_data
1886 if tld.is_eager:
1887 raise RuntimeError("tf_record_reader op does not support eager execution. Arg 'reader_handle' is a ref.")
1888 # Add nodes to the TensorFlow graph.
1889 if container is None:
1890 container = ""
1891 container = _execute.make_str(container, "container")
1892 if shared_name is None:
1893 shared_name = ""
1894 shared_name = _execute.make_str(shared_name, "shared_name")
1895 if compression_type is None:
1896 compression_type = ""
1897 compression_type = _execute.make_str(compression_type, "compression_type")
1898 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1899 "TFRecordReader", container=container, shared_name=shared_name,
1900 compression_type=compression_type, name=name)
1901 _result = _outputs[:]
1902 if _execute.must_record_gradient():
1903 _attrs = ("container", _op.get_attr("container"), "shared_name",
1904 _op.get_attr("shared_name"), "compression_type",
1905 _op.get_attr("compression_type"))
1906 _inputs_flat = _op.inputs
1907 _execute.record_gradient(
1908 "TFRecordReader", _inputs_flat, _attrs, _result)
1909 _result, = _result
1910 return _result
1912TFRecordReader = tf_export("raw_ops.TFRecordReader")(_ops.to_raw_op(tf_record_reader))
1915def tf_record_reader_eager_fallback(container, shared_name, compression_type, name, ctx):
1916 raise RuntimeError("tf_record_reader op does not support eager execution. Arg 'reader_handle' is a ref.")
1918def tf_record_reader_v2(container="", shared_name="", compression_type="", name=None):
1919 r"""A Reader that outputs the records from a TensorFlow Records file.
1921 Args:
1922 container: An optional `string`. Defaults to `""`.
1923 If non-empty, this reader is placed in the given container.
1924 Otherwise, a default container is used.
1925 shared_name: An optional `string`. Defaults to `""`.
1926 If non-empty, this reader is named in the given bucket
1927 with this shared_name. Otherwise, the node name is used instead.
1928 compression_type: An optional `string`. Defaults to `""`.
1929 name: A name for the operation (optional).
1931 Returns:
1932 A `Tensor` of type `resource`.
1933 """
1934 _ctx = _context._context or _context.context()
1935 tld = _ctx._thread_local_data
1936 if tld.is_eager:
1937 try:
1938 _result = pywrap_tfe.TFE_Py_FastPathExecute(
1939 _ctx, "TFRecordReaderV2", name, "container", container, "shared_name",
1940 shared_name, "compression_type", compression_type)
1941 return _result
1942 except _core._NotOkStatusException as e:
1943 _ops.raise_from_not_ok_status(e, name)
1944 except _core._FallbackException:
1945 pass
1946 try:
1947 return tf_record_reader_v2_eager_fallback(
1948 container=container, shared_name=shared_name,
1949 compression_type=compression_type, name=name, ctx=_ctx)
1950 except _core._SymbolicException:
1951 pass # Add nodes to the TensorFlow graph.
1952 # Add nodes to the TensorFlow graph.
1953 if container is None:
1954 container = ""
1955 container = _execute.make_str(container, "container")
1956 if shared_name is None:
1957 shared_name = ""
1958 shared_name = _execute.make_str(shared_name, "shared_name")
1959 if compression_type is None:
1960 compression_type = ""
1961 compression_type = _execute.make_str(compression_type, "compression_type")
1962 _, _, _op, _outputs = _op_def_library._apply_op_helper(
1963 "TFRecordReaderV2", container=container, shared_name=shared_name,
1964 compression_type=compression_type, name=name)
1965 _result = _outputs[:]
1966 if _execute.must_record_gradient():
1967 _attrs = ("container", _op.get_attr("container"), "shared_name",
1968 _op.get_attr("shared_name"), "compression_type",
1969 _op.get_attr("compression_type"))
1970 _inputs_flat = _op.inputs
1971 _execute.record_gradient(
1972 "TFRecordReaderV2", _inputs_flat, _attrs, _result)
1973 _result, = _result
1974 return _result
1976TFRecordReaderV2 = tf_export("raw_ops.TFRecordReaderV2")(_ops.to_raw_op(tf_record_reader_v2))
1979def tf_record_reader_v2_eager_fallback(container, shared_name, compression_type, name, ctx):
1980 if container is None:
1981 container = ""
1982 container = _execute.make_str(container, "container")
1983 if shared_name is None:
1984 shared_name = ""
1985 shared_name = _execute.make_str(shared_name, "shared_name")
1986 if compression_type is None:
1987 compression_type = ""
1988 compression_type = _execute.make_str(compression_type, "compression_type")
1989 _inputs_flat = []
1990 _attrs = ("container", container, "shared_name", shared_name,
1991 "compression_type", compression_type)
1992 _result = _execute.execute(b"TFRecordReaderV2", 1, inputs=_inputs_flat,
1993 attrs=_attrs, ctx=ctx, name=name)
1994 if _execute.must_record_gradient():
1995 _execute.record_gradient(
1996 "TFRecordReaderV2", _inputs_flat, _attrs, _result)
1997 _result, = _result
1998 return _result
2001def text_line_reader(skip_header_lines=0, container="", shared_name="", name=None):
2002 r"""A Reader that outputs the lines of a file delimited by '\n'.
2004 Args:
2005 skip_header_lines: An optional `int`. Defaults to `0`.
2006 Number of lines to skip from the beginning of every file.
2007 container: An optional `string`. Defaults to `""`.
2008 If non-empty, this reader is placed in the given container.
2009 Otherwise, a default container is used.
2010 shared_name: An optional `string`. Defaults to `""`.
2011 If non-empty, this reader is named in the given bucket
2012 with this shared_name. Otherwise, the node name is used instead.
2013 name: A name for the operation (optional).
2015 Returns:
2016 A `Tensor` of type mutable `string`.
2017 """
2018 _ctx = _context._context or _context.context()
2019 tld = _ctx._thread_local_data
2020 if tld.is_eager:
2021 raise RuntimeError("text_line_reader op does not support eager execution. Arg 'reader_handle' is a ref.")
2022 # Add nodes to the TensorFlow graph.
2023 if skip_header_lines is None:
2024 skip_header_lines = 0
2025 skip_header_lines = _execute.make_int(skip_header_lines, "skip_header_lines")
2026 if container is None:
2027 container = ""
2028 container = _execute.make_str(container, "container")
2029 if shared_name is None:
2030 shared_name = ""
2031 shared_name = _execute.make_str(shared_name, "shared_name")
2032 _, _, _op, _outputs = _op_def_library._apply_op_helper(
2033 "TextLineReader", skip_header_lines=skip_header_lines,
2034 container=container, shared_name=shared_name,
2035 name=name)
2036 _result = _outputs[:]
2037 if _execute.must_record_gradient():
2038 _attrs = ("skip_header_lines", _op._get_attr_int("skip_header_lines"),
2039 "container", _op.get_attr("container"), "shared_name",
2040 _op.get_attr("shared_name"))
2041 _inputs_flat = _op.inputs
2042 _execute.record_gradient(
2043 "TextLineReader", _inputs_flat, _attrs, _result)
2044 _result, = _result
2045 return _result
2047TextLineReader = tf_export("raw_ops.TextLineReader")(_ops.to_raw_op(text_line_reader))
2050def text_line_reader_eager_fallback(skip_header_lines, container, shared_name, name, ctx):
2051 raise RuntimeError("text_line_reader op does not support eager execution. Arg 'reader_handle' is a ref.")
2053def text_line_reader_v2(skip_header_lines=0, container="", shared_name="", name=None):
2054 r"""A Reader that outputs the lines of a file delimited by '\n'.
2056 Args:
2057 skip_header_lines: An optional `int`. Defaults to `0`.
2058 Number of lines to skip from the beginning of every file.
2059 container: An optional `string`. Defaults to `""`.
2060 If non-empty, this reader is placed in the given container.
2061 Otherwise, a default container is used.
2062 shared_name: An optional `string`. Defaults to `""`.
2063 If non-empty, this reader is named in the given bucket
2064 with this shared_name. Otherwise, the node name is used instead.
2065 name: A name for the operation (optional).
2067 Returns:
2068 A `Tensor` of type `resource`.
2069 """
2070 _ctx = _context._context or _context.context()
2071 tld = _ctx._thread_local_data
2072 if tld.is_eager:
2073 try:
2074 _result = pywrap_tfe.TFE_Py_FastPathExecute(
2075 _ctx, "TextLineReaderV2", name, "skip_header_lines",
2076 skip_header_lines, "container", container, "shared_name", shared_name)
2077 return _result
2078 except _core._NotOkStatusException as e:
2079 _ops.raise_from_not_ok_status(e, name)
2080 except _core._FallbackException:
2081 pass
2082 try:
2083 return text_line_reader_v2_eager_fallback(
2084 skip_header_lines=skip_header_lines, container=container,
2085 shared_name=shared_name, name=name, ctx=_ctx)
2086 except _core._SymbolicException:
2087 pass # Add nodes to the TensorFlow graph.
2088 # Add nodes to the TensorFlow graph.
2089 if skip_header_lines is None:
2090 skip_header_lines = 0
2091 skip_header_lines = _execute.make_int(skip_header_lines, "skip_header_lines")
2092 if container is None:
2093 container = ""
2094 container = _execute.make_str(container, "container")
2095 if shared_name is None:
2096 shared_name = ""
2097 shared_name = _execute.make_str(shared_name, "shared_name")
2098 _, _, _op, _outputs = _op_def_library._apply_op_helper(
2099 "TextLineReaderV2", skip_header_lines=skip_header_lines,
2100 container=container, shared_name=shared_name,
2101 name=name)
2102 _result = _outputs[:]
2103 if _execute.must_record_gradient():
2104 _attrs = ("skip_header_lines", _op._get_attr_int("skip_header_lines"),
2105 "container", _op.get_attr("container"), "shared_name",
2106 _op.get_attr("shared_name"))
2107 _inputs_flat = _op.inputs
2108 _execute.record_gradient(
2109 "TextLineReaderV2", _inputs_flat, _attrs, _result)
2110 _result, = _result
2111 return _result
2113TextLineReaderV2 = tf_export("raw_ops.TextLineReaderV2")(_ops.to_raw_op(text_line_reader_v2))
2116def text_line_reader_v2_eager_fallback(skip_header_lines, container, shared_name, name, ctx):
2117 if skip_header_lines is None:
2118 skip_header_lines = 0
2119 skip_header_lines = _execute.make_int(skip_header_lines, "skip_header_lines")
2120 if container is None:
2121 container = ""
2122 container = _execute.make_str(container, "container")
2123 if shared_name is None:
2124 shared_name = ""
2125 shared_name = _execute.make_str(shared_name, "shared_name")
2126 _inputs_flat = []
2127 _attrs = ("skip_header_lines", skip_header_lines, "container", container,
2128 "shared_name", shared_name)
2129 _result = _execute.execute(b"TextLineReaderV2", 1, inputs=_inputs_flat,
2130 attrs=_attrs, ctx=ctx, name=name)
2131 if _execute.must_record_gradient():
2132 _execute.record_gradient(
2133 "TextLineReaderV2", _inputs_flat, _attrs, _result)
2134 _result, = _result
2135 return _result
2138def whole_file_reader(container="", shared_name="", name=None):
2139 r"""A Reader that outputs the entire contents of a file as a value.
2141 To use, enqueue filenames in a Queue. The output of ReaderRead will
2142 be a filename (key) and the contents of that file (value).
2144 Args:
2145 container: An optional `string`. Defaults to `""`.
2146 If non-empty, this reader is placed in the given container.
2147 Otherwise, a default container is used.
2148 shared_name: An optional `string`. Defaults to `""`.
2149 If non-empty, this reader is named in the given bucket
2150 with this shared_name. Otherwise, the node name is used instead.
2151 name: A name for the operation (optional).
2153 Returns:
2154 A `Tensor` of type mutable `string`.
2155 """
2156 _ctx = _context._context or _context.context()
2157 tld = _ctx._thread_local_data
2158 if tld.is_eager:
2159 raise RuntimeError("whole_file_reader op does not support eager execution. Arg 'reader_handle' is a ref.")
2160 # Add nodes to the TensorFlow graph.
2161 if container is None:
2162 container = ""
2163 container = _execute.make_str(container, "container")
2164 if shared_name is None:
2165 shared_name = ""
2166 shared_name = _execute.make_str(shared_name, "shared_name")
2167 _, _, _op, _outputs = _op_def_library._apply_op_helper(
2168 "WholeFileReader", container=container, shared_name=shared_name,
2169 name=name)
2170 _result = _outputs[:]
2171 if _execute.must_record_gradient():
2172 _attrs = ("container", _op.get_attr("container"), "shared_name",
2173 _op.get_attr("shared_name"))
2174 _inputs_flat = _op.inputs
2175 _execute.record_gradient(
2176 "WholeFileReader", _inputs_flat, _attrs, _result)
2177 _result, = _result
2178 return _result
2180WholeFileReader = tf_export("raw_ops.WholeFileReader")(_ops.to_raw_op(whole_file_reader))
2183def whole_file_reader_eager_fallback(container, shared_name, name, ctx):
2184 raise RuntimeError("whole_file_reader op does not support eager execution. Arg 'reader_handle' is a ref.")
2186def whole_file_reader_v2(container="", shared_name="", name=None):
2187 r"""A Reader that outputs the entire contents of a file as a value.
2189 To use, enqueue filenames in a Queue. The output of ReaderRead will
2190 be a filename (key) and the contents of that file (value).
2192 Args:
2193 container: An optional `string`. Defaults to `""`.
2194 If non-empty, this reader is placed in the given container.
2195 Otherwise, a default container is used.
2196 shared_name: An optional `string`. Defaults to `""`.
2197 If non-empty, this reader is named in the given bucket
2198 with this shared_name. Otherwise, the node name is used instead.
2199 name: A name for the operation (optional).
2201 Returns:
2202 A `Tensor` of type `resource`.
2203 """
2204 _ctx = _context._context or _context.context()
2205 tld = _ctx._thread_local_data
2206 if tld.is_eager:
2207 try:
2208 _result = pywrap_tfe.TFE_Py_FastPathExecute(
2209 _ctx, "WholeFileReaderV2", name, "container", container,
2210 "shared_name", shared_name)
2211 return _result
2212 except _core._NotOkStatusException as e:
2213 _ops.raise_from_not_ok_status(e, name)
2214 except _core._FallbackException:
2215 pass
2216 try:
2217 return whole_file_reader_v2_eager_fallback(
2218 container=container, shared_name=shared_name, name=name, ctx=_ctx)
2219 except _core._SymbolicException:
2220 pass # Add nodes to the TensorFlow graph.
2221 # Add nodes to the TensorFlow graph.
2222 if container is None:
2223 container = ""
2224 container = _execute.make_str(container, "container")
2225 if shared_name is None:
2226 shared_name = ""
2227 shared_name = _execute.make_str(shared_name, "shared_name")
2228 _, _, _op, _outputs = _op_def_library._apply_op_helper(
2229 "WholeFileReaderV2", container=container, shared_name=shared_name,
2230 name=name)
2231 _result = _outputs[:]
2232 if _execute.must_record_gradient():
2233 _attrs = ("container", _op.get_attr("container"), "shared_name",
2234 _op.get_attr("shared_name"))
2235 _inputs_flat = _op.inputs
2236 _execute.record_gradient(
2237 "WholeFileReaderV2", _inputs_flat, _attrs, _result)
2238 _result, = _result
2239 return _result
2241WholeFileReaderV2 = tf_export("raw_ops.WholeFileReaderV2")(_ops.to_raw_op(whole_file_reader_v2))
2244def whole_file_reader_v2_eager_fallback(container, shared_name, name, ctx):
2245 if container is None:
2246 container = ""
2247 container = _execute.make_str(container, "container")
2248 if shared_name is None:
2249 shared_name = ""
2250 shared_name = _execute.make_str(shared_name, "shared_name")
2251 _inputs_flat = []
2252 _attrs = ("container", container, "shared_name", shared_name)
2253 _result = _execute.execute(b"WholeFileReaderV2", 1, inputs=_inputs_flat,
2254 attrs=_attrs, ctx=ctx, name=name)
2255 if _execute.must_record_gradient():
2256 _execute.record_gradient(
2257 "WholeFileReaderV2", _inputs_flat, _attrs, _result)
2258 _result, = _result
2259 return _result
2262@_dispatch.add_fallback_dispatch_list
2263@_dispatch.add_type_based_api_dispatcher
2264@tf_export('io.write_file', v1=['io.write_file', 'write_file'])
2265@deprecated_endpoints('write_file')
2266def write_file(filename, contents, name=None):
2267 r"""Writes `contents` to the file at input `filename`.
2269 Creates the file and recursively creates directory if it does not exist.
2271 Args:
2272 filename: A `Tensor` of type `string`.
2273 scalar. The name of the file to which we write the contents.
2274 contents: A `Tensor` of type `string`.
2275 scalar. The content to be written to the output file.
2276 name: A name for the operation (optional).
2278 Returns:
2279 The created Operation.
2280 """
2281 _ctx = _context._context or _context.context()
2282 tld = _ctx._thread_local_data
2283 if tld.is_eager:
2284 try:
2285 _result = pywrap_tfe.TFE_Py_FastPathExecute(
2286 _ctx, "WriteFile", name, filename, contents)
2287 return _result
2288 except _core._NotOkStatusException as e:
2289 _ops.raise_from_not_ok_status(e, name)
2290 except _core._FallbackException:
2291 pass
2292 try:
2293 _result = _dispatcher_for_write_file(
2294 (filename, contents, name,), None)
2295 if _result is not NotImplemented:
2296 return _result
2297 return write_file_eager_fallback(
2298 filename, contents, name=name, ctx=_ctx)
2299 except _core._SymbolicException:
2300 pass # Add nodes to the TensorFlow graph.
2301 except (TypeError, ValueError):
2302 _result = _dispatch.dispatch(
2303 write_file, (), dict(filename=filename, contents=contents,
2304 name=name)
2305 )
2306 if _result is not _dispatch.OpDispatcher.NOT_SUPPORTED:
2307 return _result
2308 raise
2309 else:
2310 _result = _dispatcher_for_write_file(
2311 (filename, contents, name,), None)
2312 if _result is not NotImplemented:
2313 return _result
2314 # Add nodes to the TensorFlow graph.
2315 try:
2316 _, _, _op, _outputs = _op_def_library._apply_op_helper(
2317 "WriteFile", filename=filename, contents=contents, name=name)
2318 except (TypeError, ValueError):
2319 _result = _dispatch.dispatch(
2320 write_file, (), dict(filename=filename, contents=contents,
2321 name=name)
2322 )
2323 if _result is not _dispatch.OpDispatcher.NOT_SUPPORTED:
2324 return _result
2325 raise
2326 return _op
2327WriteFile = tf_export("raw_ops.WriteFile")(_ops.to_raw_op(write_file))
2328_dispatcher_for_write_file = write_file._tf_type_based_dispatcher.Dispatch
2331def write_file_eager_fallback(filename, contents, name, ctx):
2332 filename = _ops.convert_to_tensor(filename, _dtypes.string)
2333 contents = _ops.convert_to_tensor(contents, _dtypes.string)
2334 _inputs_flat = [filename, contents]
2335 _attrs = None
2336 _result = _execute.execute(b"WriteFile", 0, inputs=_inputs_flat,
2337 attrs=_attrs, ctx=ctx, name=name)
2338 _result = None
2339 return _result