Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/tensorflow/python/ops/gen_dataset_ops.py: 7% (4281 statements)

coverage.py v7.4.0, created at 2024-01-03 07:57 +0000

1"""Python wrappers around TensorFlow ops. 

2 

3This file is MACHINE GENERATED! Do not edit. 

4""" 

5 

6import collections 

7 

8from tensorflow.python import pywrap_tfe as pywrap_tfe 

9from tensorflow.python.eager import context as _context 

10from tensorflow.python.eager import core as _core 

11from tensorflow.python.eager import execute as _execute 

12from tensorflow.python.framework import dtypes as _dtypes 

13from tensorflow.security.fuzzing.py import annotation_types as _atypes 

14 

15from tensorflow.python.framework import op_def_registry as _op_def_registry 

16from tensorflow.python.framework import ops as _ops 

17from tensorflow.python.framework import op_def_library as _op_def_library 

18from tensorflow.python.util.deprecation import deprecated_endpoints 

19from tensorflow.python.util import dispatch as _dispatch 

20from tensorflow.python.util.tf_export import tf_export 

21 

22from typing import TypeVar 

23 

def anonymous_iterator(output_types, output_shapes, name=None):
  r"""A container for an iterator resource.

  Args:
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "AnonymousIterator", name, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return anonymous_iterator_eager_fallback(
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'anonymous_iterator' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'anonymous_iterator' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "AnonymousIterator", output_types=output_types,
                             output_shapes=output_shapes, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "AnonymousIterator", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

AnonymousIterator = tf_export("raw_ops.AnonymousIterator")(_ops.to_raw_op(anonymous_iterator))


def anonymous_iterator_eager_fallback(output_types, output_shapes, name, ctx):
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'anonymous_iterator' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'anonymous_iterator' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _inputs_flat = []
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"AnonymousIterator", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "AnonymousIterator", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
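
# ---------------------------------------------------------------------------
# Editor's note: a minimal usage sketch for the op above (not part of the
# generated file). It assumes TF 2.x eager mode; `Dataset._variant_tensor` is
# a private attribute, used here only to obtain the dataset's variant tensor:
#
#   import tensorflow as tf
#   ds = tf.data.Dataset.range(3)
#   it = tf.raw_ops.AnonymousIterator(output_types=[tf.int64],
#                                     output_shapes=[tf.TensorShape([])])
#   tf.raw_ops.MakeIterator(dataset=ds._variant_tensor, iterator=it)
#   tf.raw_ops.IteratorGetNext(iterator=it, output_types=[tf.int64],
#                              output_shapes=[tf.TensorShape([])])  # [0], then [1], ...
# ---------------------------------------------------------------------------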

_AnonymousIteratorV2Output = collections.namedtuple(
    "AnonymousIteratorV2",
    ["handle", "deleter"])


def anonymous_iterator_v2(output_types, output_shapes, name=None):
  r"""A container for an iterator resource.

  Args:
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (handle, deleter).

    handle: A `Tensor` of type `resource`.
    deleter: A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "AnonymousIteratorV2", name, "output_types", output_types,
        "output_shapes", output_shapes)
      _result = _AnonymousIteratorV2Output._make(_result)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return anonymous_iterator_v2_eager_fallback(
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'anonymous_iterator_v2' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'anonymous_iterator_v2' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "AnonymousIteratorV2", output_types=output_types,
                               output_shapes=output_shapes, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "AnonymousIteratorV2", _inputs_flat, _attrs, _result)
  _result = _AnonymousIteratorV2Output._make(_result)
  return _result

AnonymousIteratorV2 = tf_export("raw_ops.AnonymousIteratorV2")(_ops.to_raw_op(anonymous_iterator_v2))


def anonymous_iterator_v2_eager_fallback(output_types, output_shapes, name, ctx):
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'anonymous_iterator_v2' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'anonymous_iterator_v2' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _inputs_flat = []
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"AnonymousIteratorV2", 2, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "AnonymousIteratorV2", _inputs_flat, _attrs, _result)
  _result = _AnonymousIteratorV2Output._make(_result)
  return _result
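
# ---------------------------------------------------------------------------
# Editor's note: unlike V1, AnonymousIteratorV2 also returns a `deleter`
# variant that must be handed back to DeleteIterator to release the resource.
# A sketch, assuming TF 2.x eager mode:
#
#   import tensorflow as tf
#   handle, deleter = tf.raw_ops.AnonymousIteratorV2(
#       output_types=[tf.int64], output_shapes=[tf.TensorShape([])])
#   # ... use `handle` with MakeIterator / IteratorGetNext as above ...
#   tf.raw_ops.DeleteIterator(handle=handle, deleter=deleter)
# ---------------------------------------------------------------------------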

def anonymous_iterator_v3(output_types, output_shapes, name=None):
  r"""A container for an iterator resource.

  Args:
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "AnonymousIteratorV3", name, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return anonymous_iterator_v3_eager_fallback(
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'anonymous_iterator_v3' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'anonymous_iterator_v3' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "AnonymousIteratorV3", output_types=output_types,
                               output_shapes=output_shapes, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "AnonymousIteratorV3", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

AnonymousIteratorV3 = tf_export("raw_ops.AnonymousIteratorV3")(_ops.to_raw_op(anonymous_iterator_v3))


def anonymous_iterator_v3_eager_fallback(output_types, output_shapes, name, ctx):
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'anonymous_iterator_v3' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'anonymous_iterator_v3' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _inputs_flat = []
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"AnonymousIteratorV3", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "AnonymousIteratorV3", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
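
# ---------------------------------------------------------------------------
# Editor's note: V3 drops the explicit `deleter` output again; the returned
# resource handle is reference-counted, so in eager mode the iterator is
# destroyed when the handle goes out of scope. Sketch:
#
#   import tensorflow as tf
#   it = tf.raw_ops.AnonymousIteratorV3(output_types=[tf.int64],
#                                       output_shapes=[tf.TensorShape([])])
# ---------------------------------------------------------------------------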

_AnonymousMemoryCacheOutput = collections.namedtuple(
    "AnonymousMemoryCache",
    ["handle", "deleter"])


def anonymous_memory_cache(name=None):
  r"""A container for a memory cache resource.

  Args:
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (handle, deleter).

    handle: A `Tensor` of type `resource`.
    deleter: A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "AnonymousMemoryCache", name)
      _result = _AnonymousMemoryCacheOutput._make(_result)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return anonymous_memory_cache_eager_fallback(
          name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "AnonymousMemoryCache", name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ()
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "AnonymousMemoryCache", _inputs_flat, _attrs, _result)
  _result = _AnonymousMemoryCacheOutput._make(_result)
  return _result

AnonymousMemoryCache = tf_export("raw_ops.AnonymousMemoryCache")(_ops.to_raw_op(anonymous_memory_cache))


def anonymous_memory_cache_eager_fallback(name, ctx):
  _inputs_flat = []
  _attrs = None
  _result = _execute.execute(b"AnonymousMemoryCache", 2, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "AnonymousMemoryCache", _inputs_flat, _attrs, _result)
  _result = _AnonymousMemoryCacheOutput._make(_result)
  return _result

_AnonymousMultiDeviceIteratorOutput = collections.namedtuple(
    "AnonymousMultiDeviceIterator",
    ["handle", "deleter"])


def anonymous_multi_device_iterator(devices, output_types, output_shapes, name=None):
  r"""A container for a multi device iterator resource.

  Args:
    devices: A list of `strings` that has length `>= 1`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (handle, deleter).

    handle: A `Tensor` of type `resource`.
    deleter: A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "AnonymousMultiDeviceIterator", name, "devices", devices,
        "output_types", output_types, "output_shapes", output_shapes)
      _result = _AnonymousMultiDeviceIteratorOutput._make(_result)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return anonymous_multi_device_iterator_eager_fallback(
          devices=devices, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(devices, (list, tuple)):
    raise TypeError(
        "Expected list for 'devices' argument to "
        "'anonymous_multi_device_iterator' Op, not %r." % devices)
  devices = [_execute.make_str(_s, "devices") for _s in devices]
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'anonymous_multi_device_iterator' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'anonymous_multi_device_iterator' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "AnonymousMultiDeviceIterator", devices=devices,
                                        output_types=output_types,
                                        output_shapes=output_shapes,
                                        name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("devices", _op.get_attr("devices"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "AnonymousMultiDeviceIterator", _inputs_flat, _attrs, _result)
  _result = _AnonymousMultiDeviceIteratorOutput._make(_result)
  return _result

AnonymousMultiDeviceIterator = tf_export("raw_ops.AnonymousMultiDeviceIterator")(_ops.to_raw_op(anonymous_multi_device_iterator))


def anonymous_multi_device_iterator_eager_fallback(devices, output_types, output_shapes, name, ctx):
  if not isinstance(devices, (list, tuple)):
    raise TypeError(
        "Expected list for 'devices' argument to "
        "'anonymous_multi_device_iterator' Op, not %r." % devices)
  devices = [_execute.make_str(_s, "devices") for _s in devices]
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'anonymous_multi_device_iterator' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'anonymous_multi_device_iterator' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _inputs_flat = []
  _attrs = ("devices", devices, "output_types", output_types, "output_shapes",
  output_shapes)
  _result = _execute.execute(b"AnonymousMultiDeviceIterator", 2,
                             inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
                             name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "AnonymousMultiDeviceIterator", _inputs_flat, _attrs, _result)
  _result = _AnonymousMultiDeviceIteratorOutput._make(_result)
  return _result

def anonymous_multi_device_iterator_v3(devices, output_types, output_shapes, name=None):
  r"""A container for a multi device iterator resource.

  Args:
    devices: A list of `strings` that has length `>= 1`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "AnonymousMultiDeviceIteratorV3", name, "devices", devices,
        "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return anonymous_multi_device_iterator_v3_eager_fallback(
          devices=devices, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(devices, (list, tuple)):
    raise TypeError(
        "Expected list for 'devices' argument to "
        "'anonymous_multi_device_iterator_v3' Op, not %r." % devices)
  devices = [_execute.make_str(_s, "devices") for _s in devices]
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'anonymous_multi_device_iterator_v3' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'anonymous_multi_device_iterator_v3' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "AnonymousMultiDeviceIteratorV3", devices=devices,
                                          output_types=output_types,
                                          output_shapes=output_shapes,
                                          name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("devices", _op.get_attr("devices"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "AnonymousMultiDeviceIteratorV3", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

AnonymousMultiDeviceIteratorV3 = tf_export("raw_ops.AnonymousMultiDeviceIteratorV3")(_ops.to_raw_op(anonymous_multi_device_iterator_v3))


def anonymous_multi_device_iterator_v3_eager_fallback(devices, output_types, output_shapes, name, ctx):
  if not isinstance(devices, (list, tuple)):
    raise TypeError(
        "Expected list for 'devices' argument to "
        "'anonymous_multi_device_iterator_v3' Op, not %r." % devices)
  devices = [_execute.make_str(_s, "devices") for _s in devices]
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'anonymous_multi_device_iterator_v3' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'anonymous_multi_device_iterator_v3' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _inputs_flat = []
  _attrs = ("devices", devices, "output_types", output_types, "output_shapes",
  output_shapes)
  _result = _execute.execute(b"AnonymousMultiDeviceIteratorV3", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
                             name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "AnonymousMultiDeviceIteratorV3", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

_AnonymousRandomSeedGeneratorOutput = collections.namedtuple(
    "AnonymousRandomSeedGenerator",
    ["handle", "deleter"])


def anonymous_random_seed_generator(seed, seed2, name=None):
  r"""A container for a random seed generator resource.

  Args:
    seed: A `Tensor` of type `int64`.
    seed2: A `Tensor` of type `int64`.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (handle, deleter).

    handle: A `Tensor` of type `resource`.
    deleter: A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "AnonymousRandomSeedGenerator", name, seed, seed2)
      _result = _AnonymousRandomSeedGeneratorOutput._make(_result)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return anonymous_random_seed_generator_eager_fallback(
          seed, seed2, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "AnonymousRandomSeedGenerator", seed=seed, seed2=seed2, name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ()
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "AnonymousRandomSeedGenerator", _inputs_flat, _attrs, _result)
  _result = _AnonymousRandomSeedGeneratorOutput._make(_result)
  return _result

AnonymousRandomSeedGenerator = tf_export("raw_ops.AnonymousRandomSeedGenerator")(_ops.to_raw_op(anonymous_random_seed_generator))


def anonymous_random_seed_generator_eager_fallback(seed, seed2, name, ctx):
  seed = _ops.convert_to_tensor(seed, _dtypes.int64)
  seed2 = _ops.convert_to_tensor(seed2, _dtypes.int64)
  _inputs_flat = [seed, seed2]
  _attrs = None
  _result = _execute.execute(b"AnonymousRandomSeedGenerator", 2,
                             inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
                             name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "AnonymousRandomSeedGenerator", _inputs_flat, _attrs, _result)
  _result = _AnonymousRandomSeedGeneratorOutput._make(_result)
  return _result

_AnonymousSeedGeneratorOutput = collections.namedtuple(
    "AnonymousSeedGenerator",
    ["handle", "deleter"])


def anonymous_seed_generator(seed, seed2, reshuffle, name=None):
  r"""A container for a seed generator resource.

  Args:
    seed: A `Tensor` of type `int64`.
    seed2: A `Tensor` of type `int64`.
    reshuffle: A `Tensor` of type `bool`.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (handle, deleter).

    handle: A `Tensor` of type `resource`.
    deleter: A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "AnonymousSeedGenerator", name, seed, seed2, reshuffle)
      _result = _AnonymousSeedGeneratorOutput._make(_result)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return anonymous_seed_generator_eager_fallback(
          seed, seed2, reshuffle, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "AnonymousSeedGenerator", seed=seed, seed2=seed2, reshuffle=reshuffle,
                                  name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ()
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "AnonymousSeedGenerator", _inputs_flat, _attrs, _result)
  _result = _AnonymousSeedGeneratorOutput._make(_result)
  return _result

AnonymousSeedGenerator = tf_export("raw_ops.AnonymousSeedGenerator")(_ops.to_raw_op(anonymous_seed_generator))


def anonymous_seed_generator_eager_fallback(seed, seed2, reshuffle, name, ctx):
  seed = _ops.convert_to_tensor(seed, _dtypes.int64)
  seed2 = _ops.convert_to_tensor(seed2, _dtypes.int64)
  reshuffle = _ops.convert_to_tensor(reshuffle, _dtypes.bool)
  _inputs_flat = [seed, seed2, reshuffle]
  _attrs = None
  _result = _execute.execute(b"AnonymousSeedGenerator", 2,
                             inputs=_inputs_flat, attrs=_attrs, ctx=ctx,
                             name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "AnonymousSeedGenerator", _inputs_flat, _attrs, _result)
  _result = _AnonymousSeedGeneratorOutput._make(_result)
  return _result
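
# ---------------------------------------------------------------------------
# Editor's note: AnonymousSeedGenerator backs reshuffle-each-iteration
# shuffling. A sketch, assuming TF 2.x eager mode; the paired cleanup op
# DeleteSeedGenerator is assumed from the handle/deleter convention used
# throughout this file:
#
#   import tensorflow as tf
#   handle, deleter = tf.raw_ops.AnonymousSeedGenerator(
#       seed=1, seed2=2, reshuffle=True)
#   # ... pass `handle` as the `seed_generator` input of ShuffleDatasetV3 ...
#   tf.raw_ops.DeleteSeedGenerator(handle=handle, deleter=deleter)
# ---------------------------------------------------------------------------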

def batch_dataset(input_dataset, batch_size, output_types, output_shapes, metadata="", name=None):
  r"""Creates a dataset that batches `batch_size` elements from `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    batch_size: A `Tensor` of type `int64`.
      A scalar representing the number of elements to accumulate in a
      batch.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    metadata: An optional `string`. Defaults to `""`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "BatchDataset", name, input_dataset, batch_size, "output_types",
        output_types, "output_shapes", output_shapes, "metadata", metadata)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return batch_dataset_eager_fallback(
          input_dataset, batch_size, output_types=output_types,
          output_shapes=output_shapes, metadata=metadata, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'batch_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'batch_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if metadata is None:
    metadata = ""
  metadata = _execute.make_str(metadata, "metadata")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "BatchDataset", input_dataset=input_dataset, batch_size=batch_size,
                        output_types=output_types,
                        output_shapes=output_shapes, metadata=metadata,
                        name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"), "metadata",
              _op.get_attr("metadata"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "BatchDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

BatchDataset = tf_export("raw_ops.BatchDataset")(_ops.to_raw_op(batch_dataset))


def batch_dataset_eager_fallback(input_dataset, batch_size, output_types, output_shapes, metadata, name, ctx):
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'batch_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'batch_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if metadata is None:
    metadata = ""
  metadata = _execute.make_str(metadata, "metadata")
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  batch_size = _ops.convert_to_tensor(batch_size, _dtypes.int64)
  _inputs_flat = [input_dataset, batch_size]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes,
  "metadata", metadata)
  _result = _execute.execute(b"BatchDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "BatchDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
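
# ---------------------------------------------------------------------------
# Editor's note: a minimal sketch of calling the exported raw op directly;
# assumes TF 2.x eager mode and uses the private `Dataset._variant_tensor`
# attribute plus `tf.data.experimental.from_variant` purely for illustration:
#
#   import tensorflow as tf
#   ds = tf.data.Dataset.range(10)
#   variant = tf.raw_ops.BatchDataset(
#       input_dataset=ds._variant_tensor,
#       batch_size=tf.constant(4, tf.int64),
#       output_types=[tf.int64],
#       output_shapes=[tf.TensorShape([None])])  # V1 never drops a remainder
#   batched = tf.data.experimental.from_variant(
#       variant, tf.TensorSpec([None], tf.int64))
#   # -> [0 1 2 3], [4 5 6 7], [8 9]
# ---------------------------------------------------------------------------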

def batch_dataset_v2(input_dataset, batch_size, drop_remainder, output_types, output_shapes, parallel_copy=False, metadata="", name=None):
  r"""Creates a dataset that batches `batch_size` elements from `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    batch_size: A `Tensor` of type `int64`.
      A scalar representing the number of elements to accumulate in a batch.
    drop_remainder: A `Tensor` of type `bool`.
      A scalar representing whether the last batch should be dropped in case its size
      is smaller than desired.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    parallel_copy: An optional `bool`. Defaults to `False`.
    metadata: An optional `string`. Defaults to `""`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "BatchDatasetV2", name, input_dataset, batch_size,
        drop_remainder, "parallel_copy", parallel_copy, "output_types",
        output_types, "output_shapes", output_shapes, "metadata", metadata)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return batch_dataset_v2_eager_fallback(
          input_dataset, batch_size, drop_remainder,
          parallel_copy=parallel_copy, output_types=output_types,
          output_shapes=output_shapes, metadata=metadata, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'batch_dataset_v2' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'batch_dataset_v2' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if parallel_copy is None:
    parallel_copy = False
  parallel_copy = _execute.make_bool(parallel_copy, "parallel_copy")
  if metadata is None:
    metadata = ""
  metadata = _execute.make_str(metadata, "metadata")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "BatchDatasetV2", input_dataset=input_dataset, batch_size=batch_size,
                          drop_remainder=drop_remainder,
                          output_types=output_types,
                          output_shapes=output_shapes,
                          parallel_copy=parallel_copy, metadata=metadata,
                          name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("parallel_copy", _op._get_attr_bool("parallel_copy"),
              "output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"), "metadata",
              _op.get_attr("metadata"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "BatchDatasetV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

BatchDatasetV2 = tf_export("raw_ops.BatchDatasetV2")(_ops.to_raw_op(batch_dataset_v2))


def batch_dataset_v2_eager_fallback(input_dataset, batch_size, drop_remainder, output_types, output_shapes, parallel_copy, metadata, name, ctx):
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'batch_dataset_v2' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'batch_dataset_v2' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if parallel_copy is None:
    parallel_copy = False
  parallel_copy = _execute.make_bool(parallel_copy, "parallel_copy")
  if metadata is None:
    metadata = ""
  metadata = _execute.make_str(metadata, "metadata")
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  batch_size = _ops.convert_to_tensor(batch_size, _dtypes.int64)
  drop_remainder = _ops.convert_to_tensor(drop_remainder, _dtypes.bool)
  _inputs_flat = [input_dataset, batch_size, drop_remainder]
  _attrs = ("parallel_copy", parallel_copy, "output_types", output_types,
  "output_shapes", output_shapes, "metadata", metadata)
  _result = _execute.execute(b"BatchDatasetV2", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "BatchDatasetV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
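
# ---------------------------------------------------------------------------
# Editor's note: V2 adds `drop_remainder` (an input tensor) and the
# `parallel_copy` attr. A sketch, under the same assumptions as the
# BatchDataset example above:
#
#   import tensorflow as tf
#   ds = tf.data.Dataset.range(10)
#   variant = tf.raw_ops.BatchDatasetV2(
#       input_dataset=ds._variant_tensor,
#       batch_size=tf.constant(4, tf.int64),
#       drop_remainder=tf.constant(True),
#       output_types=[tf.int64],
#       output_shapes=[tf.TensorShape([4])])  # static batch dim when dropping
#   # -> yields [0 1 2 3] and [4 5 6 7]; the partial batch [8 9] is dropped.
# ---------------------------------------------------------------------------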

def cache_dataset(input_dataset, filename, output_types, output_shapes, metadata="", name=None):
  r"""Creates a dataset that caches elements from `input_dataset`.

  A CacheDataset will iterate over the input_dataset, and store tensors. If the
  cache already exists, the cache will be used. If the cache is inappropriate
  (e.g. cannot be opened, contains tensors of the wrong shape / size), an error
  will be returned when used.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    filename: A `Tensor` of type `string`.
      A path on the filesystem where we should cache the dataset. Note: this
      will be a directory.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    metadata: An optional `string`. Defaults to `""`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "CacheDataset", name, input_dataset, filename, "output_types",
        output_types, "output_shapes", output_shapes, "metadata", metadata)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return cache_dataset_eager_fallback(
          input_dataset, filename, output_types=output_types,
          output_shapes=output_shapes, metadata=metadata, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'cache_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'cache_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if metadata is None:
    metadata = ""
  metadata = _execute.make_str(metadata, "metadata")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "CacheDataset", input_dataset=input_dataset, filename=filename,
                        output_types=output_types,
                        output_shapes=output_shapes, metadata=metadata,
                        name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"), "metadata",
              _op.get_attr("metadata"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "CacheDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

CacheDataset = tf_export("raw_ops.CacheDataset")(_ops.to_raw_op(cache_dataset))


def cache_dataset_eager_fallback(input_dataset, filename, output_types, output_shapes, metadata, name, ctx):
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'cache_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'cache_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if metadata is None:
    metadata = ""
  metadata = _execute.make_str(metadata, "metadata")
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  filename = _ops.convert_to_tensor(filename, _dtypes.string)
  _inputs_flat = [input_dataset, filename]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes,
  "metadata", metadata)
  _result = _execute.execute(b"CacheDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "CacheDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
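
# ---------------------------------------------------------------------------
# Editor's note: a sketch of the file-backed cache; with `filename=""` the
# runtime falls back to an in-memory cache. Same private-attribute caveats
# as the earlier examples; the path below is hypothetical:
#
#   import tensorflow as tf
#   ds = tf.data.Dataset.range(5)
#   variant = tf.raw_ops.CacheDataset(
#       input_dataset=ds._variant_tensor,
#       filename=tf.constant("/tmp/my_cache"),   # hypothetical path
#       output_types=[tf.int64],
#       output_shapes=[tf.TensorShape([])])
# ---------------------------------------------------------------------------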

def cache_dataset_v2(input_dataset, filename, cache, output_types, output_shapes, metadata="", name=None):
  r"""Creates a dataset that caches elements from `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    filename: A `Tensor` of type `string`.
    cache: A `Tensor` of type `resource`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    metadata: An optional `string`. Defaults to `""`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "CacheDatasetV2", name, input_dataset, filename, cache,
        "output_types", output_types, "output_shapes", output_shapes,
        "metadata", metadata)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return cache_dataset_v2_eager_fallback(
          input_dataset, filename, cache, output_types=output_types,
          output_shapes=output_shapes, metadata=metadata, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'cache_dataset_v2' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'cache_dataset_v2' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if metadata is None:
    metadata = ""
  metadata = _execute.make_str(metadata, "metadata")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "CacheDatasetV2", input_dataset=input_dataset, filename=filename,
                          cache=cache, output_types=output_types,
                          output_shapes=output_shapes, metadata=metadata,
                          name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"), "metadata",
              _op.get_attr("metadata"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "CacheDatasetV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

CacheDatasetV2 = tf_export("raw_ops.CacheDatasetV2")(_ops.to_raw_op(cache_dataset_v2))


def cache_dataset_v2_eager_fallback(input_dataset, filename, cache, output_types, output_shapes, metadata, name, ctx):
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'cache_dataset_v2' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'cache_dataset_v2' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if metadata is None:
    metadata = ""
  metadata = _execute.make_str(metadata, "metadata")
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  filename = _ops.convert_to_tensor(filename, _dtypes.string)
  cache = _ops.convert_to_tensor(cache, _dtypes.resource)
  _inputs_flat = [input_dataset, filename, cache]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes,
  "metadata", metadata)
  _result = _execute.execute(b"CacheDatasetV2", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "CacheDatasetV2", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
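
# ---------------------------------------------------------------------------
# Editor's note: V2 takes an explicit cache resource; pairing it with
# AnonymousMemoryCache (defined above) gives an in-memory cache whose
# lifetime the caller controls. Sketch, assuming TF 2.x eager mode:
#
#   import tensorflow as tf
#   cache, deleter = tf.raw_ops.AnonymousMemoryCache()
#   ds = tf.data.Dataset.range(5)
#   variant = tf.raw_ops.CacheDatasetV2(
#       input_dataset=ds._variant_tensor,
#       filename=tf.constant(""),         # empty filename: use the resource
#       cache=cache,
#       output_types=[tf.int64],
#       output_shapes=[tf.TensorShape([])])
#   # ... iterate ...
#   tf.raw_ops.DeleteMemoryCache(handle=cache, deleter=deleter)
# ---------------------------------------------------------------------------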

def concatenate_dataset(input_dataset, another_dataset, output_types, output_shapes, metadata="", name=None):
  r"""Creates a dataset that concatenates `input_dataset` with `another_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    another_dataset: A `Tensor` of type `variant`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    metadata: An optional `string`. Defaults to `""`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "ConcatenateDataset", name, input_dataset, another_dataset,
        "output_types", output_types, "output_shapes", output_shapes,
        "metadata", metadata)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return concatenate_dataset_eager_fallback(
          input_dataset, another_dataset, output_types=output_types,
          output_shapes=output_shapes, metadata=metadata, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'concatenate_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'concatenate_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if metadata is None:
    metadata = ""
  metadata = _execute.make_str(metadata, "metadata")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "ConcatenateDataset", input_dataset=input_dataset,
                              another_dataset=another_dataset,
                              output_types=output_types,
                              output_shapes=output_shapes, metadata=metadata,
                              name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"), "metadata",
              _op.get_attr("metadata"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "ConcatenateDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

ConcatenateDataset = tf_export("raw_ops.ConcatenateDataset")(_ops.to_raw_op(concatenate_dataset))


def concatenate_dataset_eager_fallback(input_dataset, another_dataset, output_types, output_shapes, metadata, name, ctx):
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'concatenate_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'concatenate_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if metadata is None:
    metadata = ""
  metadata = _execute.make_str(metadata, "metadata")
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  another_dataset = _ops.convert_to_tensor(another_dataset, _dtypes.variant)
  _inputs_flat = [input_dataset, another_dataset]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes,
  "metadata", metadata)
  _result = _execute.execute(b"ConcatenateDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "ConcatenateDataset", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
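
# ---------------------------------------------------------------------------
# Editor's note: both inputs must produce elements of the same types;
# `output_shapes` should describe the most general shape compatible with both
# datasets. Sketch, with the usual private-attribute caveats:
#
#   import tensorflow as tf
#   a = tf.data.Dataset.range(2)
#   b = tf.data.Dataset.range(10, 12)
#   variant = tf.raw_ops.ConcatenateDataset(
#       input_dataset=a._variant_tensor,
#       another_dataset=b._variant_tensor,
#       output_types=[tf.int64],
#       output_shapes=[tf.TensorShape([])])
#   # -> 0, 1, 10, 11
# ---------------------------------------------------------------------------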

def dataset_cardinality(input_dataset, cardinality_options="", name=None):
  r"""Returns the cardinality of `input_dataset`.

  Returns the cardinality of `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the dataset to return cardinality for.
    cardinality_options: An optional `string`. Defaults to `""`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `int64`.
  """
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "DatasetCardinality", name, input_dataset,
        "cardinality_options", cardinality_options)
      return _result
    except _core._NotOkStatusException as e:
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      pass
    try:
      return dataset_cardinality_eager_fallback(
          input_dataset, cardinality_options=cardinality_options, name=name,
          ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
  # Add nodes to the TensorFlow graph.
  if cardinality_options is None:
    cardinality_options = ""
  cardinality_options = _execute.make_str(cardinality_options, "cardinality_options")
  _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "DatasetCardinality", input_dataset=input_dataset,
                              cardinality_options=cardinality_options,
                              name=name)
  _result = _outputs[:]
  if _execute.must_record_gradient():
    _attrs = ("cardinality_options", _op.get_attr("cardinality_options"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "DatasetCardinality", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result

DatasetCardinality = tf_export("raw_ops.DatasetCardinality")(_ops.to_raw_op(dataset_cardinality))


def dataset_cardinality_eager_fallback(input_dataset, cardinality_options, name, ctx):
  if cardinality_options is None:
    cardinality_options = ""
  cardinality_options = _execute.make_str(cardinality_options, "cardinality_options")
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  _inputs_flat = [input_dataset]
  _attrs = ("cardinality_options", cardinality_options)
  _result = _execute.execute(b"DatasetCardinality", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "DatasetCardinality", _inputs_flat, _attrs, _result)
  _result, = _result
  return _result
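
# ---------------------------------------------------------------------------
# Editor's note: the result uses the tf.data sentinel values -1
# (tf.data.INFINITE_CARDINALITY) and -2 (tf.data.UNKNOWN_CARDINALITY).
# Sketch, with the usual private-attribute caveats:
#
#   import tensorflow as tf
#   ds = tf.data.Dataset.range(8)
#   tf.raw_ops.DatasetCardinality(input_dataset=ds._variant_tensor)   # -> 8
#   rep = ds.repeat()
#   tf.raw_ops.DatasetCardinality(input_dataset=rep._variant_tensor)  # -> -1
# ---------------------------------------------------------------------------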

1219def dataset_to_graph(input_dataset, stateful_whitelist=[], allow_stateful=False, strip_device_assignment=False, name=None): 

1220 r"""Returns a serialized GraphDef representing `input_dataset`. 

1221 

1222 Returns a graph representation for `input_dataset`. 

1223 

1224 Args: 

1225 input_dataset: A `Tensor` of type `variant`. 

1226 A variant tensor representing the dataset to return the graph representation for. 

1227 stateful_whitelist: An optional list of `strings`. Defaults to `[]`. 

1228 allow_stateful: An optional `bool`. Defaults to `False`. 

1229 strip_device_assignment: An optional `bool`. Defaults to `False`. 

1230 name: A name for the operation (optional). 

1231 

1232 Returns: 

1233 A `Tensor` of type `string`. 

1234 """ 

1235 _ctx = _context._context or _context.context() 

1236 tld = _ctx._thread_local_data 

1237 if tld.is_eager: 

1238 try: 

1239 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

1240 _ctx, "DatasetToGraph", name, input_dataset, "stateful_whitelist", 

1241 stateful_whitelist, "allow_stateful", allow_stateful, 

1242 "strip_device_assignment", strip_device_assignment) 

1243 return _result 

1244 except _core._NotOkStatusException as e: 

1245 _ops.raise_from_not_ok_status(e, name) 

1246 except _core._FallbackException: 

1247 pass 

1248 try: 

1249 return dataset_to_graph_eager_fallback( 

1250 input_dataset, stateful_whitelist=stateful_whitelist, 

1251 allow_stateful=allow_stateful, 

1252 strip_device_assignment=strip_device_assignment, name=name, 

1253 ctx=_ctx) 

1254 except _core._SymbolicException: 

1255 pass # Add nodes to the TensorFlow graph. 

1256 # Add nodes to the TensorFlow graph. 

1257 if stateful_whitelist is None: 

1258 stateful_whitelist = [] 

1259 if not isinstance(stateful_whitelist, (list, tuple)): 

1260 raise TypeError( 

1261 "Expected list for 'stateful_whitelist' argument to " 

1262 "'dataset_to_graph' Op, not %r." % stateful_whitelist) 

1263 stateful_whitelist = [_execute.make_str(_s, "stateful_whitelist") for _s in stateful_whitelist] 

1264 if allow_stateful is None: 

1265 allow_stateful = False 

1266 allow_stateful = _execute.make_bool(allow_stateful, "allow_stateful") 

1267 if strip_device_assignment is None: 

1268 strip_device_assignment = False 

1269 strip_device_assignment = _execute.make_bool(strip_device_assignment, "strip_device_assignment") 

1270 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

1271 "DatasetToGraph", input_dataset=input_dataset, 

1272 stateful_whitelist=stateful_whitelist, 

1273 allow_stateful=allow_stateful, 

1274 strip_device_assignment=strip_device_assignment, 

1275 name=name) 

1276 _result = _outputs[:] 

1277 if _execute.must_record_gradient(): 

1278 _attrs = ("stateful_whitelist", _op.get_attr("stateful_whitelist"), 

1279 "allow_stateful", _op._get_attr_bool("allow_stateful"), 

1280 "strip_device_assignment", 

1281 _op._get_attr_bool("strip_device_assignment")) 

1282 _inputs_flat = _op.inputs 

1283 _execute.record_gradient( 

1284 "DatasetToGraph", _inputs_flat, _attrs, _result) 

1285 _result, = _result 

1286 return _result 

1287 

1288DatasetToGraph = tf_export("raw_ops.DatasetToGraph")(_ops.to_raw_op(dataset_to_graph)) 

1289 

1290 

1291def dataset_to_graph_eager_fallback(input_dataset, stateful_whitelist, allow_stateful, strip_device_assignment, name, ctx): 

1292 if stateful_whitelist is None: 

1293 stateful_whitelist = [] 

1294 if not isinstance(stateful_whitelist, (list, tuple)): 

1295 raise TypeError( 

1296 "Expected list for 'stateful_whitelist' argument to " 

1297 "'dataset_to_graph' Op, not %r." % stateful_whitelist) 

1298 stateful_whitelist = [_execute.make_str(_s, "stateful_whitelist") for _s in stateful_whitelist] 

1299 if allow_stateful is None: 

1300 allow_stateful = False 

1301 allow_stateful = _execute.make_bool(allow_stateful, "allow_stateful") 

1302 if strip_device_assignment is None: 

1303 strip_device_assignment = False 

1304 strip_device_assignment = _execute.make_bool(strip_device_assignment, "strip_device_assignment") 

1305 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

1306 _inputs_flat = [input_dataset] 

1307 _attrs = ("stateful_whitelist", stateful_whitelist, "allow_stateful", 

1308 allow_stateful, "strip_device_assignment", strip_device_assignment) 

1309 _result = _execute.execute(b"DatasetToGraph", 1, inputs=_inputs_flat, 

1310 attrs=_attrs, ctx=ctx, name=name) 

1311 if _execute.must_record_gradient(): 

1312 _execute.record_gradient( 

1313 "DatasetToGraph", _inputs_flat, _attrs, _result) 

1314 _result, = _result 

1315 return _result 

1316 

1317 
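# --- Editorial note, not part of the machine-generated output: a minimal
# sketch of driving the op above from a `tf.data.Dataset`, assuming TF 2.x
# eager mode. `tf.data.experimental.to_variant` supplies the variant tensor
# the op expects; the helper below is illustrative and never called.
def _example_dataset_to_graph():
  import tensorflow as tf
  ds = tf.data.Dataset.range(4)
  variant = tf.data.experimental.to_variant(ds)
  # Returns a scalar string tensor holding the serialized GraphDef.
  return dataset_to_graph(variant, strip_device_assignment=True)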

1318def dataset_to_graph_v2(input_dataset, external_state_policy=0, strip_device_assignment=False, name=None): 

1319 r"""Returns a serialized GraphDef representing `input_dataset`. 

1320 

1321 Returns a graph representation for `input_dataset`. 

1322 

1323 Args: 

1324 input_dataset: A `Tensor` of type `variant`. 

1325 A variant tensor representing the dataset whose graph representation should be returned. 

1326 external_state_policy: An optional `int`. Defaults to `0`. 

1327 strip_device_assignment: An optional `bool`. Defaults to `False`. 

1328 name: A name for the operation (optional). 

1329 

1330 Returns: 

1331 A `Tensor` of type `string`. 

1332 """ 

1333 _ctx = _context._context or _context.context() 

1334 tld = _ctx._thread_local_data 

1335 if tld.is_eager: 

1336 try: 

1337 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

1338 _ctx, "DatasetToGraphV2", name, input_dataset, 

1339 "external_state_policy", external_state_policy, 

1340 "strip_device_assignment", strip_device_assignment) 

1341 return _result 

1342 except _core._NotOkStatusException as e: 

1343 _ops.raise_from_not_ok_status(e, name) 

1344 except _core._FallbackException: 

1345 pass 

1346 try: 

1347 return dataset_to_graph_v2_eager_fallback( 

1348 input_dataset, external_state_policy=external_state_policy, 

1349 strip_device_assignment=strip_device_assignment, name=name, 

1350 ctx=_ctx) 

1351 except _core._SymbolicException: 

1352 pass # Add nodes to the TensorFlow graph. 

1353 # Add nodes to the TensorFlow graph. 

1354 if external_state_policy is None: 

1355 external_state_policy = 0 

1356 external_state_policy = _execute.make_int(external_state_policy, "external_state_policy") 

1357 if strip_device_assignment is None: 

1358 strip_device_assignment = False 

1359 strip_device_assignment = _execute.make_bool(strip_device_assignment, "strip_device_assignment") 

1360 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

1361 "DatasetToGraphV2", input_dataset=input_dataset, 

1362 external_state_policy=external_state_policy, 

1363 strip_device_assignment=strip_device_assignment, 

1364 name=name) 

1365 _result = _outputs[:] 

1366 if _execute.must_record_gradient(): 

1367 _attrs = ("external_state_policy", 

1368 _op._get_attr_int("external_state_policy"), 

1369 "strip_device_assignment", 

1370 _op._get_attr_bool("strip_device_assignment")) 

1371 _inputs_flat = _op.inputs 

1372 _execute.record_gradient( 

1373 "DatasetToGraphV2", _inputs_flat, _attrs, _result) 

1374 _result, = _result 

1375 return _result 

1376 

1377DatasetToGraphV2 = tf_export("raw_ops.DatasetToGraphV2")(_ops.to_raw_op(dataset_to_graph_v2)) 

1378 

1379 

1380def dataset_to_graph_v2_eager_fallback(input_dataset, external_state_policy, strip_device_assignment, name, ctx): 

1381 if external_state_policy is None: 

1382 external_state_policy = 0 

1383 external_state_policy = _execute.make_int(external_state_policy, "external_state_policy") 

1384 if strip_device_assignment is None: 

1385 strip_device_assignment = False 

1386 strip_device_assignment = _execute.make_bool(strip_device_assignment, "strip_device_assignment") 

1387 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

1388 _inputs_flat = [input_dataset] 

1389 _attrs = ("external_state_policy", external_state_policy, 

1390 "strip_device_assignment", strip_device_assignment) 

1391 _result = _execute.execute(b"DatasetToGraphV2", 1, inputs=_inputs_flat, 

1392 attrs=_attrs, ctx=ctx, name=name) 

1393 if _execute.must_record_gradient(): 

1394 _execute.record_gradient( 

1395 "DatasetToGraphV2", _inputs_flat, _attrs, _result) 

1396 _result, = _result 

1397 return _result 

1398 

1399 
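# --- Editorial note, not part of the machine-generated output: the V2 op
# replaces the whitelist with an integer `external_state_policy`. The values
# below assume tf.data's ExternalStatePolicy enum (0 = warn, 1 = ignore,
# 2 = fail); verify against the installed TensorFlow version. Illustrative
# helper, never called.
def _example_dataset_to_graph_v2():
  import tensorflow as tf
  variant = tf.data.experimental.to_variant(tf.data.Dataset.range(4))
  # Assumed policy 2: fail if the dataset captures external state.
  return dataset_to_graph_v2(variant, external_state_policy=2)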

1400def dataset_to_single_element(dataset, output_types, output_shapes, metadata="", name=None): 

1401 r"""Outputs the single element from the given dataset. 

1402 

1403 Args: 

1404 dataset: A `Tensor` of type `variant`. 

1405 A handle to a dataset that contains a single element. 

1406 output_types: A list of `tf.DTypes` that has length `>= 1`. 

1407 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

1408 metadata: An optional `string`. Defaults to `""`. 

1409 name: A name for the operation (optional). 

1410 

1411 Returns: 

1412 A list of `Tensor` objects of type `output_types`. 

1413 """ 

1414 _ctx = _context._context or _context.context() 

1415 tld = _ctx._thread_local_data 

1416 if tld.is_eager: 

1417 try: 

1418 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

1419 _ctx, "DatasetToSingleElement", name, dataset, "output_types", 

1420 output_types, "output_shapes", output_shapes, "metadata", metadata) 

1421 return _result 

1422 except _core._NotOkStatusException as e: 

1423 _ops.raise_from_not_ok_status(e, name) 

1424 except _core._FallbackException: 

1425 pass 

1426 try: 

1427 return dataset_to_single_element_eager_fallback( 

1428 dataset, output_types=output_types, output_shapes=output_shapes, 

1429 metadata=metadata, name=name, ctx=_ctx) 

1430 except _core._SymbolicException: 

1431 pass # Add nodes to the TensorFlow graph. 

1432 # Add nodes to the TensorFlow graph. 

1433 if not isinstance(output_types, (list, tuple)): 

1434 raise TypeError( 

1435 "Expected list for 'output_types' argument to " 

1436 "'dataset_to_single_element' Op, not %r." % output_types) 

1437 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

1438 if not isinstance(output_shapes, (list, tuple)): 

1439 raise TypeError( 

1440 "Expected list for 'output_shapes' argument to " 

1441 "'dataset_to_single_element' Op, not %r." % output_shapes) 

1442 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

1443 if metadata is None: 

1444 metadata = "" 

1445 metadata = _execute.make_str(metadata, "metadata") 

1446 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

1447 "DatasetToSingleElement", dataset=dataset, output_types=output_types, 

1448 output_shapes=output_shapes, 

1449 metadata=metadata, name=name) 

1450 _result = _outputs[:] 

1451 if not _result: 

1452 return _op 

1453 if _execute.must_record_gradient(): 

1454 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

1455 _op.get_attr("output_shapes"), "metadata", 

1456 _op.get_attr("metadata")) 

1457 _inputs_flat = _op.inputs 

1458 _execute.record_gradient( 

1459 "DatasetToSingleElement", _inputs_flat, _attrs, _result) 

1460 return _result 

1461 

1462DatasetToSingleElement = tf_export("raw_ops.DatasetToSingleElement")(_ops.to_raw_op(dataset_to_single_element)) 

1463 

1464 

1465def dataset_to_single_element_eager_fallback(dataset, output_types, output_shapes, metadata, name, ctx): 

1466 if not isinstance(output_types, (list, tuple)): 

1467 raise TypeError( 

1468 "Expected list for 'output_types' argument to " 

1469 "'dataset_to_single_element' Op, not %r." % output_types) 

1470 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

1471 if not isinstance(output_shapes, (list, tuple)): 

1472 raise TypeError( 

1473 "Expected list for 'output_shapes' argument to " 

1474 "'dataset_to_single_element' Op, not %r." % output_shapes) 

1475 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

1476 if metadata is None: 

1477 metadata = "" 

1478 metadata = _execute.make_str(metadata, "metadata") 

1479 dataset = _ops.convert_to_tensor(dataset, _dtypes.variant) 

1480 _inputs_flat = [dataset] 

1481 _attrs = ("output_types", output_types, "output_shapes", output_shapes, 

1482 "metadata", metadata) 

1483 _result = _execute.execute(b"DatasetToSingleElement", len(output_types), 

1484 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

1485 name=name) 

1486 if _execute.must_record_gradient(): 

1487 _execute.record_gradient( 

1488 "DatasetToSingleElement", _inputs_flat, _attrs, _result) 

1489 return _result 

1490 

1491 
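# --- Editorial note, not part of the machine-generated output: the public
# equivalent of this op is `tf.data.experimental.get_single_element` (or
# `Dataset.get_single_element` in newer releases). A raw-op sketch, assuming
# TF 2.x eager mode; illustrative helper, never called.
def _example_dataset_to_single_element():
  import tensorflow as tf
  ds = tf.data.Dataset.from_tensors(tf.constant([1, 2, 3]))
  variant = tf.data.experimental.to_variant(ds)
  # Raises InvalidArgumentError unless the dataset holds exactly one
  # element; output_types/output_shapes must match its structure.
  return dataset_to_single_element(
      variant, output_types=[tf.int32], output_shapes=[[3]])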

1492def delete_iterator(handle, deleter, name=None): 

1493 r"""A container for an iterator resource. 

1494 

1495 Args: 

1496 handle: A `Tensor` of type `resource`. A handle to the iterator to delete. 

1497 deleter: A `Tensor` of type `variant`. A variant deleter. 

1498 name: A name for the operation (optional). 

1499 

1500 Returns: 

1501 The created Operation. 

1502 """ 

1503 _ctx = _context._context or _context.context() 

1504 tld = _ctx._thread_local_data 

1505 if tld.is_eager: 

1506 try: 

1507 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

1508 _ctx, "DeleteIterator", name, handle, deleter) 

1509 return _result 

1510 except _core._NotOkStatusException as e: 

1511 _ops.raise_from_not_ok_status(e, name) 

1512 except _core._FallbackException: 

1513 pass 

1514 try: 

1515 return delete_iterator_eager_fallback( 

1516 handle, deleter, name=name, ctx=_ctx) 

1517 except _core._SymbolicException: 

1518 pass # Add nodes to the TensorFlow graph. 

1519 # Add nodes to the TensorFlow graph. 

1520 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

1521 "DeleteIterator", handle=handle, deleter=deleter, name=name) 

1522 return _op 

1523DeleteIterator = tf_export("raw_ops.DeleteIterator")(_ops.to_raw_op(delete_iterator)) 

1524 

1525 

1526def delete_iterator_eager_fallback(handle, deleter, name, ctx): 

1527 handle = _ops.convert_to_tensor(handle, _dtypes.resource) 

1528 deleter = _ops.convert_to_tensor(deleter, _dtypes.variant) 

1529 _inputs_flat = [handle, deleter] 

1530 _attrs = None 

1531 _result = _execute.execute(b"DeleteIterator", 0, inputs=_inputs_flat, 

1532 attrs=_attrs, ctx=ctx, name=name) 

1533 _result = None 

1534 return _result 

1535 

1536 
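# --- Editorial note, not part of the machine-generated output:
# `DeleteIterator` consumes the (handle, deleter) pair produced by
# `AnonymousIteratorV2`. A sketch of that pairing via `tf.raw_ops`, assuming
# TF 2.x eager mode; illustrative helper, never called.
def _example_delete_iterator():
  import tensorflow as tf
  handle, deleter = tf.raw_ops.AnonymousIteratorV2(
      output_types=[tf.int64], output_shapes=[[]])
  # Releases the iterator resource referenced by `handle`.
  tf.raw_ops.DeleteIterator(handle=handle, deleter=deleter)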

1537def delete_memory_cache(handle, deleter, name=None): 

1538 r"""TODO: add doc. 

1539 

1540 Args: 

1541 handle: A `Tensor` of type `resource`. 

1542 deleter: A `Tensor` of type `variant`. 

1543 name: A name for the operation (optional). 

1544 

1545 Returns: 

1546 The created Operation. 

1547 """ 

1548 _ctx = _context._context or _context.context() 

1549 tld = _ctx._thread_local_data 

1550 if tld.is_eager: 

1551 try: 

1552 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

1553 _ctx, "DeleteMemoryCache", name, handle, deleter) 

1554 return _result 

1555 except _core._NotOkStatusException as e: 

1556 _ops.raise_from_not_ok_status(e, name) 

1557 except _core._FallbackException: 

1558 pass 

1559 try: 

1560 return delete_memory_cache_eager_fallback( 

1561 handle, deleter, name=name, ctx=_ctx) 

1562 except _core._SymbolicException: 

1563 pass # Add nodes to the TensorFlow graph. 

1564 # Add nodes to the TensorFlow graph. 

1565 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

1566 "DeleteMemoryCache", handle=handle, deleter=deleter, name=name) 

1567 return _op 

1568DeleteMemoryCache = tf_export("raw_ops.DeleteMemoryCache")(_ops.to_raw_op(delete_memory_cache)) 

1569 

1570 

1571def delete_memory_cache_eager_fallback(handle, deleter, name, ctx): 

1572 handle = _ops.convert_to_tensor(handle, _dtypes.resource) 

1573 deleter = _ops.convert_to_tensor(deleter, _dtypes.variant) 

1574 _inputs_flat = [handle, deleter] 

1575 _attrs = None 

1576 _result = _execute.execute(b"DeleteMemoryCache", 0, inputs=_inputs_flat, 

1577 attrs=_attrs, ctx=ctx, name=name) 

1578 _result = None 

1579 return _result 

1580 

1581 

1582def delete_multi_device_iterator(multi_device_iterator, iterators, deleter, name=None): 

1583 r"""A container for an iterator resource. 

1584 

1585 Args: 

1586 multi_device_iterator: A `Tensor` of type `resource`. 

1587 A handle to the multi device iterator to delete. 

1588 iterators: A list of `Tensor` objects with type `resource`. 

1589 A list of iterator handles (unused). Present so that the automatic control dependencies added during function tracing ensure this op runs after all the dependent iterators have been deleted. 

1590 deleter: A `Tensor` of type `variant`. A variant deleter. 

1591 name: A name for the operation (optional). 

1592 

1593 Returns: 

1594 The created Operation. 

1595 """ 

1596 _ctx = _context._context or _context.context() 

1597 tld = _ctx._thread_local_data 

1598 if tld.is_eager: 

1599 try: 

1600 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

1601 _ctx, "DeleteMultiDeviceIterator", name, multi_device_iterator, 

1602 iterators, deleter) 

1603 return _result 

1604 except _core._NotOkStatusException as e: 

1605 _ops.raise_from_not_ok_status(e, name) 

1606 except _core._FallbackException: 

1607 pass 

1608 try: 

1609 return delete_multi_device_iterator_eager_fallback( 

1610 multi_device_iterator, iterators, deleter, name=name, ctx=_ctx) 

1611 except _core._SymbolicException: 

1612 pass # Add nodes to the TensorFlow graph. 

1613 # Add nodes to the TensorFlow graph. 

1614 if not isinstance(iterators, (list, tuple)): 

1615 raise TypeError( 

1616 "Expected list for 'iterators' argument to " 

1617 "'delete_multi_device_iterator' Op, not %r." % iterators) 

1618 _attr_N = len(iterators) 

1619 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

1620 "DeleteMultiDeviceIterator", multi_device_iterator=multi_device_iterator, 

1621 iterators=iterators, deleter=deleter, 

1622 name=name) 

1623 return _op 

1624DeleteMultiDeviceIterator = tf_export("raw_ops.DeleteMultiDeviceIterator")(_ops.to_raw_op(delete_multi_device_iterator)) 

1625 

1626 

1627def delete_multi_device_iterator_eager_fallback(multi_device_iterator, iterators, deleter, name, ctx): 

1628 if not isinstance(iterators, (list, tuple)): 

1629 raise TypeError( 

1630 "Expected list for 'iterators' argument to " 

1631 "'delete_multi_device_iterator' Op, not %r." % iterators) 

1632 _attr_N = len(iterators) 

1633 multi_device_iterator = _ops.convert_to_tensor(multi_device_iterator, _dtypes.resource) 

1634 iterators = _ops.convert_n_to_tensor(iterators, _dtypes.resource) 

1635 deleter = _ops.convert_to_tensor(deleter, _dtypes.variant) 

1636 _inputs_flat = [multi_device_iterator] + list(iterators) + [deleter] 

1637 _attrs = ("N", _attr_N) 

1638 _result = _execute.execute(b"DeleteMultiDeviceIterator", 0, 

1639 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

1640 name=name) 

1641 _result = None 

1642 return _result 

1643 

1644 

1645def delete_random_seed_generator(handle, deleter, name=None): 

1646 r"""TODO: add doc. 

1647 

1648 Args: 

1649 handle: A `Tensor` of type `resource`. 

1650 deleter: A `Tensor` of type `variant`. 

1651 name: A name for the operation (optional). 

1652 

1653 Returns: 

1654 The created Operation. 

1655 """ 

1656 _ctx = _context._context or _context.context() 

1657 tld = _ctx._thread_local_data 

1658 if tld.is_eager: 

1659 try: 

1660 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

1661 _ctx, "DeleteRandomSeedGenerator", name, handle, deleter) 

1662 return _result 

1663 except _core._NotOkStatusException as e: 

1664 _ops.raise_from_not_ok_status(e, name) 

1665 except _core._FallbackException: 

1666 pass 

1667 try: 

1668 return delete_random_seed_generator_eager_fallback( 

1669 handle, deleter, name=name, ctx=_ctx) 

1670 except _core._SymbolicException: 

1671 pass # Add nodes to the TensorFlow graph. 

1672 # Add nodes to the TensorFlow graph. 

1673 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

1674 "DeleteRandomSeedGenerator", handle=handle, deleter=deleter, 

1675 name=name) 

1676 return _op 

1677DeleteRandomSeedGenerator = tf_export("raw_ops.DeleteRandomSeedGenerator")(_ops.to_raw_op(delete_random_seed_generator)) 

1678 

1679 

1680def delete_random_seed_generator_eager_fallback(handle, deleter, name, ctx): 

1681 handle = _ops.convert_to_tensor(handle, _dtypes.resource) 

1682 deleter = _ops.convert_to_tensor(deleter, _dtypes.variant) 

1683 _inputs_flat = [handle, deleter] 

1684 _attrs = None 

1685 _result = _execute.execute(b"DeleteRandomSeedGenerator", 0, 

1686 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

1687 name=name) 

1688 _result = None 

1689 return _result 

1690 

1691 

1692def delete_seed_generator(handle, deleter, name=None): 

1693 r"""TODO: add doc. 

1694 

1695 Args: 

1696 handle: A `Tensor` of type `resource`. 

1697 deleter: A `Tensor` of type `variant`. 

1698 name: A name for the operation (optional). 

1699 

1700 Returns: 

1701 The created Operation. 

1702 """ 

1703 _ctx = _context._context or _context.context() 

1704 tld = _ctx._thread_local_data 

1705 if tld.is_eager: 

1706 try: 

1707 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

1708 _ctx, "DeleteSeedGenerator", name, handle, deleter) 

1709 return _result 

1710 except _core._NotOkStatusException as e: 

1711 _ops.raise_from_not_ok_status(e, name) 

1712 except _core._FallbackException: 

1713 pass 

1714 try: 

1715 return delete_seed_generator_eager_fallback( 

1716 handle, deleter, name=name, ctx=_ctx) 

1717 except _core._SymbolicException: 

1718 pass # Add nodes to the TensorFlow graph. 

1719 # Add nodes to the TensorFlow graph. 

1720 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

1721 "DeleteSeedGenerator", handle=handle, deleter=deleter, name=name) 

1722 return _op 

1723DeleteSeedGenerator = tf_export("raw_ops.DeleteSeedGenerator")(_ops.to_raw_op(delete_seed_generator)) 

1724 

1725 

1726def delete_seed_generator_eager_fallback(handle, deleter, name, ctx): 

1727 handle = _ops.convert_to_tensor(handle, _dtypes.resource) 

1728 deleter = _ops.convert_to_tensor(deleter, _dtypes.variant) 

1729 _inputs_flat = [handle, deleter] 

1730 _attrs = None 

1731 _result = _execute.execute(b"DeleteSeedGenerator", 0, inputs=_inputs_flat, 

1732 attrs=_attrs, ctx=ctx, name=name) 

1733 _result = None 

1734 return _result 

1735 

1736 

1737def deserialize_iterator(resource_handle, serialized, name=None): 

1738 r"""Converts the given variant tensor to an iterator and stores it in the given resource. 

1739 

1740 Args: 

1741 resource_handle: A `Tensor` of type `resource`. 

1742 A handle to an iterator resource. 

1743 serialized: A `Tensor` of type `variant`. 

1744 A variant tensor storing the state of the iterator contained in the 

1745 resource. 

1746 name: A name for the operation (optional). 

1747 

1748 Returns: 

1749 The created Operation. 

1750 """ 

1751 _ctx = _context._context or _context.context() 

1752 tld = _ctx._thread_local_data 

1753 if tld.is_eager: 

1754 try: 

1755 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

1756 _ctx, "DeserializeIterator", name, resource_handle, serialized) 

1757 return _result 

1758 except _core._NotOkStatusException as e: 

1759 _ops.raise_from_not_ok_status(e, name) 

1760 except _core._FallbackException: 

1761 pass 

1762 try: 

1763 return deserialize_iterator_eager_fallback( 

1764 resource_handle, serialized, name=name, ctx=_ctx) 

1765 except _core._SymbolicException: 

1766 pass # Add nodes to the TensorFlow graph. 

1767 # Add nodes to the TensorFlow graph. 

1768 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

1769 "DeserializeIterator", resource_handle=resource_handle, 

1770 serialized=serialized, name=name) 

1771 return _op 

1772DeserializeIterator = tf_export("raw_ops.DeserializeIterator")(_ops.to_raw_op(deserialize_iterator)) 

1773 

1774 

1775def deserialize_iterator_eager_fallback(resource_handle, serialized, name, ctx): 

1776 resource_handle = _ops.convert_to_tensor(resource_handle, _dtypes.resource) 

1777 serialized = _ops.convert_to_tensor(serialized, _dtypes.variant) 

1778 _inputs_flat = [resource_handle, serialized] 

1779 _attrs = None 

1780 _result = _execute.execute(b"DeserializeIterator", 0, inputs=_inputs_flat, 

1781 attrs=_attrs, ctx=ctx, name=name) 

1782 _result = None 

1783 return _result 

1784 

1785 
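# --- Editorial note, not part of the machine-generated output:
# `DeserializeIterator` is the restore half of iterator checkpointing,
# paired with `SerializeIterator`. The sketch assumes the private
# `_iterator_resource` attribute of a TF 2.x owned iterator; treat that as
# an implementation detail. Illustrative helper, never called.
def _example_iterator_save_restore():
  import tensorflow as tf
  it = iter(tf.data.Dataset.range(10))
  next(it)  # consume element 0
  state = tf.raw_ops.SerializeIterator(resource_handle=it._iterator_resource)
  fresh = iter(tf.data.Dataset.range(10))
  # Restores `fresh` to the saved position; its next element is 1.
  tf.raw_ops.DeserializeIterator(
      resource_handle=fresh._iterator_resource, serialized=state)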

1786def dummy_memory_cache(name=None): 

1787 r"""TODO: add doc. 

1788 

1789 Args: 

1790 name: A name for the operation (optional). 

1791 

1792 Returns: 

1793 A `Tensor` of type `resource`. 

1794 """ 

1795 _ctx = _context._context or _context.context() 

1796 tld = _ctx._thread_local_data 

1797 if tld.is_eager: 

1798 try: 

1799 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

1800 _ctx, "DummyMemoryCache", name) 

1801 return _result 

1802 except _core._NotOkStatusException as e: 

1803 _ops.raise_from_not_ok_status(e, name) 

1804 except _core._FallbackException: 

1805 pass 

1806 try: 

1807 return dummy_memory_cache_eager_fallback( 

1808 name=name, ctx=_ctx) 

1809 except _core._SymbolicException: 

1810 pass # Add nodes to the TensorFlow graph. 

1811 # Add nodes to the TensorFlow graph. 

1812 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

1813 "DummyMemoryCache", name=name) 

1814 _result = _outputs[:] 

1815 if _execute.must_record_gradient(): 

1816 _attrs = () 

1817 _inputs_flat = _op.inputs 

1818 _execute.record_gradient( 

1819 "DummyMemoryCache", _inputs_flat, _attrs, _result) 

1820 _result, = _result 

1821 return _result 

1822 

1823DummyMemoryCache = tf_export("raw_ops.DummyMemoryCache")(_ops.to_raw_op(dummy_memory_cache)) 

1824 

1825 

1826def dummy_memory_cache_eager_fallback(name, ctx): 

1827 _inputs_flat = [] 

1828 _attrs = None 

1829 _result = _execute.execute(b"DummyMemoryCache", 1, inputs=_inputs_flat, 

1830 attrs=_attrs, ctx=ctx, name=name) 

1831 if _execute.must_record_gradient(): 

1832 _execute.record_gradient( 

1833 "DummyMemoryCache", _inputs_flat, _attrs, _result) 

1834 _result, = _result 

1835 return _result 

1836 

1837 

1838def dummy_seed_generator(name=None): 

1839 r"""TODO: add doc. 

1840 

1841 Args: 

1842 name: A name for the operation (optional). 

1843 

1844 Returns: 

1845 A `Tensor` of type `resource`. 

1846 """ 

1847 _ctx = _context._context or _context.context() 

1848 tld = _ctx._thread_local_data 

1849 if tld.is_eager: 

1850 try: 

1851 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

1852 _ctx, "DummySeedGenerator", name) 

1853 return _result 

1854 except _core._NotOkStatusException as e: 

1855 _ops.raise_from_not_ok_status(e, name) 

1856 except _core._FallbackException: 

1857 pass 

1858 try: 

1859 return dummy_seed_generator_eager_fallback( 

1860 name=name, ctx=_ctx) 

1861 except _core._SymbolicException: 

1862 pass # Add nodes to the TensorFlow graph. 

1863 # Add nodes to the TensorFlow graph. 

1864 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

1865 "DummySeedGenerator", name=name) 

1866 _result = _outputs[:] 

1867 if _execute.must_record_gradient(): 

1868 _attrs = () 

1869 _inputs_flat = _op.inputs 

1870 _execute.record_gradient( 

1871 "DummySeedGenerator", _inputs_flat, _attrs, _result) 

1872 _result, = _result 

1873 return _result 

1874 

1875DummySeedGenerator = tf_export("raw_ops.DummySeedGenerator")(_ops.to_raw_op(dummy_seed_generator)) 

1876 

1877 

1878def dummy_seed_generator_eager_fallback(name, ctx): 

1879 _inputs_flat = [] 

1880 _attrs = None 

1881 _result = _execute.execute(b"DummySeedGenerator", 1, inputs=_inputs_flat, 

1882 attrs=_attrs, ctx=ctx, name=name) 

1883 if _execute.must_record_gradient(): 

1884 _execute.record_gradient( 

1885 "DummySeedGenerator", _inputs_flat, _attrs, _result) 

1886 _result, = _result 

1887 return _result 

1888 

1889 

1890def filter_by_last_component_dataset(input_dataset, output_types, output_shapes, name=None): 

1891 r"""Creates a dataset containing elements of first component of `input_dataset` having true in the last component. 

1892 

1893 Args: 

1894 input_dataset: A `Tensor` of type `variant`. 

1895 output_types: A list of `tf.DTypes` that has length `>= 1`. 

1896 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

1897 name: A name for the operation (optional). 

1898 

1899 Returns: 

1900 A `Tensor` of type `variant`. 

1901 """ 

1902 _ctx = _context._context or _context.context() 

1903 tld = _ctx._thread_local_data 

1904 if tld.is_eager: 

1905 try: 

1906 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

1907 _ctx, "FilterByLastComponentDataset", name, input_dataset, 

1908 "output_types", output_types, "output_shapes", output_shapes) 

1909 return _result 

1910 except _core._NotOkStatusException as e: 

1911 _ops.raise_from_not_ok_status(e, name) 

1912 except _core._FallbackException: 

1913 pass 

1914 try: 

1915 return filter_by_last_component_dataset_eager_fallback( 

1916 input_dataset, output_types=output_types, 

1917 output_shapes=output_shapes, name=name, ctx=_ctx) 

1918 except _core._SymbolicException: 

1919 pass # Add nodes to the TensorFlow graph. 

1920 # Add nodes to the TensorFlow graph. 

1921 if not isinstance(output_types, (list, tuple)): 

1922 raise TypeError( 

1923 "Expected list for 'output_types' argument to " 

1924 "'filter_by_last_component_dataset' Op, not %r." % output_types) 

1925 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

1926 if not isinstance(output_shapes, (list, tuple)): 

1927 raise TypeError( 

1928 "Expected list for 'output_shapes' argument to " 

1929 "'filter_by_last_component_dataset' Op, not %r." % output_shapes) 

1930 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

1931 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

1932 "FilterByLastComponentDataset", input_dataset=input_dataset, 

1933 output_types=output_types, 

1934 output_shapes=output_shapes, 

1935 name=name) 

1936 _result = _outputs[:] 

1937 if _execute.must_record_gradient(): 

1938 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

1939 _op.get_attr("output_shapes")) 

1940 _inputs_flat = _op.inputs 

1941 _execute.record_gradient( 

1942 "FilterByLastComponentDataset", _inputs_flat, _attrs, _result) 

1943 _result, = _result 

1944 return _result 

1945 

1946FilterByLastComponentDataset = tf_export("raw_ops.FilterByLastComponentDataset")(_ops.to_raw_op(filter_by_last_component_dataset)) 

1947 

1948 

1949def filter_by_last_component_dataset_eager_fallback(input_dataset, output_types, output_shapes, name, ctx): 

1950 if not isinstance(output_types, (list, tuple)): 

1951 raise TypeError( 

1952 "Expected list for 'output_types' argument to " 

1953 "'filter_by_last_component_dataset' Op, not %r." % output_types) 

1954 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

1955 if not isinstance(output_shapes, (list, tuple)): 

1956 raise TypeError( 

1957 "Expected list for 'output_shapes' argument to " 

1958 "'filter_by_last_component_dataset' Op, not %r." % output_shapes) 

1959 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

1960 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

1961 _inputs_flat = [input_dataset] 

1962 _attrs = ("output_types", output_types, "output_shapes", output_shapes) 

1963 _result = _execute.execute(b"FilterByLastComponentDataset", 1, 

1964 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

1965 name=name) 

1966 if _execute.must_record_gradient(): 

1967 _execute.record_gradient( 

1968 "FilterByLastComponentDataset", _inputs_flat, _attrs, _result) 

1969 _result, = _result 

1970 return _result 

1971 

1972 
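# --- Editorial note, not part of the machine-generated output: a sketch of
# the op above on a (value, keep_flag) dataset, using the public to/from
# variant helpers and assuming TF 2.x. Illustrative helper, never called.
def _example_filter_by_last_component():
  import tensorflow as tf
  ds = tf.data.Dataset.from_tensor_slices(
      (tf.constant([1, 2, 3]), tf.constant([True, False, True])))
  variant = filter_by_last_component_dataset(
      tf.data.experimental.to_variant(ds),
      output_types=[tf.int32], output_shapes=[[]])
  # Yields 1 and 3: the first components whose last component was True.
  return tf.data.experimental.from_variant(
      variant, tf.TensorSpec([], tf.int32))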

1973def filter_dataset(input_dataset, other_arguments, predicate, output_types, output_shapes, metadata="", name=None): 

1974 r"""Creates a dataset containing elements of `input_dataset` matching `predicate`. 

1975 

1976 The `predicate` function must return a scalar boolean and accept the 

1977 following arguments: 

1978 

1979 * One tensor for each component of an element of `input_dataset`. 

1980 * One tensor for each value in `other_arguments`. 

1981 

1982 Args: 

1983 input_dataset: A `Tensor` of type `variant`. 

1984 other_arguments: A list of `Tensor` objects. 

1985 A list of tensors, typically values that were captured when 

1986 building a closure for `predicate`. 

1987 predicate: A function decorated with @Defun. 

1988 A function returning a scalar boolean. 

1989 output_types: A list of `tf.DTypes` that has length `>= 1`. 

1990 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

1991 metadata: An optional `string`. Defaults to `""`. 

1992 name: A name for the operation (optional). 

1993 

1994 Returns: 

1995 A `Tensor` of type `variant`. 

1996 """ 

1997 _ctx = _context._context or _context.context() 

1998 tld = _ctx._thread_local_data 

1999 if tld.is_eager: 

2000 try: 

2001 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

2002 _ctx, "FilterDataset", name, input_dataset, other_arguments, 

2003 "predicate", predicate, "output_types", output_types, "output_shapes", 

2004 output_shapes, "metadata", metadata) 

2005 return _result 

2006 except _core._NotOkStatusException as e: 

2007 _ops.raise_from_not_ok_status(e, name) 

2008 except _core._FallbackException: 

2009 pass 

2010 try: 

2011 return filter_dataset_eager_fallback( 

2012 input_dataset, other_arguments, predicate=predicate, 

2013 output_types=output_types, output_shapes=output_shapes, 

2014 metadata=metadata, name=name, ctx=_ctx) 

2015 except _core._SymbolicException: 

2016 pass # Add nodes to the TensorFlow graph. 

2017 # Add nodes to the TensorFlow graph. 

2018 if not isinstance(output_types, (list, tuple)): 

2019 raise TypeError( 

2020 "Expected list for 'output_types' argument to " 

2021 "'filter_dataset' Op, not %r." % output_types) 

2022 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

2023 if not isinstance(output_shapes, (list, tuple)): 

2024 raise TypeError( 

2025 "Expected list for 'output_shapes' argument to " 

2026 "'filter_dataset' Op, not %r." % output_shapes) 

2027 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

2028 if metadata is None: 

2029 metadata = "" 

2030 metadata = _execute.make_str(metadata, "metadata") 

2031 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

2032 "FilterDataset", input_dataset=input_dataset, 

2033 other_arguments=other_arguments, predicate=predicate, 

2034 output_types=output_types, 

2035 output_shapes=output_shapes, metadata=metadata, 

2036 name=name) 

2037 _result = _outputs[:] 

2038 if _execute.must_record_gradient(): 

2039 _attrs = ("predicate", _op.get_attr("predicate"), "Targuments", 

2040 _op.get_attr("Targuments"), "output_types", 

2041 _op.get_attr("output_types"), "output_shapes", 

2042 _op.get_attr("output_shapes"), "metadata", 

2043 _op.get_attr("metadata")) 

2044 _inputs_flat = _op.inputs 

2045 _execute.record_gradient( 

2046 "FilterDataset", _inputs_flat, _attrs, _result) 

2047 _result, = _result 

2048 return _result 

2049 

2050FilterDataset = tf_export("raw_ops.FilterDataset")(_ops.to_raw_op(filter_dataset)) 

2051 

2052 

2053def filter_dataset_eager_fallback(input_dataset, other_arguments, predicate, output_types, output_shapes, metadata, name, ctx): 

2054 if not isinstance(output_types, (list, tuple)): 

2055 raise TypeError( 

2056 "Expected list for 'output_types' argument to " 

2057 "'filter_dataset' Op, not %r." % output_types) 

2058 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

2059 if not isinstance(output_shapes, (list, tuple)): 

2060 raise TypeError( 

2061 "Expected list for 'output_shapes' argument to " 

2062 "'filter_dataset' Op, not %r." % output_shapes) 

2063 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

2064 if metadata is None: 

2065 metadata = "" 

2066 metadata = _execute.make_str(metadata, "metadata") 

2067 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx) 

2068 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

2069 _inputs_flat = [input_dataset] + list(other_arguments) 

2070 _attrs = ("predicate", predicate, "Targuments", _attr_Targuments, 

2071 "output_types", output_types, "output_shapes", output_shapes, "metadata", 

2072 metadata) 

2073 _result = _execute.execute(b"FilterDataset", 1, inputs=_inputs_flat, 

2074 attrs=_attrs, ctx=ctx, name=name) 

2075 if _execute.must_record_gradient(): 

2076 _execute.record_gradient( 

2077 "FilterDataset", _inputs_flat, _attrs, _result) 

2078 _result, = _result 

2079 return _result 

2080 

2081 
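# --- Editorial note, not part of the machine-generated output: user code
# rarely constructs `FilterDataset` directly, since `predicate` must be a
# pre-registered function attr; the public `Dataset.filter` method lowers to
# this op. Illustrative helper, never called.
def _example_filter():
  import tensorflow as tf
  # Keeps the even numbers 0, 2, 4, 6, 8; the predicate receives one
  # element per call and must return a scalar boolean.
  return tf.data.Dataset.range(10).filter(lambda x: x % 2 == 0)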

2082def finalize_dataset(input_dataset, output_types, output_shapes, has_captured_ref=False, name=None): 

2083 r"""Creates a dataset by applying `tf.data.Options` to `input_dataset`. 

2084 

2085 Args: 

2086 input_dataset: A `Tensor` of type `variant`. 

2087 A variant tensor representing the input dataset. 

2088 output_types: A list of `tf.DTypes` that has length `>= 1`. 

2089 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

2090 has_captured_ref: An optional `bool`. Defaults to `False`. 

2091 name: A name for the operation (optional). 

2092 

2093 Returns: 

2094 A `Tensor` of type `variant`. 

2095 """ 

2096 _ctx = _context._context or _context.context() 

2097 tld = _ctx._thread_local_data 

2098 if tld.is_eager: 

2099 try: 

2100 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

2101 _ctx, "FinalizeDataset", name, input_dataset, "has_captured_ref", 

2102 has_captured_ref, "output_types", output_types, "output_shapes", 

2103 output_shapes) 

2104 return _result 

2105 except _core._NotOkStatusException as e: 

2106 _ops.raise_from_not_ok_status(e, name) 

2107 except _core._FallbackException: 

2108 pass 

2109 try: 

2110 return finalize_dataset_eager_fallback( 

2111 input_dataset, has_captured_ref=has_captured_ref, 

2112 output_types=output_types, output_shapes=output_shapes, name=name, 

2113 ctx=_ctx) 

2114 except _core._SymbolicException: 

2115 pass # Add nodes to the TensorFlow graph. 

2116 # Add nodes to the TensorFlow graph. 

2117 if not isinstance(output_types, (list, tuple)): 

2118 raise TypeError( 

2119 "Expected list for 'output_types' argument to " 

2120 "'finalize_dataset' Op, not %r." % output_types) 

2121 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

2122 if not isinstance(output_shapes, (list, tuple)): 

2123 raise TypeError( 

2124 "Expected list for 'output_shapes' argument to " 

2125 "'finalize_dataset' Op, not %r." % output_shapes) 

2126 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

2127 if has_captured_ref is None: 

2128 has_captured_ref = False 

2129 has_captured_ref = _execute.make_bool(has_captured_ref, "has_captured_ref") 

2130 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

2131 "FinalizeDataset", input_dataset=input_dataset, 

2132 output_types=output_types, 

2133 output_shapes=output_shapes, 

2134 has_captured_ref=has_captured_ref, name=name) 

2135 _result = _outputs[:] 

2136 if _execute.must_record_gradient(): 

2137 _attrs = ("has_captured_ref", _op._get_attr_bool("has_captured_ref"), 

2138 "output_types", _op.get_attr("output_types"), "output_shapes", 

2139 _op.get_attr("output_shapes")) 

2140 _inputs_flat = _op.inputs 

2141 _execute.record_gradient( 

2142 "FinalizeDataset", _inputs_flat, _attrs, _result) 

2143 _result, = _result 

2144 return _result 

2145 

2146FinalizeDataset = tf_export("raw_ops.FinalizeDataset")(_ops.to_raw_op(finalize_dataset)) 

2147 

2148 

2149def finalize_dataset_eager_fallback(input_dataset, output_types, output_shapes, has_captured_ref, name, ctx): 

2150 if not isinstance(output_types, (list, tuple)): 

2151 raise TypeError( 

2152 "Expected list for 'output_types' argument to " 

2153 "'finalize_dataset' Op, not %r." % output_types) 

2154 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

2155 if not isinstance(output_shapes, (list, tuple)): 

2156 raise TypeError( 

2157 "Expected list for 'output_shapes' argument to " 

2158 "'finalize_dataset' Op, not %r." % output_shapes) 

2159 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

2160 if has_captured_ref is None: 

2161 has_captured_ref = False 

2162 has_captured_ref = _execute.make_bool(has_captured_ref, "has_captured_ref") 

2163 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

2164 _inputs_flat = [input_dataset] 

2165 _attrs = ("has_captured_ref", has_captured_ref, "output_types", 

2166 output_types, "output_shapes", output_shapes) 

2167 _result = _execute.execute(b"FinalizeDataset", 1, inputs=_inputs_flat, 

2168 attrs=_attrs, ctx=ctx, name=name) 

2169 if _execute.must_record_gradient(): 

2170 _execute.record_gradient( 

2171 "FinalizeDataset", _inputs_flat, _attrs, _result) 

2172 _result, = _result 

2173 return _result 

2174 

2175 
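# --- Editorial note, not part of the machine-generated output:
# `FinalizeDataset` is inserted by the tf.data runtime when an iterator is
# created; the options it applies come from the public `tf.data.Options`
# API, as sketched here for a recent TF 2.x release. Illustrative helper,
# never called.
def _example_finalize():
  import tensorflow as tf
  opts = tf.data.Options()
  opts.deterministic = False
  # Creating an iterator over the result triggers finalization with `opts`.
  return tf.data.Dataset.range(10).with_options(opts)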

2176def fixed_length_record_dataset(filenames, header_bytes, record_bytes, footer_bytes, buffer_size, metadata="", name=None): 

2177 r"""Creates a dataset that emits the records from one or more binary files. 

2178 

2179 Args: 

2180 filenames: A `Tensor` of type `string`. 

2181 A scalar or a vector containing the name(s) of the file(s) to be 

2182 read. 

2183 header_bytes: A `Tensor` of type `int64`. 

2184 A scalar representing the number of bytes to skip at the 

2185 beginning of a file. 

2186 record_bytes: A `Tensor` of type `int64`. 

2187 A scalar representing the number of bytes in each record. 

2188 footer_bytes: A `Tensor` of type `int64`. 

2189 A scalar representing the number of bytes to skip at the end 

2190 of a file. 

2191 buffer_size: A `Tensor` of type `int64`. 

2192 A scalar representing the number of bytes to buffer. Must be > 0. 

2193 metadata: An optional `string`. Defaults to `""`. 

2194 name: A name for the operation (optional). 

2195 

2196 Returns: 

2197 A `Tensor` of type `variant`. 

2198 """ 

2199 _ctx = _context._context or _context.context() 

2200 tld = _ctx._thread_local_data 

2201 if tld.is_eager: 

2202 try: 

2203 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

2204 _ctx, "FixedLengthRecordDataset", name, filenames, header_bytes, 

2205 record_bytes, footer_bytes, buffer_size, "metadata", metadata) 

2206 return _result 

2207 except _core._NotOkStatusException as e: 

2208 _ops.raise_from_not_ok_status(e, name) 

2209 except _core._FallbackException: 

2210 pass 

2211 try: 

2212 return fixed_length_record_dataset_eager_fallback( 

2213 filenames, header_bytes, record_bytes, footer_bytes, buffer_size, 

2214 metadata=metadata, name=name, ctx=_ctx) 

2215 except _core._SymbolicException: 

2216 pass # Add nodes to the TensorFlow graph. 

2217 # Add nodes to the TensorFlow graph. 

2218 if metadata is None: 

2219 metadata = "" 

2220 metadata = _execute.make_str(metadata, "metadata") 

2221 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

2222 "FixedLengthRecordDataset", filenames=filenames, 

2223 header_bytes=header_bytes, 

2224 record_bytes=record_bytes, 

2225 footer_bytes=footer_bytes, 

2226 buffer_size=buffer_size, 

2227 metadata=metadata, name=name) 

2228 _result = _outputs[:] 

2229 if _execute.must_record_gradient(): 

2230 _attrs = ("metadata", _op.get_attr("metadata")) 

2231 _inputs_flat = _op.inputs 

2232 _execute.record_gradient( 

2233 "FixedLengthRecordDataset", _inputs_flat, _attrs, _result) 

2234 _result, = _result 

2235 return _result 

2236 

2237FixedLengthRecordDataset = tf_export("raw_ops.FixedLengthRecordDataset")(_ops.to_raw_op(fixed_length_record_dataset)) 

2238 

2239 

2240def fixed_length_record_dataset_eager_fallback(filenames, header_bytes, record_bytes, footer_bytes, buffer_size, metadata, name, ctx): 

2241 if metadata is None: 

2242 metadata = "" 

2243 metadata = _execute.make_str(metadata, "metadata") 

2244 filenames = _ops.convert_to_tensor(filenames, _dtypes.string) 

2245 header_bytes = _ops.convert_to_tensor(header_bytes, _dtypes.int64) 

2246 record_bytes = _ops.convert_to_tensor(record_bytes, _dtypes.int64) 

2247 footer_bytes = _ops.convert_to_tensor(footer_bytes, _dtypes.int64) 

2248 buffer_size = _ops.convert_to_tensor(buffer_size, _dtypes.int64) 

2249 _inputs_flat = [filenames, header_bytes, record_bytes, footer_bytes, buffer_size] 

2250 _attrs = ("metadata", metadata) 

2251 _result = _execute.execute(b"FixedLengthRecordDataset", 1, 

2252 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

2253 name=name) 

2254 if _execute.must_record_gradient(): 

2255 _execute.record_gradient( 

2256 "FixedLengthRecordDataset", _inputs_flat, _attrs, _result) 

2257 _result, = _result 

2258 return _result 

2259 

2260 
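# --- Editorial note, not part of the machine-generated output: the public
# wrapper for this op family is `tf.data.FixedLengthRecordDataset`. A sketch
# reading 4-byte records from a hypothetical file path; illustrative helper,
# never called.
def _example_fixed_length_records():
  import tensorflow as tf
  # Each element is a scalar string holding one record's 4 raw bytes.
  return tf.data.FixedLengthRecordDataset(
      ["/tmp/records.bin"], record_bytes=4, header_bytes=0, footer_bytes=0)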

2261def fixed_length_record_dataset_v2(filenames, header_bytes, record_bytes, footer_bytes, buffer_size, compression_type, metadata="", name=None): 

2262 r"""TODO: add doc. 

2263 

2264 Args: 

2265 filenames: A `Tensor` of type `string`. 

2266 header_bytes: A `Tensor` of type `int64`. 

2267 record_bytes: A `Tensor` of type `int64`. 

2268 footer_bytes: A `Tensor` of type `int64`. 

2269 buffer_size: A `Tensor` of type `int64`. 

2270 compression_type: A `Tensor` of type `string`. 

2271 metadata: An optional `string`. Defaults to `""`. 

2272 name: A name for the operation (optional). 

2273 

2274 Returns: 

2275 A `Tensor` of type `variant`. 

2276 """ 

2277 _ctx = _context._context or _context.context() 

2278 tld = _ctx._thread_local_data 

2279 if tld.is_eager: 

2280 try: 

2281 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

2282 _ctx, "FixedLengthRecordDatasetV2", name, filenames, header_bytes, 

2283 record_bytes, footer_bytes, buffer_size, compression_type, "metadata", 

2284 metadata) 

2285 return _result 

2286 except _core._NotOkStatusException as e: 

2287 _ops.raise_from_not_ok_status(e, name) 

2288 except _core._FallbackException: 

2289 pass 

2290 try: 

2291 return fixed_length_record_dataset_v2_eager_fallback( 

2292 filenames, header_bytes, record_bytes, footer_bytes, buffer_size, 

2293 compression_type, metadata=metadata, name=name, ctx=_ctx) 

2294 except _core._SymbolicException: 

2295 pass # Add nodes to the TensorFlow graph. 

2296 # Add nodes to the TensorFlow graph. 

2297 if metadata is None: 

2298 metadata = "" 

2299 metadata = _execute.make_str(metadata, "metadata") 

2300 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

2301 "FixedLengthRecordDatasetV2", filenames=filenames, 

2302 header_bytes=header_bytes, 

2303 record_bytes=record_bytes, 

2304 footer_bytes=footer_bytes, 

2305 buffer_size=buffer_size, 

2306 compression_type=compression_type, 

2307 metadata=metadata, name=name) 

2308 _result = _outputs[:] 

2309 if _execute.must_record_gradient(): 

2310 _attrs = ("metadata", _op.get_attr("metadata")) 

2311 _inputs_flat = _op.inputs 

2312 _execute.record_gradient( 

2313 "FixedLengthRecordDatasetV2", _inputs_flat, _attrs, _result) 

2314 _result, = _result 

2315 return _result 

2316 

2317FixedLengthRecordDatasetV2 = tf_export("raw_ops.FixedLengthRecordDatasetV2")(_ops.to_raw_op(fixed_length_record_dataset_v2)) 

2318 

2319 

2320def fixed_length_record_dataset_v2_eager_fallback(filenames, header_bytes, record_bytes, footer_bytes, buffer_size, compression_type, metadata, name, ctx): 

2321 if metadata is None: 

2322 metadata = "" 

2323 metadata = _execute.make_str(metadata, "metadata") 

2324 filenames = _ops.convert_to_tensor(filenames, _dtypes.string) 

2325 header_bytes = _ops.convert_to_tensor(header_bytes, _dtypes.int64) 

2326 record_bytes = _ops.convert_to_tensor(record_bytes, _dtypes.int64) 

2327 footer_bytes = _ops.convert_to_tensor(footer_bytes, _dtypes.int64) 

2328 buffer_size = _ops.convert_to_tensor(buffer_size, _dtypes.int64) 

2329 compression_type = _ops.convert_to_tensor(compression_type, _dtypes.string) 

2330 _inputs_flat = [filenames, header_bytes, record_bytes, footer_bytes, buffer_size, compression_type] 

2331 _attrs = ("metadata", metadata) 

2332 _result = _execute.execute(b"FixedLengthRecordDatasetV2", 1, 

2333 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

2334 name=name) 

2335 if _execute.must_record_gradient(): 

2336 _execute.record_gradient( 

2337 "FixedLengthRecordDatasetV2", _inputs_flat, _attrs, _result) 

2338 _result, = _result 

2339 return _result 

2340 

2341 

2342def flat_map_dataset(input_dataset, other_arguments, f, output_types, output_shapes, metadata="", name=None): 

2343 r"""Creates a dataset that applies `f` to the outputs of `input_dataset`. 

2344 

2345 Unlike MapDataset, the `f` in FlatMapDataset is expected to return a 

2346 Dataset variant, and FlatMapDataset will flatten successive results 

2347 into a single Dataset. 

2348 

2349 Args: 

2350 input_dataset: A `Tensor` of type `variant`. 

2351 other_arguments: A list of `Tensor` objects. 

2352 f: A function decorated with @Defun. 

2353 A function mapping elements of `input_dataset`, concatenated with 

2354 `other_arguments`, to a Dataset variant that contains elements matching 

2355 `output_types` and `output_shapes`. 

2356 output_types: A list of `tf.DTypes` that has length `>= 1`. 

2357 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

2358 metadata: An optional `string`. Defaults to `""`. 

2359 name: A name for the operation (optional). 

2360 

2361 Returns: 

2362 A `Tensor` of type `variant`. 

2363 """ 

2364 _ctx = _context._context or _context.context() 

2365 tld = _ctx._thread_local_data 

2366 if tld.is_eager: 

2367 try: 

2368 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

2369 _ctx, "FlatMapDataset", name, input_dataset, other_arguments, "f", f, 

2370 "output_types", output_types, "output_shapes", output_shapes, 

2371 "metadata", metadata) 

2372 return _result 

2373 except _core._NotOkStatusException as e: 

2374 _ops.raise_from_not_ok_status(e, name) 

2375 except _core._FallbackException: 

2376 pass 

2377 try: 

2378 return flat_map_dataset_eager_fallback( 

2379 input_dataset, other_arguments, f=f, output_types=output_types, 

2380 output_shapes=output_shapes, metadata=metadata, name=name, ctx=_ctx) 

2381 except _core._SymbolicException: 

2382 pass # Add nodes to the TensorFlow graph. 

2383 # Add nodes to the TensorFlow graph. 

2384 if not isinstance(output_types, (list, tuple)): 

2385 raise TypeError( 

2386 "Expected list for 'output_types' argument to " 

2387 "'flat_map_dataset' Op, not %r." % output_types) 

2388 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

2389 if not isinstance(output_shapes, (list, tuple)): 

2390 raise TypeError( 

2391 "Expected list for 'output_shapes' argument to " 

2392 "'flat_map_dataset' Op, not %r." % output_shapes) 

2393 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

2394 if metadata is None: 

2395 metadata = "" 

2396 metadata = _execute.make_str(metadata, "metadata") 

2397 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

2398 "FlatMapDataset", input_dataset=input_dataset, 

2399 other_arguments=other_arguments, f=f, 

2400 output_types=output_types, 

2401 output_shapes=output_shapes, metadata=metadata, 

2402 name=name) 

2403 _result = _outputs[:] 

2404 if _execute.must_record_gradient(): 

2405 _attrs = ("f", _op.get_attr("f"), "Targuments", 

2406 _op.get_attr("Targuments"), "output_types", 

2407 _op.get_attr("output_types"), "output_shapes", 

2408 _op.get_attr("output_shapes"), "metadata", 

2409 _op.get_attr("metadata")) 

2410 _inputs_flat = _op.inputs 

2411 _execute.record_gradient( 

2412 "FlatMapDataset", _inputs_flat, _attrs, _result) 

2413 _result, = _result 

2414 return _result 

2415 

2416FlatMapDataset = tf_export("raw_ops.FlatMapDataset")(_ops.to_raw_op(flat_map_dataset)) 

2417 

2418 

2419def flat_map_dataset_eager_fallback(input_dataset, other_arguments, f, output_types, output_shapes, metadata, name, ctx): 

2420 if not isinstance(output_types, (list, tuple)): 

2421 raise TypeError( 

2422 "Expected list for 'output_types' argument to " 

2423 "'flat_map_dataset' Op, not %r." % output_types) 

2424 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

2425 if not isinstance(output_shapes, (list, tuple)): 

2426 raise TypeError( 

2427 "Expected list for 'output_shapes' argument to " 

2428 "'flat_map_dataset' Op, not %r." % output_shapes) 

2429 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

2430 if metadata is None: 

2431 metadata = "" 

2432 metadata = _execute.make_str(metadata, "metadata") 

2433 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx) 

2434 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

2435 _inputs_flat = [input_dataset] + list(other_arguments) 

2436 _attrs = ("f", f, "Targuments", _attr_Targuments, "output_types", 

2437 output_types, "output_shapes", output_shapes, "metadata", metadata) 

2438 _result = _execute.execute(b"FlatMapDataset", 1, inputs=_inputs_flat, 

2439 attrs=_attrs, ctx=ctx, name=name) 

2440 if _execute.must_record_gradient(): 

2441 _execute.record_gradient( 

2442 "FlatMapDataset", _inputs_flat, _attrs, _result) 

2443 _result, = _result 

2444 return _result 

2445 

2446 
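# --- Editorial note, not part of the machine-generated output: the public
# `Dataset.flat_map` method lowers to this op; `f` must return a dataset,
# and the resulting datasets are concatenated in order. Illustrative helper,
# never called.
def _example_flat_map():
  import tensorflow as tf
  # Yields 0, 0, 1, 0, 1, 2: ranges of increasing length, flattened.
  return tf.data.Dataset.range(4).flat_map(tf.data.Dataset.range)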

2447def generator_dataset(init_func_other_args, next_func_other_args, finalize_func_other_args, init_func, next_func, finalize_func, output_types, output_shapes, metadata="", name=None): 

2448 r"""Creates a dataset that invokes a function to generate elements. 

2449 

2450 Args: 

2451 init_func_other_args: A list of `Tensor` objects. 

2452 next_func_other_args: A list of `Tensor` objects. 

2453 finalize_func_other_args: A list of `Tensor` objects. 

2454 init_func: A function decorated with @Defun. 

2455 next_func: A function decorated with @Defun. 

2456 finalize_func: A function decorated with @Defun. 

2457 output_types: A list of `tf.DTypes` that has length `>= 1`. 

2458 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

2459 metadata: An optional `string`. Defaults to `""`. 

2460 name: A name for the operation (optional). 

2461 

2462 Returns: 

2463 A `Tensor` of type `variant`. 

2464 """ 

2465 _ctx = _context._context or _context.context() 

2466 tld = _ctx._thread_local_data 

2467 if tld.is_eager: 

2468 try: 

2469 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

2470 _ctx, "GeneratorDataset", name, init_func_other_args, 

2471 next_func_other_args, finalize_func_other_args, "init_func", 

2472 init_func, "next_func", next_func, "finalize_func", finalize_func, 

2473 "output_types", output_types, "output_shapes", output_shapes, 

2474 "metadata", metadata) 

2475 return _result 

2476 except _core._NotOkStatusException as e: 

2477 _ops.raise_from_not_ok_status(e, name) 

2478 except _core._FallbackException: 

2479 pass 

2480 try: 

2481 return generator_dataset_eager_fallback( 

2482 init_func_other_args, next_func_other_args, 

2483 finalize_func_other_args, init_func=init_func, next_func=next_func, 

2484 finalize_func=finalize_func, output_types=output_types, 

2485 output_shapes=output_shapes, metadata=metadata, name=name, ctx=_ctx) 

2486 except _core._SymbolicException: 

2487 pass # Add nodes to the TensorFlow graph. 

2488 # Add nodes to the TensorFlow graph. 

2489 if not isinstance(output_types, (list, tuple)): 

2490 raise TypeError( 

2491 "Expected list for 'output_types' argument to " 

2492 "'generator_dataset' Op, not %r." % output_types) 

2493 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

2494 if not isinstance(output_shapes, (list, tuple)): 

2495 raise TypeError( 

2496 "Expected list for 'output_shapes' argument to " 

2497 "'generator_dataset' Op, not %r." % output_shapes) 

2498 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

2499 if metadata is None: 

2500 metadata = "" 

2501 metadata = _execute.make_str(metadata, "metadata") 

2502 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

2503 "GeneratorDataset", init_func_other_args=init_func_other_args, 

2504 next_func_other_args=next_func_other_args, 

2505 finalize_func_other_args=finalize_func_other_args, 

2506 init_func=init_func, next_func=next_func, 

2507 finalize_func=finalize_func, 

2508 output_types=output_types, 

2509 output_shapes=output_shapes, metadata=metadata, 

2510 name=name) 

2511 _result = _outputs[:] 

2512 if _execute.must_record_gradient(): 

2513 _attrs = ("init_func", _op.get_attr("init_func"), "next_func", 

2514 _op.get_attr("next_func"), "finalize_func", 

2515 _op.get_attr("finalize_func"), "Tinit_func_args", 

2516 _op.get_attr("Tinit_func_args"), "Tnext_func_args", 

2517 _op.get_attr("Tnext_func_args"), "Tfinalize_func_args", 

2518 _op.get_attr("Tfinalize_func_args"), "output_types", 

2519 _op.get_attr("output_types"), "output_shapes", 

2520 _op.get_attr("output_shapes"), "metadata", 

2521 _op.get_attr("metadata")) 

2522 _inputs_flat = _op.inputs 

2523 _execute.record_gradient( 

2524 "GeneratorDataset", _inputs_flat, _attrs, _result) 

2525 _result, = _result 

2526 return _result 

2527 

2528GeneratorDataset = tf_export("raw_ops.GeneratorDataset")(_ops.to_raw_op(generator_dataset)) 

2529 

2530 

2531def generator_dataset_eager_fallback(init_func_other_args, next_func_other_args, finalize_func_other_args, init_func, next_func, finalize_func, output_types, output_shapes, metadata, name, ctx): 

2532 if not isinstance(output_types, (list, tuple)): 

2533 raise TypeError( 

2534 "Expected list for 'output_types' argument to " 

2535 "'generator_dataset' Op, not %r." % output_types) 

2536 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

2537 if not isinstance(output_shapes, (list, tuple)): 

2538 raise TypeError( 

2539 "Expected list for 'output_shapes' argument to " 

2540 "'generator_dataset' Op, not %r." % output_shapes) 

2541 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

2542 if metadata is None: 

2543 metadata = "" 

2544 metadata = _execute.make_str(metadata, "metadata") 

2545 _attr_Tinit_func_args, init_func_other_args = _execute.convert_to_mixed_eager_tensors(init_func_other_args, ctx) 

2546 _attr_Tnext_func_args, next_func_other_args = _execute.convert_to_mixed_eager_tensors(next_func_other_args, ctx) 

2547 _attr_Tfinalize_func_args, finalize_func_other_args = _execute.convert_to_mixed_eager_tensors(finalize_func_other_args, ctx) 

2548 _inputs_flat = list(init_func_other_args) + list(next_func_other_args) + list(finalize_func_other_args) 

2549 _attrs = ("init_func", init_func, "next_func", next_func, "finalize_func", 

2550 finalize_func, "Tinit_func_args", _attr_Tinit_func_args, "Tnext_func_args", 

2551 _attr_Tnext_func_args, "Tfinalize_func_args", _attr_Tfinalize_func_args, 

2552 "output_types", output_types, "output_shapes", output_shapes, "metadata", 

2553 metadata) 

2554 _result = _execute.execute(b"GeneratorDataset", 1, inputs=_inputs_flat, 

2555 attrs=_attrs, ctx=ctx, name=name) 

2556 if _execute.must_record_gradient(): 

2557 _execute.record_gradient( 

2558 "GeneratorDataset", _inputs_flat, _attrs, _result) 

2559 _result, = _result 

2560 return _result 

2561 

2562 
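# --- Editorial note, not part of the machine-generated output: the public
# entry point for this op is `tf.data.Dataset.from_generator`, which
# synthesizes the init/next/finalize functions. Sketch assuming TF 2.x;
# illustrative helper, never called.
def _example_generator_dataset():
  import tensorflow as tf
  def gen():
    for i in range(3):
      yield i * i
  # Yields 0, 1, 4 as int64 scalars.
  return tf.data.Dataset.from_generator(
      gen, output_signature=tf.TensorSpec([], tf.int64))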

2563def get_options(input_dataset, name=None): 

2564 r"""Returns the `tf.data.Options` attached to `input_dataset`. 

2565 

2566 Args: 

2567 input_dataset: A `Tensor` of type `variant`. 

2568 A variant tensor representing the input dataset. 

2569 name: A name for the operation (optional). 

2570 

2571 Returns: 

2572 A `Tensor` of type `string`. 

2573 """ 

2574 _ctx = _context._context or _context.context() 

2575 tld = _ctx._thread_local_data 

2576 if tld.is_eager: 

2577 try: 

2578 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

2579 _ctx, "GetOptions", name, input_dataset) 

2580 return _result 

2581 except _core._NotOkStatusException as e: 

2582 _ops.raise_from_not_ok_status(e, name) 

2583 except _core._FallbackException: 

2584 pass 

2585 try: 

2586 return get_options_eager_fallback( 

2587 input_dataset, name=name, ctx=_ctx) 

2588 except _core._SymbolicException: 

2589 pass # Add nodes to the TensorFlow graph. 

2590 # Add nodes to the TensorFlow graph. 

2591 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

2592 "GetOptions", input_dataset=input_dataset, name=name) 

2593 _result = _outputs[:] 

2594 if _execute.must_record_gradient(): 

2595 _attrs = () 

2596 _inputs_flat = _op.inputs 

2597 _execute.record_gradient( 

2598 "GetOptions", _inputs_flat, _attrs, _result) 

2599 _result, = _result 

2600 return _result 

2601 

2602GetOptions = tf_export("raw_ops.GetOptions")(_ops.to_raw_op(get_options)) 

2603 

2604 

2605def get_options_eager_fallback(input_dataset, name, ctx): 

2606 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

2607 _inputs_flat = [input_dataset] 

2608 _attrs = None 

2609 _result = _execute.execute(b"GetOptions", 1, inputs=_inputs_flat, 

2610 attrs=_attrs, ctx=ctx, name=name) 

2611 if _execute.must_record_gradient(): 

2612 _execute.record_gradient( 

2613 "GetOptions", _inputs_flat, _attrs, _result) 

2614 _result, = _result 

2615 return _result 

2616 

2617 
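
# A minimal usage sketch for GetOptions (assumes TF 2.x eager mode;
# `_variant_tensor` is a private tf.data.Dataset attribute, used here only
# for illustration):
import tensorflow as tf

_ds = tf.data.Dataset.range(4).with_options(tf.data.Options())
_serialized = tf.raw_ops.GetOptions(input_dataset=_ds._variant_tensor)
print(_serialized.dtype)  # tf.string: the serialized tf.data.Options proto
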

2618def interleave_dataset(input_dataset, other_arguments, cycle_length, block_length, f, output_types, output_shapes, metadata="", name=None): 

2619 r"""Creates a dataset that applies `f` to the outputs of `input_dataset`. 

2620 

2621 Unlike MapDataset, the `f` in InterleaveDataset is expected to return 

2622 a Dataset variant, and InterleaveDataset will flatten successive 

2623 results into a single Dataset. Unlike FlatMapDataset, 

2624 InterleaveDataset will interleave sequences of up to `block_length` 

2625 consecutive elements from `cycle_length` input elements. 

2626 

2627 Args: 

2628 input_dataset: A `Tensor` of type `variant`. 

2629 other_arguments: A list of `Tensor` objects. 

2630 cycle_length: A `Tensor` of type `int64`. 

2631 block_length: A `Tensor` of type `int64`. 

2632 f: A function decorated with @Defun. 

2633 A function mapping elements of `input_dataset`, concatenated with 

2634 `other_arguments`, to a Dataset variant that contains elements matching 

2635 `output_types` and `output_shapes`. 

2636 output_types: A list of `tf.DTypes` that has length `>= 1`. 

2637 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

2638 metadata: An optional `string`. Defaults to `""`. 

2639 name: A name for the operation (optional). 

2640 

2641 Returns: 

2642 A `Tensor` of type `variant`. 

2643 """ 

2644 _ctx = _context._context or _context.context() 

2645 tld = _ctx._thread_local_data 

2646 if tld.is_eager: 

2647 try: 

2648 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

2649 _ctx, "InterleaveDataset", name, input_dataset, other_arguments, 

2650 cycle_length, block_length, "f", f, "output_types", output_types, 

2651 "output_shapes", output_shapes, "metadata", metadata) 

2652 return _result 

2653 except _core._NotOkStatusException as e: 

2654 _ops.raise_from_not_ok_status(e, name) 

2655 except _core._FallbackException: 

2656 pass 

2657 try: 

2658 return interleave_dataset_eager_fallback( 

2659 input_dataset, other_arguments, cycle_length, block_length, f=f, 

2660 output_types=output_types, output_shapes=output_shapes, 

2661 metadata=metadata, name=name, ctx=_ctx) 

2662 except _core._SymbolicException: 

2663 pass # Add nodes to the TensorFlow graph. 

2664 # Add nodes to the TensorFlow graph. 

2665 if not isinstance(output_types, (list, tuple)): 

2666 raise TypeError( 

2667 "Expected list for 'output_types' argument to " 

2668 "'interleave_dataset' Op, not %r." % output_types) 

2669 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

2670 if not isinstance(output_shapes, (list, tuple)): 

2671 raise TypeError( 

2672 "Expected list for 'output_shapes' argument to " 

2673 "'interleave_dataset' Op, not %r." % output_shapes) 

2674 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

2675 if metadata is None: 

2676 metadata = "" 

2677 metadata = _execute.make_str(metadata, "metadata") 

2678 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

2679 "InterleaveDataset", input_dataset=input_dataset, 

2680 other_arguments=other_arguments, 

2681 cycle_length=cycle_length, 

2682 block_length=block_length, f=f, 

2683 output_types=output_types, 

2684 output_shapes=output_shapes, metadata=metadata, 

2685 name=name) 

2686 _result = _outputs[:] 

2687 if _execute.must_record_gradient(): 

2688 _attrs = ("f", _op.get_attr("f"), "Targuments", 

2689 _op.get_attr("Targuments"), "output_types", 

2690 _op.get_attr("output_types"), "output_shapes", 

2691 _op.get_attr("output_shapes"), "metadata", 

2692 _op.get_attr("metadata")) 

2693 _inputs_flat = _op.inputs 

2694 _execute.record_gradient( 

2695 "InterleaveDataset", _inputs_flat, _attrs, _result) 

2696 _result, = _result 

2697 return _result 

2698 

2699InterleaveDataset = tf_export("raw_ops.InterleaveDataset")(_ops.to_raw_op(interleave_dataset)) 

2700 

2701 

2702def interleave_dataset_eager_fallback(input_dataset, other_arguments, cycle_length, block_length, f, output_types, output_shapes, metadata, name, ctx): 

2703 if not isinstance(output_types, (list, tuple)): 

2704 raise TypeError( 

2705 "Expected list for 'output_types' argument to " 

2706 "'interleave_dataset' Op, not %r." % output_types) 

2707 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

2708 if not isinstance(output_shapes, (list, tuple)): 

2709 raise TypeError( 

2710 "Expected list for 'output_shapes' argument to " 

2711 "'interleave_dataset' Op, not %r." % output_shapes) 

2712 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

2713 if metadata is None: 

2714 metadata = "" 

2715 metadata = _execute.make_str(metadata, "metadata") 

2716 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx) 

2717 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

2718 cycle_length = _ops.convert_to_tensor(cycle_length, _dtypes.int64) 

2719 block_length = _ops.convert_to_tensor(block_length, _dtypes.int64) 

2720 _inputs_flat = [input_dataset] + list(other_arguments) + [cycle_length, block_length] 

2721 _attrs = ("f", f, "Targuments", _attr_Targuments, "output_types", 

2722 output_types, "output_shapes", output_shapes, "metadata", metadata) 

2723 _result = _execute.execute(b"InterleaveDataset", 1, inputs=_inputs_flat, 

2724 attrs=_attrs, ctx=ctx, name=name) 

2725 if _execute.must_record_gradient(): 

2726 _execute.record_gradient( 

2727 "InterleaveDataset", _inputs_flat, _attrs, _result) 

2728 _result, = _result 

2729 return _result 

2730 

2731 
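
# A hedged sketch of the public API that lowers to InterleaveDataset:
# Dataset.interleave pulls `block_length` consecutive elements from each of
# `cycle_length` inner datasets in round-robin order.
import tensorflow as tf

_out = tf.data.Dataset.range(2).interleave(
    lambda x: tf.data.Dataset.from_tensors(x).repeat(4),
    cycle_length=2, block_length=2)
print(list(_out.as_numpy_iterator()))  # [0, 0, 1, 1, 0, 0, 1, 1]
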

2732def iterator(shared_name, container, output_types, output_shapes, name=None): 

2733 r"""A container for an iterator resource. 

2734 

2735 Args: 

2736 shared_name: A `string`. 

2737 container: A `string`. 

2738 output_types: A list of `tf.DTypes` that has length `>= 1`. 

2739 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

2740 name: A name for the operation (optional). 

2741 

2742 Returns: 

2743 A `Tensor` of type `resource`. 

2744 """ 

2745 _ctx = _context._context or _context.context() 

2746 tld = _ctx._thread_local_data 

2747 if tld.is_eager: 

2748 try: 

2749 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

2750 _ctx, "Iterator", name, "shared_name", shared_name, "container", 

2751 container, "output_types", output_types, "output_shapes", 

2752 output_shapes) 

2753 return _result 

2754 except _core._NotOkStatusException as e: 

2755 _ops.raise_from_not_ok_status(e, name) 

2756 except _core._FallbackException: 

2757 pass 

2758 try: 

2759 return iterator_eager_fallback( 

2760 shared_name=shared_name, container=container, 

2761 output_types=output_types, output_shapes=output_shapes, name=name, 

2762 ctx=_ctx) 

2763 except _core._SymbolicException: 

2764 pass # Add nodes to the TensorFlow graph. 

2765 # Add nodes to the TensorFlow graph. 

2766 shared_name = _execute.make_str(shared_name, "shared_name") 

2767 container = _execute.make_str(container, "container") 

2768 if not isinstance(output_types, (list, tuple)): 

2769 raise TypeError( 

2770 "Expected list for 'output_types' argument to " 

2771 "'iterator' Op, not %r." % output_types) 

2772 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

2773 if not isinstance(output_shapes, (list, tuple)): 

2774 raise TypeError( 

2775 "Expected list for 'output_shapes' argument to " 

2776 "'iterator' Op, not %r." % output_shapes) 

2777 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

2778 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

2779 "Iterator", shared_name=shared_name, container=container, 

2780 output_types=output_types, output_shapes=output_shapes, 

2781 name=name) 

2782 _result = _outputs[:] 

2783 if _execute.must_record_gradient(): 

2784 _attrs = ("shared_name", _op.get_attr("shared_name"), "container", 

2785 _op.get_attr("container"), "output_types", 

2786 _op.get_attr("output_types"), "output_shapes", 

2787 _op.get_attr("output_shapes")) 

2788 _inputs_flat = _op.inputs 

2789 _execute.record_gradient( 

2790 "Iterator", _inputs_flat, _attrs, _result) 

2791 _result, = _result 

2792 return _result 

2793 

2794Iterator = tf_export("raw_ops.Iterator")(_ops.to_raw_op(iterator)) 

2795 

2796 

2797def iterator_eager_fallback(shared_name, container, output_types, output_shapes, name, ctx): 

2798 shared_name = _execute.make_str(shared_name, "shared_name") 

2799 container = _execute.make_str(container, "container") 

2800 if not isinstance(output_types, (list, tuple)): 

2801 raise TypeError( 

2802 "Expected list for 'output_types' argument to " 

2803 "'iterator' Op, not %r." % output_types) 

2804 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

2805 if not isinstance(output_shapes, (list, tuple)): 

2806 raise TypeError( 

2807 "Expected list for 'output_shapes' argument to " 

2808 "'iterator' Op, not %r." % output_shapes) 

2809 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

2810 _inputs_flat = [] 

2811 _attrs = ("shared_name", shared_name, "container", container, 

2812 "output_types", output_types, "output_shapes", output_shapes) 

2813 _result = _execute.execute(b"Iterator", 1, inputs=_inputs_flat, 

2814 attrs=_attrs, ctx=ctx, name=name) 

2815 if _execute.must_record_gradient(): 

2816 _execute.record_gradient( 

2817 "Iterator", _inputs_flat, _attrs, _result) 

2818 _result, = _result 

2819 return _result 

2820 

2821 
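
# A hedged sketch (TF1-style graph mode) wiring Iterator together with the
# MakeIterator and IteratorGetNext ops defined later in this file.
# `_variant_tensor` is a private Dataset attribute, shown for illustration.
import tensorflow as tf

_g = tf.Graph()
with _g.as_default():
  _ds = tf.data.Dataset.range(3)
  _handle = tf.raw_ops.Iterator(
      shared_name="demo_iterator", container="",
      output_types=[tf.int64], output_shapes=[[]])
  _init = tf.raw_ops.MakeIterator(dataset=_ds._variant_tensor,
                                  iterator=_handle)
  _next = tf.raw_ops.IteratorGetNext(
      iterator=_handle, output_types=[tf.int64], output_shapes=[[]])
with tf.compat.v1.Session(graph=_g) as _sess:
  _sess.run(_init)
  print(_sess.run(_next))  # [0]; subsequent runs yield 1, then 2
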

2822def iterator_from_string_handle(string_handle, output_types=[], output_shapes=[], name=None): 

2823 r"""Converts the given string representing a handle to an iterator to a resource. 

2824 

2825 Args: 

2826 string_handle: A `Tensor` of type `string`. 

2827 A string representation of the given handle. 

2828 output_types: An optional list of `tf.DTypes`. Defaults to `[]`. 

2829 If specified, defines the type of each tuple component in an 

2830 element produced by the resulting iterator. 

2831 output_shapes: An optional list of shapes (each a `tf.TensorShape` or list of `ints`). Defaults to `[]`. 

2832 If specified, defines the shape of each tuple component in an 

2833 element produced by the resulting iterator. 

2834 name: A name for the operation (optional). 

2835 

2836 Returns: 

2837 A `Tensor` of type `resource`. 

2838 """ 

2839 _ctx = _context._context or _context.context() 

2840 tld = _ctx._thread_local_data 

2841 if tld.is_eager: 

2842 try: 

2843 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

2844 _ctx, "IteratorFromStringHandle", name, string_handle, "output_types", 

2845 output_types, "output_shapes", output_shapes) 

2846 return _result 

2847 except _core._NotOkStatusException as e: 

2848 _ops.raise_from_not_ok_status(e, name) 

2849 except _core._FallbackException: 

2850 pass 

2851 try: 

2852 return iterator_from_string_handle_eager_fallback( 

2853 string_handle, output_types=output_types, 

2854 output_shapes=output_shapes, name=name, ctx=_ctx) 

2855 except _core._SymbolicException: 

2856 pass # Add nodes to the TensorFlow graph. 

2857 # Add nodes to the TensorFlow graph. 

2858 if output_types is None: 

2859 output_types = [] 

2860 if not isinstance(output_types, (list, tuple)): 

2861 raise TypeError( 

2862 "Expected list for 'output_types' argument to " 

2863 "'iterator_from_string_handle' Op, not %r." % output_types) 

2864 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

2865 if output_shapes is None: 

2866 output_shapes = [] 

2867 if not isinstance(output_shapes, (list, tuple)): 

2868 raise TypeError( 

2869 "Expected list for 'output_shapes' argument to " 

2870 "'iterator_from_string_handle' Op, not %r." % output_shapes) 

2871 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

2872 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

2873 "IteratorFromStringHandle", string_handle=string_handle, 

2874 output_types=output_types, 

2875 output_shapes=output_shapes, name=name) 

2876 _result = _outputs[:] 

2877 if _execute.must_record_gradient(): 

2878 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

2879 _op.get_attr("output_shapes")) 

2880 _inputs_flat = _op.inputs 

2881 _execute.record_gradient( 

2882 "IteratorFromStringHandle", _inputs_flat, _attrs, _result) 

2883 _result, = _result 

2884 return _result 

2885 

2886IteratorFromStringHandle = tf_export("raw_ops.IteratorFromStringHandle")(_ops.to_raw_op(iterator_from_string_handle)) 

2887 

2888 

2889def iterator_from_string_handle_eager_fallback(string_handle, output_types, output_shapes, name, ctx): 

2890 if output_types is None: 

2891 output_types = [] 

2892 if not isinstance(output_types, (list, tuple)): 

2893 raise TypeError( 

2894 "Expected list for 'output_types' argument to " 

2895 "'iterator_from_string_handle' Op, not %r." % output_types) 

2896 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

2897 if output_shapes is None: 

2898 output_shapes = [] 

2899 if not isinstance(output_shapes, (list, tuple)): 

2900 raise TypeError( 

2901 "Expected list for 'output_shapes' argument to " 

2902 "'iterator_from_string_handle' Op, not %r." % output_shapes) 

2903 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

2904 string_handle = _ops.convert_to_tensor(string_handle, _dtypes.string) 

2905 _inputs_flat = [string_handle] 

2906 _attrs = ("output_types", output_types, "output_shapes", output_shapes) 

2907 _result = _execute.execute(b"IteratorFromStringHandle", 1, 

2908 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

2909 name=name) 

2910 if _execute.must_record_gradient(): 

2911 _execute.record_gradient( 

2912 "IteratorFromStringHandle", _inputs_flat, _attrs, _result) 

2913 _result, = _result 

2914 return _result 

2915 

2916 
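
# A hedged sketch: restoring an iterator from its string handle (TF1-style).
# Iterator.string_handle() wraps the IteratorToStringHandle op defined later
# in this file.
import tensorflow as tf

_g = tf.Graph()
with _g.as_default():
  _it = tf.compat.v1.data.make_one_shot_iterator(tf.data.Dataset.range(2))
  _restored = tf.raw_ops.IteratorFromStringHandle(
      string_handle=_it.string_handle(),
      output_types=[tf.int64], output_shapes=[[]])
  _next = tf.raw_ops.IteratorGetNext(
      iterator=_restored, output_types=[tf.int64], output_shapes=[[]])
with tf.compat.v1.Session(graph=_g) as _sess:
  print(_sess.run(_next))  # [0]
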

2917def iterator_from_string_handle_v2(string_handle, output_types=[], output_shapes=[], name=None): 

2918 r"""TODO: add doc. 

2919 

2920 Args: 

2921 string_handle: A `Tensor` of type `string`. 

2922 output_types: An optional list of `tf.DTypes`. Defaults to `[]`. 

2923 output_shapes: An optional list of shapes (each a `tf.TensorShape` or list of `ints`). Defaults to `[]`. 

2924 name: A name for the operation (optional). 

2925 

2926 Returns: 

2927 A `Tensor` of type `resource`. 

2928 """ 

2929 _ctx = _context._context or _context.context() 

2930 tld = _ctx._thread_local_data 

2931 if tld.is_eager: 

2932 try: 

2933 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

2934 _ctx, "IteratorFromStringHandleV2", name, string_handle, 

2935 "output_types", output_types, "output_shapes", output_shapes) 

2936 return _result 

2937 except _core._NotOkStatusException as e: 

2938 _ops.raise_from_not_ok_status(e, name) 

2939 except _core._FallbackException: 

2940 pass 

2941 try: 

2942 return iterator_from_string_handle_v2_eager_fallback( 

2943 string_handle, output_types=output_types, 

2944 output_shapes=output_shapes, name=name, ctx=_ctx) 

2945 except _core._SymbolicException: 

2946 pass # Add nodes to the TensorFlow graph. 

2947 # Add nodes to the TensorFlow graph. 

2948 if output_types is None: 

2949 output_types = [] 

2950 if not isinstance(output_types, (list, tuple)): 

2951 raise TypeError( 

2952 "Expected list for 'output_types' argument to " 

2953 "'iterator_from_string_handle_v2' Op, not %r." % output_types) 

2954 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

2955 if output_shapes is None: 

2956 output_shapes = [] 

2957 if not isinstance(output_shapes, (list, tuple)): 

2958 raise TypeError( 

2959 "Expected list for 'output_shapes' argument to " 

2960 "'iterator_from_string_handle_v2' Op, not %r." % output_shapes) 

2961 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

2962 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

2963 "IteratorFromStringHandleV2", string_handle=string_handle, 

2964 output_types=output_types, 

2965 output_shapes=output_shapes, name=name) 

2966 _result = _outputs[:] 

2967 if _execute.must_record_gradient(): 

2968 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

2969 _op.get_attr("output_shapes")) 

2970 _inputs_flat = _op.inputs 

2971 _execute.record_gradient( 

2972 "IteratorFromStringHandleV2", _inputs_flat, _attrs, _result) 

2973 _result, = _result 

2974 return _result 

2975 

2976IteratorFromStringHandleV2 = tf_export("raw_ops.IteratorFromStringHandleV2")(_ops.to_raw_op(iterator_from_string_handle_v2)) 

2977 

2978 

2979def iterator_from_string_handle_v2_eager_fallback(string_handle, output_types, output_shapes, name, ctx): 

2980 if output_types is None: 

2981 output_types = [] 

2982 if not isinstance(output_types, (list, tuple)): 

2983 raise TypeError( 

2984 "Expected list for 'output_types' argument to " 

2985 "'iterator_from_string_handle_v2' Op, not %r." % output_types) 

2986 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

2987 if output_shapes is None: 

2988 output_shapes = [] 

2989 if not isinstance(output_shapes, (list, tuple)): 

2990 raise TypeError( 

2991 "Expected list for 'output_shapes' argument to " 

2992 "'iterator_from_string_handle_v2' Op, not %r." % output_shapes) 

2993 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

2994 string_handle = _ops.convert_to_tensor(string_handle, _dtypes.string) 

2995 _inputs_flat = [string_handle] 

2996 _attrs = ("output_types", output_types, "output_shapes", output_shapes) 

2997 _result = _execute.execute(b"IteratorFromStringHandleV2", 1, 

2998 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

2999 name=name) 

3000 if _execute.must_record_gradient(): 

3001 _execute.record_gradient( 

3002 "IteratorFromStringHandleV2", _inputs_flat, _attrs, _result) 

3003 _result, = _result 

3004 return _result 

3005 

3006 

3007def iterator_get_next(iterator, output_types, output_shapes, name=None): 

3008 r"""Gets the next output from the given iterator . 

3009 

3010 Args: 

3011 iterator: A `Tensor` of type `resource`. 

3012 output_types: A list of `tf.DTypes` that has length `>= 1`. 

3013 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

3014 name: A name for the operation (optional). 

3015 

3016 Returns: 

3017 A list of `Tensor` objects of type `output_types`. 

3018 """ 

3019 _ctx = _context._context or _context.context() 

3020 tld = _ctx._thread_local_data 

3021 if tld.is_eager: 

3022 try: 

3023 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

3024 _ctx, "IteratorGetNext", name, iterator, "output_types", output_types, 

3025 "output_shapes", output_shapes) 

3026 return _result 

3027 except _core._NotOkStatusException as e: 

3028 _ops.raise_from_not_ok_status(e, name) 

3029 except _core._FallbackException: 

3030 pass 

3031 try: 

3032 return iterator_get_next_eager_fallback( 

3033 iterator, output_types=output_types, output_shapes=output_shapes, 

3034 name=name, ctx=_ctx) 

3035 except _core._SymbolicException: 

3036 pass # Add nodes to the TensorFlow graph. 

3037 # Add nodes to the TensorFlow graph. 

3038 if not isinstance(output_types, (list, tuple)): 

3039 raise TypeError( 

3040 "Expected list for 'output_types' argument to " 

3041 "'iterator_get_next' Op, not %r." % output_types) 

3042 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

3043 if not isinstance(output_shapes, (list, tuple)): 

3044 raise TypeError( 

3045 "Expected list for 'output_shapes' argument to " 

3046 "'iterator_get_next' Op, not %r." % output_shapes) 

3047 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

3048 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

3049 "IteratorGetNext", iterator=iterator, output_types=output_types, 

3050 output_shapes=output_shapes, name=name) 

3051 _result = _outputs[:] 

3052 if not _result: 

3053 return _op 

3054 if _execute.must_record_gradient(): 

3055 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

3056 _op.get_attr("output_shapes")) 

3057 _inputs_flat = _op.inputs 

3058 _execute.record_gradient( 

3059 "IteratorGetNext", _inputs_flat, _attrs, _result) 

3060 return _result 

3061 

3062IteratorGetNext = tf_export("raw_ops.IteratorGetNext")(_ops.to_raw_op(iterator_get_next)) 

3063 

3064 

3065def iterator_get_next_eager_fallback(iterator, output_types, output_shapes, name, ctx): 

3066 if not isinstance(output_types, (list, tuple)): 

3067 raise TypeError( 

3068 "Expected list for 'output_types' argument to " 

3069 "'iterator_get_next' Op, not %r." % output_types) 

3070 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

3071 if not isinstance(output_shapes, (list, tuple)): 

3072 raise TypeError( 

3073 "Expected list for 'output_shapes' argument to " 

3074 "'iterator_get_next' Op, not %r." % output_shapes) 

3075 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

3076 iterator = _ops.convert_to_tensor(iterator, _dtypes.resource) 

3077 _inputs_flat = [iterator] 

3078 _attrs = ("output_types", output_types, "output_shapes", output_shapes) 

3079 _result = _execute.execute(b"IteratorGetNext", len(output_types), 

3080 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

3081 name=name) 

3082 if _execute.must_record_gradient(): 

3083 _execute.record_gradient( 

3084 "IteratorGetNext", _inputs_flat, _attrs, _result) 

3085 return _result 

3086 

3087 
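
# A hedged eager-mode sketch. `_iterator_resource` is assumed here to be the
# private resource attribute of the TF2 owned iterator (shown only to call
# IteratorGetNext directly); the supported spelling is next(iter(dataset)).
import tensorflow as tf

_it = iter(tf.data.Dataset.range(3))
_vals = tf.raw_ops.IteratorGetNext(
    iterator=_it._iterator_resource,
    output_types=[tf.int64], output_shapes=[[]])
print(_vals[0].numpy())  # 0
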

3088def iterator_get_next_as_optional(iterator, output_types, output_shapes, name=None): 

3089 r"""Gets the next output from the given iterator as an Optional variant. 

3090 

3091 Args: 

3092 iterator: A `Tensor` of type `resource`. 

3093 output_types: A list of `tf.DTypes` that has length `>= 1`. 

3094 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

3095 name: A name for the operation (optional). 

3096 

3097 Returns: 

3098 A `Tensor` of type `variant`. 

3099 """ 

3100 _ctx = _context._context or _context.context() 

3101 tld = _ctx._thread_local_data 

3102 if tld.is_eager: 

3103 try: 

3104 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

3105 _ctx, "IteratorGetNextAsOptional", name, iterator, "output_types", 

3106 output_types, "output_shapes", output_shapes) 

3107 return _result 

3108 except _core._NotOkStatusException as e: 

3109 _ops.raise_from_not_ok_status(e, name) 

3110 except _core._FallbackException: 

3111 pass 

3112 try: 

3113 return iterator_get_next_as_optional_eager_fallback( 

3114 iterator, output_types=output_types, output_shapes=output_shapes, 

3115 name=name, ctx=_ctx) 

3116 except _core._SymbolicException: 

3117 pass # Add nodes to the TensorFlow graph. 

3118 # Add nodes to the TensorFlow graph. 

3119 if not isinstance(output_types, (list, tuple)): 

3120 raise TypeError( 

3121 "Expected list for 'output_types' argument to " 

3122 "'iterator_get_next_as_optional' Op, not %r." % output_types) 

3123 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

3124 if not isinstance(output_shapes, (list, tuple)): 

3125 raise TypeError( 

3126 "Expected list for 'output_shapes' argument to " 

3127 "'iterator_get_next_as_optional' Op, not %r." % output_shapes) 

3128 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

3129 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

3130 "IteratorGetNextAsOptional", iterator=iterator, 

3131 output_types=output_types, 

3132 output_shapes=output_shapes, name=name) 

3133 _result = _outputs[:] 

3134 if _execute.must_record_gradient(): 

3135 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

3136 _op.get_attr("output_shapes")) 

3137 _inputs_flat = _op.inputs 

3138 _execute.record_gradient( 

3139 "IteratorGetNextAsOptional", _inputs_flat, _attrs, _result) 

3140 _result, = _result 

3141 return _result 

3142 

3143IteratorGetNextAsOptional = tf_export("raw_ops.IteratorGetNextAsOptional")(_ops.to_raw_op(iterator_get_next_as_optional)) 

3144 

3145 

3146def iterator_get_next_as_optional_eager_fallback(iterator, output_types, output_shapes, name, ctx): 

3147 if not isinstance(output_types, (list, tuple)): 

3148 raise TypeError( 

3149 "Expected list for 'output_types' argument to " 

3150 "'iterator_get_next_as_optional' Op, not %r." % output_types) 

3151 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

3152 if not isinstance(output_shapes, (list, tuple)): 

3153 raise TypeError( 

3154 "Expected list for 'output_shapes' argument to " 

3155 "'iterator_get_next_as_optional' Op, not %r." % output_shapes) 

3156 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

3157 iterator = _ops.convert_to_tensor(iterator, _dtypes.resource) 

3158 _inputs_flat = [iterator] 

3159 _attrs = ("output_types", output_types, "output_shapes", output_shapes) 

3160 _result = _execute.execute(b"IteratorGetNextAsOptional", 1, 

3161 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

3162 name=name) 

3163 if _execute.must_record_gradient(): 

3164 _execute.record_gradient( 

3165 "IteratorGetNextAsOptional", _inputs_flat, _attrs, _result) 

3166 _result, = _result 

3167 return _result 

3168 

3169 
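
# A hedged sketch of the public wrapper over IteratorGetNextAsOptional:
import tensorflow as tf

_it = iter(tf.data.Dataset.range(1))
_opt = _it.get_next_as_optional()
print(_opt.has_value().numpy(), _opt.get_value().numpy())  # True 0
_opt = _it.get_next_as_optional()
print(_opt.has_value().numpy())  # False: the iterator is exhausted
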

3170def iterator_get_next_sync(iterator, output_types, output_shapes, name=None): 

3171 r"""Gets the next output from the given iterator. 

3172 

3173 This operation is a synchronous version of IteratorGetNext. It should only be used 

3174 in situations where the iterator does not block the calling thread, or where 

3175 the calling thread is not a member of the thread pool used to execute parallel 

3176 operations (e.g. in eager mode). 

3177 

3178 Args: 

3179 iterator: A `Tensor` of type `resource`. 

3180 output_types: A list of `tf.DTypes` that has length `>= 1`. 

3181 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

3182 name: A name for the operation (optional). 

3183 

3184 Returns: 

3185 A list of `Tensor` objects of type `output_types`. 

3186 """ 

3187 _ctx = _context._context or _context.context() 

3188 tld = _ctx._thread_local_data 

3189 if tld.is_eager: 

3190 try: 

3191 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

3192 _ctx, "IteratorGetNextSync", name, iterator, "output_types", 

3193 output_types, "output_shapes", output_shapes) 

3194 return _result 

3195 except _core._NotOkStatusException as e: 

3196 _ops.raise_from_not_ok_status(e, name) 

3197 except _core._FallbackException: 

3198 pass 

3199 try: 

3200 return iterator_get_next_sync_eager_fallback( 

3201 iterator, output_types=output_types, output_shapes=output_shapes, 

3202 name=name, ctx=_ctx) 

3203 except _core._SymbolicException: 

3204 pass # Add nodes to the TensorFlow graph. 

3205 # Add nodes to the TensorFlow graph. 

3206 if not isinstance(output_types, (list, tuple)): 

3207 raise TypeError( 

3208 "Expected list for 'output_types' argument to " 

3209 "'iterator_get_next_sync' Op, not %r." % output_types) 

3210 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

3211 if not isinstance(output_shapes, (list, tuple)): 

3212 raise TypeError( 

3213 "Expected list for 'output_shapes' argument to " 

3214 "'iterator_get_next_sync' Op, not %r." % output_shapes) 

3215 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

3216 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

3217 "IteratorGetNextSync", iterator=iterator, output_types=output_types, 

3218 output_shapes=output_shapes, name=name) 

3219 _result = _outputs[:] 

3220 if not _result: 

3221 return _op 

3222 if _execute.must_record_gradient(): 

3223 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

3224 _op.get_attr("output_shapes")) 

3225 _inputs_flat = _op.inputs 

3226 _execute.record_gradient( 

3227 "IteratorGetNextSync", _inputs_flat, _attrs, _result) 

3228 return _result 

3229 

3230IteratorGetNextSync = tf_export("raw_ops.IteratorGetNextSync")(_ops.to_raw_op(iterator_get_next_sync)) 

3231 

3232 

3233def iterator_get_next_sync_eager_fallback(iterator, output_types, output_shapes, name, ctx): 

3234 if not isinstance(output_types, (list, tuple)): 

3235 raise TypeError( 

3236 "Expected list for 'output_types' argument to " 

3237 "'iterator_get_next_sync' Op, not %r." % output_types) 

3238 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

3239 if not isinstance(output_shapes, (list, tuple)): 

3240 raise TypeError( 

3241 "Expected list for 'output_shapes' argument to " 

3242 "'iterator_get_next_sync' Op, not %r." % output_shapes) 

3243 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

3244 iterator = _ops.convert_to_tensor(iterator, _dtypes.resource) 

3245 _inputs_flat = [iterator] 

3246 _attrs = ("output_types", output_types, "output_shapes", output_shapes) 

3247 _result = _execute.execute(b"IteratorGetNextSync", len(output_types), 

3248 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

3249 name=name) 

3250 if _execute.must_record_gradient(): 

3251 _execute.record_gradient( 

3252 "IteratorGetNextSync", _inputs_flat, _attrs, _result) 

3253 return _result 

3254 

3255 

3256def iterator_to_string_handle(resource_handle, name=None): 

3257 r"""Converts the given `resource_handle` representing an iterator to a string. 

3258 

3259 Args: 

3260 resource_handle: A `Tensor` of type `resource`. 

3261 A handle to an iterator resource. 

3262 name: A name for the operation (optional). 

3263 

3264 Returns: 

3265 A `Tensor` of type `string`. 

3266 """ 

3267 _ctx = _context._context or _context.context() 

3268 tld = _ctx._thread_local_data 

3269 if tld.is_eager: 

3270 try: 

3271 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

3272 _ctx, "IteratorToStringHandle", name, resource_handle) 

3273 return _result 

3274 except _core._NotOkStatusException as e: 

3275 _ops.raise_from_not_ok_status(e, name) 

3276 except _core._FallbackException: 

3277 pass 

3278 try: 

3279 return iterator_to_string_handle_eager_fallback( 

3280 resource_handle, name=name, ctx=_ctx) 

3281 except _core._SymbolicException: 

3282 pass # Add nodes to the TensorFlow graph. 

3283 # Add nodes to the TensorFlow graph. 

3284 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

3285 "IteratorToStringHandle", resource_handle=resource_handle, name=name) 

3286 _result = _outputs[:] 

3287 if _execute.must_record_gradient(): 

3288 _attrs = () 

3289 _inputs_flat = _op.inputs 

3290 _execute.record_gradient( 

3291 "IteratorToStringHandle", _inputs_flat, _attrs, _result) 

3292 _result, = _result 

3293 return _result 

3294 

3295IteratorToStringHandle = tf_export("raw_ops.IteratorToStringHandle")(_ops.to_raw_op(iterator_to_string_handle)) 

3296 

3297 

3298def iterator_to_string_handle_eager_fallback(resource_handle, name, ctx): 

3299 resource_handle = _ops.convert_to_tensor(resource_handle, _dtypes.resource) 

3300 _inputs_flat = [resource_handle] 

3301 _attrs = None 

3302 _result = _execute.execute(b"IteratorToStringHandle", 1, 

3303 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

3304 name=name) 

3305 if _execute.must_record_gradient(): 

3306 _execute.record_gradient( 

3307 "IteratorToStringHandle", _inputs_flat, _attrs, _result) 

3308 _result, = _result 

3309 return _result 

3310 

3311 

3312def iterator_v2(shared_name, container, output_types, output_shapes, name=None): 

3313 r"""TODO: add doc. 

3314 

3315 Args: 

3316 shared_name: A `string`. 

3317 container: A `string`. 

3318 output_types: A list of `tf.DTypes` that has length `>= 1`. 

3319 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

3320 name: A name for the operation (optional). 

3321 

3322 Returns: 

3323 A `Tensor` of type `resource`. 

3324 """ 

3325 _ctx = _context._context or _context.context() 

3326 tld = _ctx._thread_local_data 

3327 if tld.is_eager: 

3328 try: 

3329 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

3330 _ctx, "IteratorV2", name, "shared_name", shared_name, "container", 

3331 container, "output_types", output_types, "output_shapes", 

3332 output_shapes) 

3333 return _result 

3334 except _core._NotOkStatusException as e: 

3335 _ops.raise_from_not_ok_status(e, name) 

3336 except _core._FallbackException: 

3337 pass 

3338 try: 

3339 return iterator_v2_eager_fallback( 

3340 shared_name=shared_name, container=container, 

3341 output_types=output_types, output_shapes=output_shapes, name=name, 

3342 ctx=_ctx) 

3343 except _core._SymbolicException: 

3344 pass # Add nodes to the TensorFlow graph. 

3345 # Add nodes to the TensorFlow graph. 

3346 shared_name = _execute.make_str(shared_name, "shared_name") 

3347 container = _execute.make_str(container, "container") 

3348 if not isinstance(output_types, (list, tuple)): 

3349 raise TypeError( 

3350 "Expected list for 'output_types' argument to " 

3351 "'iterator_v2' Op, not %r." % output_types) 

3352 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

3353 if not isinstance(output_shapes, (list, tuple)): 

3354 raise TypeError( 

3355 "Expected list for 'output_shapes' argument to " 

3356 "'iterator_v2' Op, not %r." % output_shapes) 

3357 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

3358 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

3359 "IteratorV2", shared_name=shared_name, container=container, 

3360 output_types=output_types, output_shapes=output_shapes, 

3361 name=name) 

3362 _result = _outputs[:] 

3363 if _execute.must_record_gradient(): 

3364 _attrs = ("shared_name", _op.get_attr("shared_name"), "container", 

3365 _op.get_attr("container"), "output_types", 

3366 _op.get_attr("output_types"), "output_shapes", 

3367 _op.get_attr("output_shapes")) 

3368 _inputs_flat = _op.inputs 

3369 _execute.record_gradient( 

3370 "IteratorV2", _inputs_flat, _attrs, _result) 

3371 _result, = _result 

3372 return _result 

3373 

3374IteratorV2 = tf_export("raw_ops.IteratorV2")(_ops.to_raw_op(iterator_v2)) 

3375 

3376 

3377def iterator_v2_eager_fallback(shared_name, container, output_types, output_shapes, name, ctx): 

3378 shared_name = _execute.make_str(shared_name, "shared_name") 

3379 container = _execute.make_str(container, "container") 

3380 if not isinstance(output_types, (list, tuple)): 

3381 raise TypeError( 

3382 "Expected list for 'output_types' argument to " 

3383 "'iterator_v2' Op, not %r." % output_types) 

3384 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

3385 if not isinstance(output_shapes, (list, tuple)): 

3386 raise TypeError( 

3387 "Expected list for 'output_shapes' argument to " 

3388 "'iterator_v2' Op, not %r." % output_shapes) 

3389 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

3390 _inputs_flat = [] 

3391 _attrs = ("shared_name", shared_name, "container", container, 

3392 "output_types", output_types, "output_shapes", output_shapes) 

3393 _result = _execute.execute(b"IteratorV2", 1, inputs=_inputs_flat, 

3394 attrs=_attrs, ctx=ctx, name=name) 

3395 if _execute.must_record_gradient(): 

3396 _execute.record_gradient( 

3397 "IteratorV2", _inputs_flat, _attrs, _result) 

3398 _result, = _result 

3399 return _result 

3400 

3401 

3402def make_iterator(dataset, iterator, name=None): 

3403 r"""Makes a new iterator from the given `dataset` and stores it in `iterator`. 

3404 

3405 This operation may be executed multiple times. Each execution will reset the 

3406 iterator in `iterator` to the first element of `dataset`. 

3407 

3408 Args: 

3409 dataset: A `Tensor` of type `variant`. 

3410 iterator: A `Tensor` of type `resource`. 

3411 name: A name for the operation (optional). 

3412 

3413 Returns: 

3414 The created Operation. 

3415 """ 

3416 _ctx = _context._context or _context.context() 

3417 tld = _ctx._thread_local_data 

3418 if tld.is_eager: 

3419 try: 

3420 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

3421 _ctx, "MakeIterator", name, dataset, iterator) 

3422 return _result 

3423 except _core._NotOkStatusException as e: 

3424 _ops.raise_from_not_ok_status(e, name) 

3425 except _core._FallbackException: 

3426 pass 

3427 try: 

3428 return make_iterator_eager_fallback( 

3429 dataset, iterator, name=name, ctx=_ctx) 

3430 except _core._SymbolicException: 

3431 pass # Add nodes to the TensorFlow graph. 

3432 # Add nodes to the TensorFlow graph. 

3433 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

3434 "MakeIterator", dataset=dataset, iterator=iterator, name=name) 

3435 return _op 

3436MakeIterator = tf_export("raw_ops.MakeIterator")(_ops.to_raw_op(make_iterator)) 

3437 

3438 

3439def make_iterator_eager_fallback(dataset, iterator, name, ctx): 

3440 dataset = _ops.convert_to_tensor(dataset, _dtypes.variant) 

3441 iterator = _ops.convert_to_tensor(iterator, _dtypes.resource) 

3442 _inputs_flat = [dataset, iterator] 

3443 _attrs = None 

3444 _result = _execute.execute(b"MakeIterator", 0, inputs=_inputs_flat, 

3445 attrs=_attrs, ctx=ctx, name=name) 

3446 _result = None 

3447 return _result 

3448 

3449 
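
# Note the graph path above: MakeIterator produces no output tensors, so the
# wrapper returns the created Operation itself (`return _op`), and the eager
# fallback executes with num_outputs=0 and returns None. This is the
# generated pattern for ops in this file that yield no tensors.
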

3450def map_dataset(input_dataset, other_arguments, f, output_types, output_shapes, use_inter_op_parallelism=True, preserve_cardinality=False, metadata="", name=None): 

3451 r"""Creates a dataset that applies `f` to the outputs of `input_dataset`. 

3452 

3453 Args: 

3454 input_dataset: A `Tensor` of type `variant`. 

3455 other_arguments: A list of `Tensor` objects. 

3456 f: A function decorated with @Defun. 

3457 output_types: A list of `tf.DTypes` that has length `>= 1`. 

3458 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

3459 use_inter_op_parallelism: An optional `bool`. Defaults to `True`. 

3460 preserve_cardinality: An optional `bool`. Defaults to `False`. 

3461 metadata: An optional `string`. Defaults to `""`. 

3462 name: A name for the operation (optional). 

3463 

3464 Returns: 

3465 A `Tensor` of type `variant`. 

3466 """ 

3467 _ctx = _context._context or _context.context() 

3468 tld = _ctx._thread_local_data 

3469 if tld.is_eager: 

3470 try: 

3471 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

3472 _ctx, "MapDataset", name, input_dataset, other_arguments, "f", f, 

3473 "output_types", output_types, "output_shapes", output_shapes, 

3474 "use_inter_op_parallelism", use_inter_op_parallelism, 

3475 "preserve_cardinality", preserve_cardinality, "metadata", metadata) 

3476 return _result 

3477 except _core._NotOkStatusException as e: 

3478 _ops.raise_from_not_ok_status(e, name) 

3479 except _core._FallbackException: 

3480 pass 

3481 try: 

3482 return map_dataset_eager_fallback( 

3483 input_dataset, other_arguments, f=f, output_types=output_types, 

3484 output_shapes=output_shapes, 

3485 use_inter_op_parallelism=use_inter_op_parallelism, 

3486 preserve_cardinality=preserve_cardinality, metadata=metadata, 

3487 name=name, ctx=_ctx) 

3488 except _core._SymbolicException: 

3489 pass # Add nodes to the TensorFlow graph. 

3490 # Add nodes to the TensorFlow graph. 

3491 if not isinstance(output_types, (list, tuple)): 

3492 raise TypeError( 

3493 "Expected list for 'output_types' argument to " 

3494 "'map_dataset' Op, not %r." % output_types) 

3495 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

3496 if not isinstance(output_shapes, (list, tuple)): 

3497 raise TypeError( 

3498 "Expected list for 'output_shapes' argument to " 

3499 "'map_dataset' Op, not %r." % output_shapes) 

3500 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

3501 if use_inter_op_parallelism is None: 

3502 use_inter_op_parallelism = True 

3503 use_inter_op_parallelism = _execute.make_bool(use_inter_op_parallelism, "use_inter_op_parallelism") 

3504 if preserve_cardinality is None: 

3505 preserve_cardinality = False 

3506 preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality") 

3507 if metadata is None: 

3508 metadata = "" 

3509 metadata = _execute.make_str(metadata, "metadata") 

3510 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

3511 "MapDataset", input_dataset=input_dataset, 

3512 other_arguments=other_arguments, f=f, 

3513 output_types=output_types, output_shapes=output_shapes, 

3514 use_inter_op_parallelism=use_inter_op_parallelism, 

3515 preserve_cardinality=preserve_cardinality, 

3516 metadata=metadata, name=name) 

3517 _result = _outputs[:] 

3518 if _execute.must_record_gradient(): 

3519 _attrs = ("f", _op.get_attr("f"), "Targuments", 

3520 _op.get_attr("Targuments"), "output_types", 

3521 _op.get_attr("output_types"), "output_shapes", 

3522 _op.get_attr("output_shapes"), "use_inter_op_parallelism", 

3523 _op._get_attr_bool("use_inter_op_parallelism"), 

3524 "preserve_cardinality", 

3525 _op._get_attr_bool("preserve_cardinality"), "metadata", 

3526 _op.get_attr("metadata")) 

3527 _inputs_flat = _op.inputs 

3528 _execute.record_gradient( 

3529 "MapDataset", _inputs_flat, _attrs, _result) 

3530 _result, = _result 

3531 return _result 

3532 

3533MapDataset = tf_export("raw_ops.MapDataset")(_ops.to_raw_op(map_dataset)) 

3534 

3535 

3536def map_dataset_eager_fallback(input_dataset, other_arguments, f, output_types, output_shapes, use_inter_op_parallelism, preserve_cardinality, metadata, name, ctx): 

3537 if not isinstance(output_types, (list, tuple)): 

3538 raise TypeError( 

3539 "Expected list for 'output_types' argument to " 

3540 "'map_dataset' Op, not %r." % output_types) 

3541 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

3542 if not isinstance(output_shapes, (list, tuple)): 

3543 raise TypeError( 

3544 "Expected list for 'output_shapes' argument to " 

3545 "'map_dataset' Op, not %r." % output_shapes) 

3546 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

3547 if use_inter_op_parallelism is None: 

3548 use_inter_op_parallelism = True 

3549 use_inter_op_parallelism = _execute.make_bool(use_inter_op_parallelism, "use_inter_op_parallelism") 

3550 if preserve_cardinality is None: 

3551 preserve_cardinality = False 

3552 preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality") 

3553 if metadata is None: 

3554 metadata = "" 

3555 metadata = _execute.make_str(metadata, "metadata") 

3556 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx) 

3557 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

3558 _inputs_flat = [input_dataset] + list(other_arguments) 

3559 _attrs = ("f", f, "Targuments", _attr_Targuments, "output_types", 

3560 output_types, "output_shapes", output_shapes, "use_inter_op_parallelism", 

3561 use_inter_op_parallelism, "preserve_cardinality", preserve_cardinality, 

3562 "metadata", metadata) 

3563 _result = _execute.execute(b"MapDataset", 1, inputs=_inputs_flat, 

3564 attrs=_attrs, ctx=ctx, name=name) 

3565 if _execute.must_record_gradient(): 

3566 _execute.record_gradient( 

3567 "MapDataset", _inputs_flat, _attrs, _result) 

3568 _result, = _result 

3569 return _result 

3570 

3571 
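
# A hedged sketch of the public API that lowers to MapDataset:
import tensorflow as tf

_ds = tf.data.Dataset.range(4).map(lambda x: x * 2)
print(list(_ds.as_numpy_iterator()))  # [0, 2, 4, 6]
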

3572def map_defun(arguments, captured_inputs, output_types, output_shapes, f, max_intra_op_parallelism=1, name=None): 

3573 r""" Maps a function on the list of tensors unpacked from arguments on dimension 0. 

3574 The function given by `f` is assumed to be stateless, and is executed 

3575 concurrently on all the slices; up to batch_size (i.e. the size of the 0th 

3576 dimension of each argument) functions will be scheduled at once. 

3577 

3578 The `max_intra_op_parallelism` attr, which defaults to 1, can be used to 

3579 limit the intra-op parallelism. To limit inter-op parallelism, a user can 

3580 set a private threadpool on the dataset using `tf.data.Options`'s 

3581 `ThreadingOptions`. 

3582 

3583 Note that this op is not exposed to users directly, but is invoked in tf.data 

3584 rewrites. 

3585 

3586 Args: 

3587 arguments: A list of `Tensor` objects. 

3588 A list of tensors whose types are `Targuments`, corresponding to the inputs 

3589 the function should be mapped over. 

3590 captured_inputs: A list of `Tensor` objects. 

3591 A list of tensors whose types are `Tcaptured`, corresponding to the captured 

3592 inputs of the defun. 

3593 output_types: A list of `tf.DTypes` that has length `>= 1`. 

3594 A list of types. 

3595 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

3596 A list of shapes. 

3597 f: A function decorated with @Defun. 

3598 max_intra_op_parallelism: An optional `int`. Defaults to `1`. 

3599 name: A name for the operation (optional). 

3600 

3601 Returns: 

3602 A list of `Tensor` objects of type `output_types`. 

3603 """ 

3604 _ctx = _context._context or _context.context() 

3605 tld = _ctx._thread_local_data 

3606 if tld.is_eager: 

3607 try: 

3608 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

3609 _ctx, "MapDefun", name, arguments, captured_inputs, "output_types", 

3610 output_types, "output_shapes", output_shapes, "f", f, 

3611 "max_intra_op_parallelism", max_intra_op_parallelism) 

3612 return _result 

3613 except _core._NotOkStatusException as e: 

3614 _ops.raise_from_not_ok_status(e, name) 

3615 except _core._FallbackException: 

3616 pass 

3617 try: 

3618 return map_defun_eager_fallback( 

3619 arguments, captured_inputs, output_types=output_types, 

3620 output_shapes=output_shapes, f=f, 

3621 max_intra_op_parallelism=max_intra_op_parallelism, name=name, 

3622 ctx=_ctx) 

3623 except _core._SymbolicException: 

3624 pass # Add nodes to the TensorFlow graph. 

3625 # Add nodes to the TensorFlow graph. 

3626 if not isinstance(output_types, (list, tuple)): 

3627 raise TypeError( 

3628 "Expected list for 'output_types' argument to " 

3629 "'map_defun' Op, not %r." % output_types) 

3630 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

3631 if not isinstance(output_shapes, (list, tuple)): 

3632 raise TypeError( 

3633 "Expected list for 'output_shapes' argument to " 

3634 "'map_defun' Op, not %r." % output_shapes) 

3635 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

3636 if max_intra_op_parallelism is None: 

3637 max_intra_op_parallelism = 1 

3638 max_intra_op_parallelism = _execute.make_int(max_intra_op_parallelism, "max_intra_op_parallelism") 

3639 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

3640 "MapDefun", arguments=arguments, captured_inputs=captured_inputs, 

3641 output_types=output_types, output_shapes=output_shapes, 

3642 f=f, max_intra_op_parallelism=max_intra_op_parallelism, 

3643 name=name) 

3644 _result = _outputs[:] 

3645 if _execute.must_record_gradient(): 

3646 _attrs = ("Targuments", _op.get_attr("Targuments"), "Tcaptured", 

3647 _op.get_attr("Tcaptured"), "output_types", 

3648 _op.get_attr("output_types"), "output_shapes", 

3649 _op.get_attr("output_shapes"), "f", _op.get_attr("f"), 

3650 "max_intra_op_parallelism", 

3651 _op._get_attr_int("max_intra_op_parallelism")) 

3652 _inputs_flat = _op.inputs 

3653 _execute.record_gradient( 

3654 "MapDefun", _inputs_flat, _attrs, _result) 

3655 return _result 

3656 

3657MapDefun = tf_export("raw_ops.MapDefun")(_ops.to_raw_op(map_defun)) 

3658 

3659 

3660def map_defun_eager_fallback(arguments, captured_inputs, output_types, output_shapes, f, max_intra_op_parallelism, name, ctx): 

3661 if not isinstance(output_types, (list, tuple)): 

3662 raise TypeError( 

3663 "Expected list for 'output_types' argument to " 

3664 "'map_defun' Op, not %r." % output_types) 

3665 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

3666 if not isinstance(output_shapes, (list, tuple)): 

3667 raise TypeError( 

3668 "Expected list for 'output_shapes' argument to " 

3669 "'map_defun' Op, not %r." % output_shapes) 

3670 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

3671 if max_intra_op_parallelism is None: 

3672 max_intra_op_parallelism = 1 

3673 max_intra_op_parallelism = _execute.make_int(max_intra_op_parallelism, "max_intra_op_parallelism") 

3674 _attr_Targuments, arguments = _execute.convert_to_mixed_eager_tensors(arguments, ctx) 

3675 _attr_Tcaptured, captured_inputs = _execute.convert_to_mixed_eager_tensors(captured_inputs, ctx) 

3676 _inputs_flat = list(arguments) + list(captured_inputs) 

3677 _attrs = ("Targuments", _attr_Targuments, "Tcaptured", _attr_Tcaptured, 

3678 "output_types", output_types, "output_shapes", output_shapes, "f", f, 

3679 "max_intra_op_parallelism", max_intra_op_parallelism) 

3680 _result = _execute.execute(b"MapDefun", len(output_types), 

3681 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

3682 name=name) 

3683 if _execute.must_record_gradient(): 

3684 _execute.record_gradient( 

3685 "MapDefun", _inputs_flat, _attrs, _result) 

3686 return _result 

3687 

3688 
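
# MapDefun itself is internal to tf.data rewrites; the closest user-facing
# analogue of "map f over dimension 0" is tf.map_fn (a hedged comparison of
# semantics, not the same implementation):
import tensorflow as tf

_x = tf.constant([[1, 2], [3, 4]])
print(tf.map_fn(lambda row: row * 10, _x).numpy())  # [[10 20] [30 40]]
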

3689def model_dataset(input_dataset, output_types, output_shapes, algorithm=0, cpu_budget=0, ram_budget=0, name=None): 

3690 r"""Identity transformation that models performance. 

3691 

3692 

3693 

3694 Args: 

3695 input_dataset: A `Tensor` of type `variant`. 

3696 A variant tensor representing the input dataset. 

3697 output_types: A list of `tf.DTypes` that has length `>= 1`. 

3698 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

3699 algorithm: An optional `int`. Defaults to `0`. 

3700 cpu_budget: An optional `int`. Defaults to `0`. 

3701 ram_budget: An optional `int`. Defaults to `0`. 

3702 name: A name for the operation (optional). 

3703 

3704 Returns: 

3705 A `Tensor` of type `variant`. 

3706 """ 

3707 _ctx = _context._context or _context.context() 

3708 tld = _ctx._thread_local_data 

3709 if tld.is_eager: 

3710 try: 

3711 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

3712 _ctx, "ModelDataset", name, input_dataset, "algorithm", algorithm, 

3713 "cpu_budget", cpu_budget, "ram_budget", ram_budget, "output_types", 

3714 output_types, "output_shapes", output_shapes) 

3715 return _result 

3716 except _core._NotOkStatusException as e: 

3717 _ops.raise_from_not_ok_status(e, name) 

3718 except _core._FallbackException: 

3719 pass 

3720 try: 

3721 return model_dataset_eager_fallback( 

3722 input_dataset, algorithm=algorithm, cpu_budget=cpu_budget, 

3723 ram_budget=ram_budget, output_types=output_types, 

3724 output_shapes=output_shapes, name=name, ctx=_ctx) 

3725 except _core._SymbolicException: 

3726 pass # Add nodes to the TensorFlow graph. 

3727 # Add nodes to the TensorFlow graph. 

3728 if not isinstance(output_types, (list, tuple)): 

3729 raise TypeError( 

3730 "Expected list for 'output_types' argument to " 

3731 "'model_dataset' Op, not %r." % output_types) 

3732 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

3733 if not isinstance(output_shapes, (list, tuple)): 

3734 raise TypeError( 

3735 "Expected list for 'output_shapes' argument to " 

3736 "'model_dataset' Op, not %r." % output_shapes) 

3737 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

3738 if algorithm is None: 

3739 algorithm = 0 

3740 algorithm = _execute.make_int(algorithm, "algorithm") 

3741 if cpu_budget is None: 

3742 cpu_budget = 0 

3743 cpu_budget = _execute.make_int(cpu_budget, "cpu_budget") 

3744 if ram_budget is None: 

3745 ram_budget = 0 

3746 ram_budget = _execute.make_int(ram_budget, "ram_budget") 

3747 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

3748 "ModelDataset", input_dataset=input_dataset, 

3749 output_types=output_types, 

3750 output_shapes=output_shapes, algorithm=algorithm, 

3751 cpu_budget=cpu_budget, ram_budget=ram_budget, 

3752 name=name) 

3753 _result = _outputs[:] 

3754 if _execute.must_record_gradient(): 

3755 _attrs = ("algorithm", _op._get_attr_int("algorithm"), "cpu_budget", 

3756 _op._get_attr_int("cpu_budget"), "ram_budget", 

3757 _op._get_attr_int("ram_budget"), "output_types", 

3758 _op.get_attr("output_types"), "output_shapes", 

3759 _op.get_attr("output_shapes")) 

3760 _inputs_flat = _op.inputs 

3761 _execute.record_gradient( 

3762 "ModelDataset", _inputs_flat, _attrs, _result) 

3763 _result, = _result 

3764 return _result 

3765 

3766ModelDataset = tf_export("raw_ops.ModelDataset")(_ops.to_raw_op(model_dataset)) 

3767 

3768 

3769def model_dataset_eager_fallback(input_dataset, output_types, output_shapes, algorithm, cpu_budget, ram_budget, name, ctx): 

3770 if not isinstance(output_types, (list, tuple)): 

3771 raise TypeError( 

3772 "Expected list for 'output_types' argument to " 

3773 "'model_dataset' Op, not %r." % output_types) 

3774 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

3775 if not isinstance(output_shapes, (list, tuple)): 

3776 raise TypeError( 

3777 "Expected list for 'output_shapes' argument to " 

3778 "'model_dataset' Op, not %r." % output_shapes) 

3779 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

3780 if algorithm is None: 

3781 algorithm = 0 

3782 algorithm = _execute.make_int(algorithm, "algorithm") 

3783 if cpu_budget is None: 

3784 cpu_budget = 0 

3785 cpu_budget = _execute.make_int(cpu_budget, "cpu_budget") 

3786 if ram_budget is None: 

3787 ram_budget = 0 

3788 ram_budget = _execute.make_int(ram_budget, "ram_budget") 

3789 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

3790 _inputs_flat = [input_dataset] 

3791 _attrs = ("algorithm", algorithm, "cpu_budget", cpu_budget, "ram_budget", 

3792 ram_budget, "output_types", output_types, "output_shapes", output_shapes) 

3793 _result = _execute.execute(b"ModelDataset", 1, inputs=_inputs_flat, 

3794 attrs=_attrs, ctx=ctx, name=name) 

3795 if _execute.must_record_gradient(): 

3796 _execute.record_gradient( 

3797 "ModelDataset", _inputs_flat, _attrs, _result) 

3798 _result, = _result 

3799 return _result 

3800 

3801 
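
# ModelDataset is normally injected by tf.data autotuning rather than called
# directly. A hedged sketch of the public switch (assumes TF >= 2.6, where
# Options exposes `autotune`; autotuning is on by default):
import tensorflow as tf

_opts = tf.data.Options()
_opts.autotune.enabled = True
_ds = tf.data.Dataset.range(4).with_options(_opts)
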

3802def multi_device_iterator(devices, shared_name, container, output_types, output_shapes, name=None): 

3803 r"""Creates a MultiDeviceIterator resource. 

3804 

3805 Args: 

3806 devices: A list of `strings` that has length `>= 1`. 

3807 A list of devices the iterator works across. 

3808 shared_name: A `string`. 

3809 If non-empty, this resource will be shared under the given name 

3810 across multiple sessions. 

3811 container: A `string`. 

3812 If non-empty, this resource is placed in the given container. 

3813 Otherwise, a default container is used. 

3814 output_types: A list of `tf.DTypes` that has length `>= 1`. 

3815 The type list for the return values. 

3816 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

3817 The list of shapes being produced. 

3818 name: A name for the operation (optional). 

3819 

3820 Returns: 

3821 A `Tensor` of type `resource`. 

3822 """ 

3823 _ctx = _context._context or _context.context() 

3824 tld = _ctx._thread_local_data 

3825 if tld.is_eager: 

3826 try: 

3827 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

3828 _ctx, "MultiDeviceIterator", name, "devices", devices, "shared_name", 

3829 shared_name, "container", container, "output_types", output_types, 

3830 "output_shapes", output_shapes) 

3831 return _result 

3832 except _core._NotOkStatusException as e: 

3833 _ops.raise_from_not_ok_status(e, name) 

3834 except _core._FallbackException: 

3835 pass 

3836 try: 

3837 return multi_device_iterator_eager_fallback( 

3838 devices=devices, shared_name=shared_name, container=container, 

3839 output_types=output_types, output_shapes=output_shapes, name=name, 

3840 ctx=_ctx) 

3841 except _core._SymbolicException: 

3842 pass # Add nodes to the TensorFlow graph. 

3843 # Add nodes to the TensorFlow graph. 

3844 if not isinstance(devices, (list, tuple)): 

3845 raise TypeError( 

3846 "Expected list for 'devices' argument to " 

3847 "'multi_device_iterator' Op, not %r." % devices) 

3848 devices = [_execute.make_str(_s, "devices") for _s in devices] 

3849 shared_name = _execute.make_str(shared_name, "shared_name") 

3850 container = _execute.make_str(container, "container") 

3851 if not isinstance(output_types, (list, tuple)): 

3852 raise TypeError( 

3853 "Expected list for 'output_types' argument to " 

3854 "'multi_device_iterator' Op, not %r." % output_types) 

3855 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

3856 if not isinstance(output_shapes, (list, tuple)): 

3857 raise TypeError( 

3858 "Expected list for 'output_shapes' argument to " 

3859 "'multi_device_iterator' Op, not %r." % output_shapes) 

3860 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

3861 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

3862 "MultiDeviceIterator", devices=devices, shared_name=shared_name, 

3863 container=container, output_types=output_types, 

3864 output_shapes=output_shapes, name=name) 

3865 _result = _outputs[:] 

3866 if _execute.must_record_gradient(): 

3867 _attrs = ("devices", _op.get_attr("devices"), "shared_name", 

3868 _op.get_attr("shared_name"), "container", 

3869 _op.get_attr("container"), "output_types", 

3870 _op.get_attr("output_types"), "output_shapes", 

3871 _op.get_attr("output_shapes")) 

3872 _inputs_flat = _op.inputs 

3873 _execute.record_gradient( 

3874 "MultiDeviceIterator", _inputs_flat, _attrs, _result) 

3875 _result, = _result 

3876 return _result 

3877 

3878MultiDeviceIterator = tf_export("raw_ops.MultiDeviceIterator")(_ops.to_raw_op(multi_device_iterator)) 

3879 

3880 

3881def multi_device_iterator_eager_fallback(devices, shared_name, container, output_types, output_shapes, name, ctx): 

3882 if not isinstance(devices, (list, tuple)): 

3883 raise TypeError( 

3884 "Expected list for 'devices' argument to " 

3885 "'multi_device_iterator' Op, not %r." % devices) 

3886 devices = [_execute.make_str(_s, "devices") for _s in devices] 

3887 shared_name = _execute.make_str(shared_name, "shared_name") 

3888 container = _execute.make_str(container, "container") 

3889 if not isinstance(output_types, (list, tuple)): 

3890 raise TypeError( 

3891 "Expected list for 'output_types' argument to " 

3892 "'multi_device_iterator' Op, not %r." % output_types) 

3893 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

3894 if not isinstance(output_shapes, (list, tuple)): 

3895 raise TypeError( 

3896 "Expected list for 'output_shapes' argument to " 

3897 "'multi_device_iterator' Op, not %r." % output_shapes) 

3898 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

3899 _inputs_flat = [] 

3900 _attrs = ("devices", devices, "shared_name", shared_name, "container", 

3901 container, "output_types", output_types, "output_shapes", output_shapes) 

3902 _result = _execute.execute(b"MultiDeviceIterator", 1, inputs=_inputs_flat, 

3903 attrs=_attrs, ctx=ctx, name=name) 

3904 if _execute.must_record_gradient(): 

3905 _execute.record_gradient( 

3906 "MultiDeviceIterator", _inputs_flat, _attrs, _result) 

3907 _result, = _result 

3908 return _result 

3909 
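A minimal creation sketch for the op above; the device string and `shared_name` are placeholder values chosen for illustration, not values the op requires.

import tensorflow as tf

def _multi_device_iterator_sketch():
  # Create a MultiDeviceIterator resource spanning a single CPU device.
  return tf.raw_ops.MultiDeviceIterator(
      devices=["/job:localhost/replica:0/task:0/device:CPU:0"],
      shared_name="sketch_iterator", container="",
      output_types=[tf.int64], output_shapes=[tf.TensorShape([])])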

3910 

3911def multi_device_iterator_from_string_handle(string_handle, output_types=[], output_shapes=[], name=None): 

3912 r"""Generates a MultiDeviceIterator resource from its provided string handle. 

3913 

3914 Args: 

3915 string_handle: A `Tensor` of type `string`. 

3916 String representing the resource. 

3917 output_types: An optional list of `tf.DTypes`. Defaults to `[]`. 

3918 The type list for the return values. 

3919 output_shapes: An optional list of shapes (each a `tf.TensorShape` or list of `ints`). Defaults to `[]`. 

3920 The list of shapes being produced. 

3921 name: A name for the operation (optional). 

3922 

3923 Returns: 

3924 A `Tensor` of type `resource`. 

3925 """ 

3926 _ctx = _context._context or _context.context() 

3927 tld = _ctx._thread_local_data 

3928 if tld.is_eager: 

3929 try: 

3930 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

3931 _ctx, "MultiDeviceIteratorFromStringHandle", name, string_handle, 

3932 "output_types", output_types, "output_shapes", output_shapes) 

3933 return _result 

3934 except _core._NotOkStatusException as e: 

3935 _ops.raise_from_not_ok_status(e, name) 

3936 except _core._FallbackException: 

3937 pass 

3938 try: 

3939 return multi_device_iterator_from_string_handle_eager_fallback( 

3940 string_handle, output_types=output_types, 

3941 output_shapes=output_shapes, name=name, ctx=_ctx) 

3942 except _core._SymbolicException: 

3943 pass # Add nodes to the TensorFlow graph. 

3944 # Add nodes to the TensorFlow graph. 

3945 if output_types is None: 

3946 output_types = [] 

3947 if not isinstance(output_types, (list, tuple)): 

3948 raise TypeError( 

3949 "Expected list for 'output_types' argument to " 

3950 "'multi_device_iterator_from_string_handle' Op, not %r." % output_types) 

3951 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

3952 if output_shapes is None: 

3953 output_shapes = [] 

3954 if not isinstance(output_shapes, (list, tuple)): 

3955 raise TypeError( 

3956 "Expected list for 'output_shapes' argument to " 

3957 "'multi_device_iterator_from_string_handle' Op, not %r." % output_shapes) 

3958 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

3959 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

3960 "MultiDeviceIteratorFromStringHandle", string_handle=string_handle, 

3961 output_types=output_types, 

3962 output_shapes=output_shapes, 

3963 name=name) 

3964 _result = _outputs[:] 

3965 if _execute.must_record_gradient(): 

3966 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

3967 _op.get_attr("output_shapes")) 

3968 _inputs_flat = _op.inputs 

3969 _execute.record_gradient( 

3970 "MultiDeviceIteratorFromStringHandle", _inputs_flat, _attrs, _result) 

3971 _result, = _result 

3972 return _result 

3973 

3974MultiDeviceIteratorFromStringHandle = tf_export("raw_ops.MultiDeviceIteratorFromStringHandle")(_ops.to_raw_op(multi_device_iterator_from_string_handle)) 

3975 

3976 

3977def multi_device_iterator_from_string_handle_eager_fallback(string_handle, output_types, output_shapes, name, ctx): 

3978 if output_types is None: 

3979 output_types = [] 

3980 if not isinstance(output_types, (list, tuple)): 

3981 raise TypeError( 

3982 "Expected list for 'output_types' argument to " 

3983 "'multi_device_iterator_from_string_handle' Op, not %r." % output_types) 

3984 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

3985 if output_shapes is None: 

3986 output_shapes = [] 

3987 if not isinstance(output_shapes, (list, tuple)): 

3988 raise TypeError( 

3989 "Expected list for 'output_shapes' argument to " 

3990 "'multi_device_iterator_from_string_handle' Op, not %r." % output_shapes) 

3991 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

3992 string_handle = _ops.convert_to_tensor(string_handle, _dtypes.string) 

3993 _inputs_flat = [string_handle] 

3994 _attrs = ("output_types", output_types, "output_shapes", output_shapes) 

3995 _result = _execute.execute(b"MultiDeviceIteratorFromStringHandle", 1, 

3996 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

3997 name=name) 

3998 if _execute.must_record_gradient(): 

3999 _execute.record_gradient( 

4000 "MultiDeviceIteratorFromStringHandle", _inputs_flat, _attrs, _result) 

4001 _result, = _result 

4002 return _result 

4003 

4004 

4005def multi_device_iterator_get_next_from_shard(multi_device_iterator, shard_num, incarnation_id, output_types, output_shapes, name=None): 

4006 r"""Gets next element for the provided shard number. 

4007 

4008 Args: 

4009 multi_device_iterator: A `Tensor` of type `resource`. 

4010 A MultiDeviceIterator resource. 

4011 shard_num: A `Tensor` of type `int32`. 

4012 Integer representing which shard to fetch data for. 

4013 incarnation_id: A `Tensor` of type `int64`. 

4014 Which incarnation of the MultiDeviceIterator is running. 

4015 output_types: A list of `tf.DTypes` that has length `>= 1`. 

4016 The type list for the return values. 

4017 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

4018 The list of shapes being produced. 

4019 name: A name for the operation (optional). 

4020 

4021 Returns: 

4022 A list of `Tensor` objects of type `output_types`. 

4023 """ 

4024 _ctx = _context._context or _context.context() 

4025 tld = _ctx._thread_local_data 

4026 if tld.is_eager: 

4027 try: 

4028 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

4029 _ctx, "MultiDeviceIteratorGetNextFromShard", name, 

4030 multi_device_iterator, shard_num, incarnation_id, "output_types", 

4031 output_types, "output_shapes", output_shapes) 

4032 return _result 

4033 except _core._NotOkStatusException as e: 

4034 _ops.raise_from_not_ok_status(e, name) 

4035 except _core._FallbackException: 

4036 pass 

4037 try: 

4038 return multi_device_iterator_get_next_from_shard_eager_fallback( 

4039 multi_device_iterator, shard_num, incarnation_id, 

4040 output_types=output_types, output_shapes=output_shapes, name=name, 

4041 ctx=_ctx) 

4042 except _core._SymbolicException: 

4043 pass # Add nodes to the TensorFlow graph. 

4044 # Add nodes to the TensorFlow graph. 

4045 if not isinstance(output_types, (list, tuple)): 

4046 raise TypeError( 

4047 "Expected list for 'output_types' argument to " 

4048 "'multi_device_iterator_get_next_from_shard' Op, not %r." % output_types) 

4049 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

4050 if not isinstance(output_shapes, (list, tuple)): 

4051 raise TypeError( 

4052 "Expected list for 'output_shapes' argument to " 

4053 "'multi_device_iterator_get_next_from_shard' Op, not %r." % output_shapes) 

4054 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

4055 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

4056 "MultiDeviceIteratorGetNextFromShard", multi_device_iterator=multi_device_iterator, 

4057 shard_num=shard_num, 

4058 incarnation_id=incarnation_id, 

4059 output_types=output_types, 

4060 output_shapes=output_shapes, 

4061 name=name) 

4062 _result = _outputs[:] 

4063 if not _result: 

4064 return _op 

4065 if _execute.must_record_gradient(): 

4066 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

4067 _op.get_attr("output_shapes")) 

4068 _inputs_flat = _op.inputs 

4069 _execute.record_gradient( 

4070 "MultiDeviceIteratorGetNextFromShard", _inputs_flat, _attrs, _result) 

4071 return _result 

4072 

4073MultiDeviceIteratorGetNextFromShard = tf_export("raw_ops.MultiDeviceIteratorGetNextFromShard")(_ops.to_raw_op(multi_device_iterator_get_next_from_shard)) 

4074 

4075 

4076def multi_device_iterator_get_next_from_shard_eager_fallback(multi_device_iterator, shard_num, incarnation_id, output_types, output_shapes, name, ctx): 

4077 if not isinstance(output_types, (list, tuple)): 

4078 raise TypeError( 

4079 "Expected list for 'output_types' argument to " 

4080 "'multi_device_iterator_get_next_from_shard' Op, not %r." % output_types) 

4081 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

4082 if not isinstance(output_shapes, (list, tuple)): 

4083 raise TypeError( 

4084 "Expected list for 'output_shapes' argument to " 

4085 "'multi_device_iterator_get_next_from_shard' Op, not %r." % output_shapes) 

4086 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

4087 multi_device_iterator = _ops.convert_to_tensor(multi_device_iterator, _dtypes.resource) 

4088 shard_num = _ops.convert_to_tensor(shard_num, _dtypes.int32) 

4089 incarnation_id = _ops.convert_to_tensor(incarnation_id, _dtypes.int64) 

4090 _inputs_flat = [multi_device_iterator, shard_num, incarnation_id] 

4091 _attrs = ("output_types", output_types, "output_shapes", output_shapes) 

4092 _result = _execute.execute(b"MultiDeviceIteratorGetNextFromShard", 

4093 len(output_types), inputs=_inputs_flat, 

4094 attrs=_attrs, ctx=ctx, name=name) 

4095 if _execute.must_record_gradient(): 

4096 _execute.record_gradient( 

4097 "MultiDeviceIteratorGetNextFromShard", _inputs_flat, _attrs, _result) 

4098 return _result 

4099 

4100 

4101def multi_device_iterator_init(dataset, multi_device_iterator, max_buffer_size, name=None): 

4102 r"""Initializes the multi device iterator with the given dataset. 

4103 

4104 Args: 

4105 dataset: A `Tensor` of type `variant`. Dataset to be iterated upon. 

4106 multi_device_iterator: A `Tensor` of type `resource`. 

4107 A MultiDeviceIterator resource. 

4108 max_buffer_size: A `Tensor` of type `int64`. 

4109 The maximum size of the host-side, per-device buffer to keep. 

4110 name: A name for the operation (optional). 

4111 

4112 Returns: 

4113 A `Tensor` of type `int64`. 

4114 """ 

4115 _ctx = _context._context or _context.context() 

4116 tld = _ctx._thread_local_data 

4117 if tld.is_eager: 

4118 try: 

4119 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

4120 _ctx, "MultiDeviceIteratorInit", name, dataset, multi_device_iterator, 

4121 max_buffer_size) 

4122 return _result 

4123 except _core._NotOkStatusException as e: 

4124 _ops.raise_from_not_ok_status(e, name) 

4125 except _core._FallbackException: 

4126 pass 

4127 try: 

4128 return multi_device_iterator_init_eager_fallback( 

4129 dataset, multi_device_iterator, max_buffer_size, name=name, 

4130 ctx=_ctx) 

4131 except _core._SymbolicException: 

4132 pass # Add nodes to the TensorFlow graph. 

4133 # Add nodes to the TensorFlow graph. 

4134 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

4135 "MultiDeviceIteratorInit", dataset=dataset, 

4136 multi_device_iterator=multi_device_iterator, 

4137 max_buffer_size=max_buffer_size, name=name) 

4138 _result = _outputs[:] 

4139 if _execute.must_record_gradient(): 

4140 _attrs = () 

4141 _inputs_flat = _op.inputs 

4142 _execute.record_gradient( 

4143 "MultiDeviceIteratorInit", _inputs_flat, _attrs, _result) 

4144 _result, = _result 

4145 return _result 

4146 

4147MultiDeviceIteratorInit = tf_export("raw_ops.MultiDeviceIteratorInit")(_ops.to_raw_op(multi_device_iterator_init)) 

4148 

4149 

4150def multi_device_iterator_init_eager_fallback(dataset, multi_device_iterator, max_buffer_size, name, ctx): 

4151 dataset = _ops.convert_to_tensor(dataset, _dtypes.variant) 

4152 multi_device_iterator = _ops.convert_to_tensor(multi_device_iterator, _dtypes.resource) 

4153 max_buffer_size = _ops.convert_to_tensor(max_buffer_size, _dtypes.int64) 

4154 _inputs_flat = [dataset, multi_device_iterator, max_buffer_size] 

4155 _attrs = None 

4156 _result = _execute.execute(b"MultiDeviceIteratorInit", 1, 

4157 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

4158 name=name) 

4159 if _execute.must_record_gradient(): 

4160 _execute.record_gradient( 

4161 "MultiDeviceIteratorInit", _inputs_flat, _attrs, _result) 

4162 _result, = _result 

4163 return _result 

4164 
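Putting the MultiDeviceIterator ops together: a sketch that creates the resource, initializes it with a dataset, and pulls one element for shard 0, threading the incarnation id returned by `MultiDeviceIteratorInit` into `MultiDeviceIteratorGetNextFromShard`. The literal values and the private `_variant_tensor` handle are illustrative assumptions.

import tensorflow as tf

def _multi_device_pipeline_sketch():
  ds = tf.data.Dataset.range(8)
  it = tf.raw_ops.MultiDeviceIterator(
      devices=["/device:CPU:0"], shared_name="sketch", container="",
      output_types=[tf.int64], output_shapes=[tf.TensorShape([])])
  # Init returns the incarnation id that later GetNextFromShard calls must
  # present; max_buffer_size bounds the host-side per-device buffer.
  incarnation_id = tf.raw_ops.MultiDeviceIteratorInit(
      dataset=ds._variant_tensor,  # private handle; illustration only
      multi_device_iterator=it, max_buffer_size=2)
  return tf.raw_ops.MultiDeviceIteratorGetNextFromShard(
      multi_device_iterator=it, shard_num=0, incarnation_id=incarnation_id,
      output_types=[tf.int64], output_shapes=[tf.TensorShape([])])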

4165 

4166def multi_device_iterator_to_string_handle(multi_device_iterator, name=None): 

4167 r"""Produces a string handle for the given MultiDeviceIterator. 

4168 

4169 Args: 

4170 multi_device_iterator: A `Tensor` of type `resource`. 

4171 A MultiDeviceIterator resource. 

4172 name: A name for the operation (optional). 

4173 

4174 Returns: 

4175 A `Tensor` of type `string`. 

4176 """ 

4177 _ctx = _context._context or _context.context() 

4178 tld = _ctx._thread_local_data 

4179 if tld.is_eager: 

4180 try: 

4181 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

4182 _ctx, "MultiDeviceIteratorToStringHandle", name, 

4183 multi_device_iterator) 

4184 return _result 

4185 except _core._NotOkStatusException as e: 

4186 _ops.raise_from_not_ok_status(e, name) 

4187 except _core._FallbackException: 

4188 pass 

4189 try: 

4190 return multi_device_iterator_to_string_handle_eager_fallback( 

4191 multi_device_iterator, name=name, ctx=_ctx) 

4192 except _core._SymbolicException: 

4193 pass # Add nodes to the TensorFlow graph. 

4194 # Add nodes to the TensorFlow graph. 

4195 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

4196 "MultiDeviceIteratorToStringHandle", multi_device_iterator=multi_device_iterator, 

4197 name=name) 

4198 _result = _outputs[:] 

4199 if _execute.must_record_gradient(): 

4200 _attrs = () 

4201 _inputs_flat = _op.inputs 

4202 _execute.record_gradient( 

4203 "MultiDeviceIteratorToStringHandle", _inputs_flat, _attrs, _result) 

4204 _result, = _result 

4205 return _result 

4206 

4207MultiDeviceIteratorToStringHandle = tf_export("raw_ops.MultiDeviceIteratorToStringHandle")(_ops.to_raw_op(multi_device_iterator_to_string_handle)) 

4208 

4209 

4210def multi_device_iterator_to_string_handle_eager_fallback(multi_device_iterator, name, ctx): 

4211 multi_device_iterator = _ops.convert_to_tensor(multi_device_iterator, _dtypes.resource) 

4212 _inputs_flat = [multi_device_iterator] 

4213 _attrs = None 

4214 _result = _execute.execute(b"MultiDeviceIteratorToStringHandle", 1, 

4215 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

4216 name=name) 

4217 if _execute.must_record_gradient(): 

4218 _execute.record_gradient( 

4219 "MultiDeviceIteratorToStringHandle", _inputs_flat, _attrs, _result) 

4220 _result, = _result 

4221 return _result 

4222 
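A sketch of the string-handle round trip using the two ops above: serialize the resource to a string handle, then recover a resource tensor from it. The types and shapes passed on recovery are assumed to need to match those of the original iterator.

import tensorflow as tf

def _string_handle_round_trip_sketch():
  it = tf.raw_ops.MultiDeviceIterator(
      devices=["/device:CPU:0"], shared_name="sketch_rt", container="",
      output_types=[tf.int64], output_shapes=[tf.TensorShape([])])
  handle = tf.raw_ops.MultiDeviceIteratorToStringHandle(
      multi_device_iterator=it)
  return tf.raw_ops.MultiDeviceIteratorFromStringHandle(
      string_handle=handle,
      output_types=[tf.int64], output_shapes=[tf.TensorShape([])])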

4223 

4224def one_shot_iterator(dataset_factory, output_types, output_shapes, container="", shared_name="", name=None): 

4225 r"""Makes a "one-shot" iterator that can be iterated only once. 

4226 

4227 A one-shot iterator bundles the logic for defining the dataset and 

4228 the state of the iterator in a single op, which allows simple input 

4229 pipelines to be defined without an additional initialization 

4230 ("MakeIterator") step. 

4231 

4232 One-shot iterators have the following limitations: 

4233 

4234 * They do not support parameterization: all logic for creating the underlying 

4235 dataset must be bundled in the `dataset_factory` function. 

4236 * They are not resettable. Once a one-shot iterator reaches the end of its 

4237 underlying dataset, subsequent "IteratorGetNext" operations on that 

4238 iterator will always produce an `OutOfRange` error. 

4239 

4240 For greater flexibility, use "Iterator" and "MakeIterator" to define 

4241 an iterator using an arbitrary subgraph, which may capture tensors 

4242 (including fed values) as parameters, and which may be reset multiple 

4243 times by rerunning "MakeIterator". 

4244 

4245 Args: 

4246 dataset_factory: A function decorated with @Defun. 

4247 A function of type `() -> DT_VARIANT`, where the returned 

4248 DT_VARIANT is a dataset. 

4249 output_types: A list of `tf.DTypes` that has length `>= 1`. 

4250 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

4251 container: An optional `string`. Defaults to `""`. 

4252 shared_name: An optional `string`. Defaults to `""`. 

4253 name: A name for the operation (optional). 

4254 

4255 Returns: 

4256 A `Tensor` of type `resource`. 

4257 """ 

4258 _ctx = _context._context or _context.context() 

4259 tld = _ctx._thread_local_data 

4260 if tld.is_eager: 

4261 try: 

4262 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

4263 _ctx, "OneShotIterator", name, "dataset_factory", dataset_factory, 

4264 "output_types", output_types, "output_shapes", output_shapes, 

4265 "container", container, "shared_name", shared_name) 

4266 return _result 

4267 except _core._NotOkStatusException as e: 

4268 _ops.raise_from_not_ok_status(e, name) 

4269 except _core._FallbackException: 

4270 pass 

4271 try: 

4272 return one_shot_iterator_eager_fallback( 

4273 dataset_factory=dataset_factory, output_types=output_types, 

4274 output_shapes=output_shapes, container=container, 

4275 shared_name=shared_name, name=name, ctx=_ctx) 

4276 except _core._SymbolicException: 

4277 pass # Add nodes to the TensorFlow graph. 

4278 # Add nodes to the TensorFlow graph. 

4279 if not isinstance(output_types, (list, tuple)): 

4280 raise TypeError( 

4281 "Expected list for 'output_types' argument to " 

4282 "'one_shot_iterator' Op, not %r." % output_types) 

4283 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

4284 if not isinstance(output_shapes, (list, tuple)): 

4285 raise TypeError( 

4286 "Expected list for 'output_shapes' argument to " 

4287 "'one_shot_iterator' Op, not %r." % output_shapes) 

4288 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

4289 if container is None: 

4290 container = "" 

4291 container = _execute.make_str(container, "container") 

4292 if shared_name is None: 

4293 shared_name = "" 

4294 shared_name = _execute.make_str(shared_name, "shared_name") 

4295 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

4296 "OneShotIterator", dataset_factory=dataset_factory, 

4297 output_types=output_types, 

4298 output_shapes=output_shapes, container=container, 

4299 shared_name=shared_name, name=name) 

4300 _result = _outputs[:] 

4301 if _execute.must_record_gradient(): 

4302 _attrs = ("dataset_factory", _op.get_attr("dataset_factory"), 

4303 "output_types", _op.get_attr("output_types"), "output_shapes", 

4304 _op.get_attr("output_shapes"), "container", 

4305 _op.get_attr("container"), "shared_name", 

4306 _op.get_attr("shared_name")) 

4307 _inputs_flat = _op.inputs 

4308 _execute.record_gradient( 

4309 "OneShotIterator", _inputs_flat, _attrs, _result) 

4310 _result, = _result 

4311 return _result 

4312 

4313OneShotIterator = tf_export("raw_ops.OneShotIterator")(_ops.to_raw_op(one_shot_iterator)) 

4314 

4315 

4316def one_shot_iterator_eager_fallback(dataset_factory, output_types, output_shapes, container, shared_name, name, ctx): 

4317 if not isinstance(output_types, (list, tuple)): 

4318 raise TypeError( 

4319 "Expected list for 'output_types' argument to " 

4320 "'one_shot_iterator' Op, not %r." % output_types) 

4321 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

4322 if not isinstance(output_shapes, (list, tuple)): 

4323 raise TypeError( 

4324 "Expected list for 'output_shapes' argument to " 

4325 "'one_shot_iterator' Op, not %r." % output_shapes) 

4326 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

4327 if container is None: 

4328 container = "" 

4329 container = _execute.make_str(container, "container") 

4330 if shared_name is None: 

4331 shared_name = "" 

4332 shared_name = _execute.make_str(shared_name, "shared_name") 

4333 _inputs_flat = [] 

4334 _attrs = ("dataset_factory", dataset_factory, "output_types", output_types, 

4335 "output_shapes", output_shapes, "container", container, "shared_name", 

4336 shared_name) 

4337 _result = _execute.execute(b"OneShotIterator", 1, inputs=_inputs_flat, 

4338 attrs=_attrs, ctx=ctx, name=name) 

4339 if _execute.must_record_gradient(): 

4340 _execute.record_gradient( 

4341 "OneShotIterator", _inputs_flat, _attrs, _result) 

4342 _result, = _result 

4343 return _result 

4344 
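A sketch of driving this op directly, mirroring (as an assumption about internals, not a guarantee) the pattern the legacy one-shot-iterator helpers use: the factory must be a `Defun`-decorated function returning a dataset variant, and the op is built here inside an explicit graph since it is not meaningful as a standalone eager call.

import tensorflow as tf
from tensorflow.python.framework import function

def _one_shot_iterator_sketch():
  with tf.Graph().as_default():
    @function.Defun(capture_by_value=True)  # factory captures by value
    def _dataset_factory():
      # Build the dataset inside the factory and return its variant handle
      # (a private attribute, used here for illustration only).
      return tf.data.Dataset.range(4)._variant_tensor

    it = tf.raw_ops.OneShotIterator(
        dataset_factory=_dataset_factory,
        output_types=[tf.int64], output_shapes=[tf.TensorShape([])])
    return tf.raw_ops.IteratorGetNext(
        iterator=it, output_types=[tf.int64],
        output_shapes=[tf.TensorShape([])])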

4345 

4346def optimize_dataset(input_dataset, optimizations, output_types, output_shapes, optimization_configs=[], name=None): 

4347 r"""Creates a dataset by applying optimizations to `input_dataset`. 

4348 

4349 

4350 

4351 Args: 

4352 input_dataset: A `Tensor` of type `variant`. 

4353 A variant tensor representing the input dataset. 

4354 optimizations: A `Tensor` of type `string`. 

4355 A `tf.string` vector `tf.Tensor` identifying optimizations to use. 

4356 output_types: A list of `tf.DTypes` that has length `>= 1`. 

4357 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

4358 optimization_configs: An optional list of `strings`. Defaults to `[]`. 

4359 name: A name for the operation (optional). 

4360 

4361 Returns: 

4362 A `Tensor` of type `variant`. 

4363 """ 

4364 _ctx = _context._context or _context.context() 

4365 tld = _ctx._thread_local_data 

4366 if tld.is_eager: 

4367 try: 

4368 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

4369 _ctx, "OptimizeDataset", name, input_dataset, optimizations, 

4370 "output_types", output_types, "output_shapes", output_shapes, 

4371 "optimization_configs", optimization_configs) 

4372 return _result 

4373 except _core._NotOkStatusException as e: 

4374 _ops.raise_from_not_ok_status(e, name) 

4375 except _core._FallbackException: 

4376 pass 

4377 try: 

4378 return optimize_dataset_eager_fallback( 

4379 input_dataset, optimizations, output_types=output_types, 

4380 output_shapes=output_shapes, 

4381 optimization_configs=optimization_configs, name=name, ctx=_ctx) 

4382 except _core._SymbolicException: 

4383 pass # Add nodes to the TensorFlow graph. 

4384 # Add nodes to the TensorFlow graph. 

4385 if not isinstance(output_types, (list, tuple)): 

4386 raise TypeError( 

4387 "Expected list for 'output_types' argument to " 

4388 "'optimize_dataset' Op, not %r." % output_types) 

4389 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

4390 if not isinstance(output_shapes, (list, tuple)): 

4391 raise TypeError( 

4392 "Expected list for 'output_shapes' argument to " 

4393 "'optimize_dataset' Op, not %r." % output_shapes) 

4394 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

4395 if optimization_configs is None: 

4396 optimization_configs = [] 

4397 if not isinstance(optimization_configs, (list, tuple)): 

4398 raise TypeError( 

4399 "Expected list for 'optimization_configs' argument to " 

4400 "'optimize_dataset' Op, not %r." % optimization_configs) 

4401 optimization_configs = [_execute.make_str(_s, "optimization_configs") for _s in optimization_configs] 

4402 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

4403 "OptimizeDataset", input_dataset=input_dataset, 

4404 optimizations=optimizations, 

4405 output_types=output_types, 

4406 output_shapes=output_shapes, 

4407 optimization_configs=optimization_configs, 

4408 name=name) 

4409 _result = _outputs[:] 

4410 if _execute.must_record_gradient(): 

4411 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

4412 _op.get_attr("output_shapes"), "optimization_configs", 

4413 _op.get_attr("optimization_configs")) 

4414 _inputs_flat = _op.inputs 

4415 _execute.record_gradient( 

4416 "OptimizeDataset", _inputs_flat, _attrs, _result) 

4417 _result, = _result 

4418 return _result 

4419 

4420OptimizeDataset = tf_export("raw_ops.OptimizeDataset")(_ops.to_raw_op(optimize_dataset)) 

4421 

4422 

4423def optimize_dataset_eager_fallback(input_dataset, optimizations, output_types, output_shapes, optimization_configs, name, ctx): 

4424 if not isinstance(output_types, (list, tuple)): 

4425 raise TypeError( 

4426 "Expected list for 'output_types' argument to " 

4427 "'optimize_dataset' Op, not %r." % output_types) 

4428 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

4429 if not isinstance(output_shapes, (list, tuple)): 

4430 raise TypeError( 

4431 "Expected list for 'output_shapes' argument to " 

4432 "'optimize_dataset' Op, not %r." % output_shapes) 

4433 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

4434 if optimization_configs is None: 

4435 optimization_configs = [] 

4436 if not isinstance(optimization_configs, (list, tuple)): 

4437 raise TypeError( 

4438 "Expected list for 'optimization_configs' argument to " 

4439 "'optimize_dataset' Op, not %r." % optimization_configs) 

4440 optimization_configs = [_execute.make_str(_s, "optimization_configs") for _s in optimization_configs] 

4441 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

4442 optimizations = _ops.convert_to_tensor(optimizations, _dtypes.string) 

4443 _inputs_flat = [input_dataset, optimizations] 

4444 _attrs = ("output_types", output_types, "output_shapes", output_shapes, 

4445 "optimization_configs", optimization_configs) 

4446 _result = _execute.execute(b"OptimizeDataset", 1, inputs=_inputs_flat, 

4447 attrs=_attrs, ctx=ctx, name=name) 

4448 if _execute.must_record_gradient(): 

4449 _execute.record_gradient( 

4450 "OptimizeDataset", _inputs_flat, _attrs, _result) 

4451 _result, = _result 

4452 return _result 

4453 
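A minimal usage sketch for the op above; "noop_elimination" is assumed to be one of the rewrite names the tf.data optimizer recognizes, and the private `_variant_tensor` handle is used for demonstration only.

import tensorflow as tf

def _optimize_dataset_sketch():
  ds = tf.data.Dataset.range(8)
  # Apply the named graph rewrites to the dataset's variant handle.
  return tf.raw_ops.OptimizeDataset(
      input_dataset=ds._variant_tensor,
      optimizations=tf.constant(["noop_elimination"]),
      output_types=[tf.int64], output_shapes=[tf.TensorShape([])])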

4454 

4455def optimize_dataset_v2(input_dataset, optimizations_enabled, optimizations_disabled, optimizations_default, output_types, output_shapes, optimization_configs=[], name=None): 

4456 r"""Creates a dataset by applying related optimizations to `input_dataset`. 

4457 

4458 

4459 

4460 Args: 

4461 input_dataset: A `Tensor` of type `variant`. 

4462 A variant tensor representing the input dataset. 

4463 optimizations_enabled: A `Tensor` of type `string`. 

4464 A `tf.string` vector `tf.Tensor` identifying user-enabled optimizations. 

4465 optimizations_disabled: A `Tensor` of type `string`. 

4466 A `tf.string` vector `tf.Tensor` identifying user-disabled optimizations. 

4467 optimizations_default: A `Tensor` of type `string`. 

4468 A `tf.string` vector `tf.Tensor` identifying optimizations to use by default. 

4469 output_types: A list of `tf.DTypes` that has length `>= 1`. 

4470 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

4471 optimization_configs: An optional list of `strings`. Defaults to `[]`. 

4472 name: A name for the operation (optional). 

4473 

4474 Returns: 

4475 A `Tensor` of type `variant`. 

4476 """ 

4477 _ctx = _context._context or _context.context() 

4478 tld = _ctx._thread_local_data 

4479 if tld.is_eager: 

4480 try: 

4481 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

4482 _ctx, "OptimizeDatasetV2", name, input_dataset, optimizations_enabled, 

4483 optimizations_disabled, optimizations_default, "output_types", 

4484 output_types, "output_shapes", output_shapes, "optimization_configs", 

4485 optimization_configs) 

4486 return _result 

4487 except _core._NotOkStatusException as e: 

4488 _ops.raise_from_not_ok_status(e, name) 

4489 except _core._FallbackException: 

4490 pass 

4491 try: 

4492 return optimize_dataset_v2_eager_fallback( 

4493 input_dataset, optimizations_enabled, optimizations_disabled, 

4494 optimizations_default, output_types=output_types, 

4495 output_shapes=output_shapes, 

4496 optimization_configs=optimization_configs, name=name, ctx=_ctx) 

4497 except _core._SymbolicException: 

4498 pass # Add nodes to the TensorFlow graph. 

4499 # Add nodes to the TensorFlow graph. 

4500 if not isinstance(output_types, (list, tuple)): 

4501 raise TypeError( 

4502 "Expected list for 'output_types' argument to " 

4503 "'optimize_dataset_v2' Op, not %r." % output_types) 

4504 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

4505 if not isinstance(output_shapes, (list, tuple)): 

4506 raise TypeError( 

4507 "Expected list for 'output_shapes' argument to " 

4508 "'optimize_dataset_v2' Op, not %r." % output_shapes) 

4509 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

4510 if optimization_configs is None: 

4511 optimization_configs = [] 

4512 if not isinstance(optimization_configs, (list, tuple)): 

4513 raise TypeError( 

4514 "Expected list for 'optimization_configs' argument to " 

4515 "'optimize_dataset_v2' Op, not %r." % optimization_configs) 

4516 optimization_configs = [_execute.make_str(_s, "optimization_configs") for _s in optimization_configs] 

4517 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

4518 "OptimizeDatasetV2", input_dataset=input_dataset, 

4519 optimizations_enabled=optimizations_enabled, 

4520 optimizations_disabled=optimizations_disabled, 

4521 optimizations_default=optimizations_default, 

4522 output_types=output_types, 

4523 output_shapes=output_shapes, 

4524 optimization_configs=optimization_configs, 

4525 name=name) 

4526 _result = _outputs[:] 

4527 if _execute.must_record_gradient(): 

4528 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

4529 _op.get_attr("output_shapes"), "optimization_configs", 

4530 _op.get_attr("optimization_configs")) 

4531 _inputs_flat = _op.inputs 

4532 _execute.record_gradient( 

4533 "OptimizeDatasetV2", _inputs_flat, _attrs, _result) 

4534 _result, = _result 

4535 return _result 

4536 

4537OptimizeDatasetV2 = tf_export("raw_ops.OptimizeDatasetV2")(_ops.to_raw_op(optimize_dataset_v2)) 

4538 

4539 

4540def optimize_dataset_v2_eager_fallback(input_dataset, optimizations_enabled, optimizations_disabled, optimizations_default, output_types, output_shapes, optimization_configs, name, ctx): 

4541 if not isinstance(output_types, (list, tuple)): 

4542 raise TypeError( 

4543 "Expected list for 'output_types' argument to " 

4544 "'optimize_dataset_v2' Op, not %r." % output_types) 

4545 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

4546 if not isinstance(output_shapes, (list, tuple)): 

4547 raise TypeError( 

4548 "Expected list for 'output_shapes' argument to " 

4549 "'optimize_dataset_v2' Op, not %r." % output_shapes) 

4550 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

4551 if optimization_configs is None: 

4552 optimization_configs = [] 

4553 if not isinstance(optimization_configs, (list, tuple)): 

4554 raise TypeError( 

4555 "Expected list for 'optimization_configs' argument to " 

4556 "'optimize_dataset_v2' Op, not %r." % optimization_configs) 

4557 optimization_configs = [_execute.make_str(_s, "optimization_configs") for _s in optimization_configs] 

4558 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

4559 optimizations_enabled = _ops.convert_to_tensor(optimizations_enabled, _dtypes.string) 

4560 optimizations_disabled = _ops.convert_to_tensor(optimizations_disabled, _dtypes.string) 

4561 optimizations_default = _ops.convert_to_tensor(optimizations_default, _dtypes.string) 

4562 _inputs_flat = [input_dataset, optimizations_enabled, optimizations_disabled, optimizations_default] 

4563 _attrs = ("output_types", output_types, "output_shapes", output_shapes, 

4564 "optimization_configs", optimization_configs) 

4565 _result = _execute.execute(b"OptimizeDatasetV2", 1, inputs=_inputs_flat, 

4566 attrs=_attrs, ctx=ctx, name=name) 

4567 if _execute.must_record_gradient(): 

4568 _execute.record_gradient( 

4569 "OptimizeDatasetV2", _inputs_flat, _attrs, _result) 

4570 _result, = _result 

4571 return _result 

4572 
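A sketch of the V2 variant, which splits the optimization list into user-enabled, user-disabled, and default buckets; here only one (assumed) rewrite name is explicitly enabled and the other buckets are left empty.

import tensorflow as tf

def _optimize_dataset_v2_sketch():
  ds = tf.data.Dataset.range(8)
  empty = tf.constant([], dtype=tf.string)
  return tf.raw_ops.OptimizeDatasetV2(
      input_dataset=ds._variant_tensor,  # private handle; illustration only
      optimizations_enabled=tf.constant(["noop_elimination"]),
      optimizations_disabled=empty, optimizations_default=empty,
      output_types=[tf.int64], output_shapes=[tf.TensorShape([])])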

4573 

4574def options_dataset(input_dataset, serialized_options, output_types, output_shapes, metadata="", name=None): 

4575 r"""Creates a dataset by attaching tf.data.Options to `input_dataset`. 

4576 

4577 Args: 

4578 input_dataset: A `Tensor` of type `variant`. 

4579 A variant tensor representing the input dataset. 

4580 serialized_options: A `string`. 

4581 A `tf.string` scalar `tf.Tensor` of a serialized `tf.data.Options` protocol buffer. 

4582 output_types: A list of `tf.DTypes` that has length `>= 1`. 

4583 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

4584 metadata: An optional `string`. Defaults to `""`. 

4585 name: A name for the operation (optional). 

4586 

4587 Returns: 

4588 A `Tensor` of type `variant`. 

4589 """ 

4590 _ctx = _context._context or _context.context() 

4591 tld = _ctx._thread_local_data 

4592 if tld.is_eager: 

4593 try: 

4594 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

4595 _ctx, "OptionsDataset", name, input_dataset, "serialized_options", 

4596 serialized_options, "output_types", output_types, "output_shapes", 

4597 output_shapes, "metadata", metadata) 

4598 return _result 

4599 except _core._NotOkStatusException as e: 

4600 _ops.raise_from_not_ok_status(e, name) 

4601 except _core._FallbackException: 

4602 pass 

4603 try: 

4604 return options_dataset_eager_fallback( 

4605 input_dataset, serialized_options=serialized_options, 

4606 output_types=output_types, output_shapes=output_shapes, 

4607 metadata=metadata, name=name, ctx=_ctx) 

4608 except _core._SymbolicException: 

4609 pass # Add nodes to the TensorFlow graph. 

4610 # Add nodes to the TensorFlow graph. 

4611 serialized_options = _execute.make_str(serialized_options, "serialized_options") 

4612 if not isinstance(output_types, (list, tuple)): 

4613 raise TypeError( 

4614 "Expected list for 'output_types' argument to " 

4615 "'options_dataset' Op, not %r." % output_types) 

4616 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

4617 if not isinstance(output_shapes, (list, tuple)): 

4618 raise TypeError( 

4619 "Expected list for 'output_shapes' argument to " 

4620 "'options_dataset' Op, not %r." % output_shapes) 

4621 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

4622 if metadata is None: 

4623 metadata = "" 

4624 metadata = _execute.make_str(metadata, "metadata") 

4625 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

4626 "OptionsDataset", input_dataset=input_dataset, 

4627 serialized_options=serialized_options, 

4628 output_types=output_types, 

4629 output_shapes=output_shapes, metadata=metadata, 

4630 name=name) 

4631 _result = _outputs[:] 

4632 if _execute.must_record_gradient(): 

4633 _attrs = ("serialized_options", _op.get_attr("serialized_options"), 

4634 "output_types", _op.get_attr("output_types"), "output_shapes", 

4635 _op.get_attr("output_shapes"), "metadata", 

4636 _op.get_attr("metadata")) 

4637 _inputs_flat = _op.inputs 

4638 _execute.record_gradient( 

4639 "OptionsDataset", _inputs_flat, _attrs, _result) 

4640 _result, = _result 

4641 return _result 

4642 

4643OptionsDataset = tf_export("raw_ops.OptionsDataset")(_ops.to_raw_op(options_dataset)) 

4644 

4645 

4646def options_dataset_eager_fallback(input_dataset, serialized_options, output_types, output_shapes, metadata, name, ctx): 

4647 serialized_options = _execute.make_str(serialized_options, "serialized_options") 

4648 if not isinstance(output_types, (list, tuple)): 

4649 raise TypeError( 

4650 "Expected list for 'output_types' argument to " 

4651 "'options_dataset' Op, not %r." % output_types) 

4652 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

4653 if not isinstance(output_shapes, (list, tuple)): 

4654 raise TypeError( 

4655 "Expected list for 'output_shapes' argument to " 

4656 "'options_dataset' Op, not %r." % output_shapes) 

4657 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

4658 if metadata is None: 

4659 metadata = "" 

4660 metadata = _execute.make_str(metadata, "metadata") 

4661 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

4662 _inputs_flat = [input_dataset] 

4663 _attrs = ("serialized_options", serialized_options, "output_types", 

4664 output_types, "output_shapes", output_shapes, "metadata", metadata) 

4665 _result = _execute.execute(b"OptionsDataset", 1, inputs=_inputs_flat, 

4666 attrs=_attrs, ctx=ctx, name=name) 

4667 if _execute.must_record_gradient(): 

4668 _execute.record_gradient( 

4669 "OptionsDataset", _inputs_flat, _attrs, _result) 

4670 _result, = _result 

4671 return _result 

4672 
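A sketch of attaching options via this op. The serialized attribute must be a `tf.data.Options` protocol buffer in wire form; the `_to_proto()` conversion used below is a private helper and is assumed here purely to obtain that serialization.

import tensorflow as tf

def _options_dataset_sketch():
  ds = tf.data.Dataset.range(8)
  options = tf.data.Options()
  # Private conversion, used only to obtain the serialized proto (assumption).
  serialized = options._to_proto().SerializeToString()
  return tf.raw_ops.OptionsDataset(
      input_dataset=ds._variant_tensor, serialized_options=serialized,
      output_types=[tf.int64], output_shapes=[tf.TensorShape([])])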

4673 

4674def padded_batch_dataset(input_dataset, batch_size, padded_shapes, padding_values, output_shapes, metadata="", name=None): 

4675 r"""Creates a dataset that batches and pads `batch_size` elements from the input. 

4676 

4677 Args: 

4678 input_dataset: A `Tensor` of type `variant`. 

4679 batch_size: A `Tensor` of type `int64`. 

4680 A scalar representing the number of elements to accumulate in a 

4681 batch. 

4682 padded_shapes: A list of at least 1 `Tensor` objects with type `int64`. 

4683 A list of int64 tensors representing the desired padded shapes 

4684 of the corresponding output components. These shapes may be partially 

4685 specified, using `-1` to indicate that a particular dimension should be 

4686 padded to the maximum size of all batch elements. 

4687 padding_values: A list of `Tensor` objects. 

4688 A list of scalars containing the padding value to use for 

4689 each of the outputs. 

4690 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

4691 metadata: An optional `string`. Defaults to `""`. 

4692 name: A name for the operation (optional). 

4693 

4694 Returns: 

4695 A `Tensor` of type `variant`. 

4696 """ 

4697 _ctx = _context._context or _context.context() 

4698 tld = _ctx._thread_local_data 

4699 if tld.is_eager: 

4700 try: 

4701 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

4702 _ctx, "PaddedBatchDataset", name, input_dataset, batch_size, 

4703 padded_shapes, padding_values, "output_shapes", output_shapes, 

4704 "metadata", metadata) 

4705 return _result 

4706 except _core._NotOkStatusException as e: 

4707 _ops.raise_from_not_ok_status(e, name) 

4708 except _core._FallbackException: 

4709 pass 

4710 try: 

4711 return padded_batch_dataset_eager_fallback( 

4712 input_dataset, batch_size, padded_shapes, padding_values, 

4713 output_shapes=output_shapes, metadata=metadata, name=name, ctx=_ctx) 

4714 except _core._SymbolicException: 

4715 pass # Add nodes to the TensorFlow graph. 

4716 # Add nodes to the TensorFlow graph. 

4717 if not isinstance(padded_shapes, (list, tuple)): 

4718 raise TypeError( 

4719 "Expected list for 'padded_shapes' argument to " 

4720 "'padded_batch_dataset' Op, not %r." % padded_shapes) 

4721 _attr_N = len(padded_shapes) 

4722 if not isinstance(output_shapes, (list, tuple)): 

4723 raise TypeError( 

4724 "Expected list for 'output_shapes' argument to " 

4725 "'padded_batch_dataset' Op, not %r." % output_shapes) 

4726 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

4727 if metadata is None: 

4728 metadata = "" 

4729 metadata = _execute.make_str(metadata, "metadata") 

4730 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

4731 "PaddedBatchDataset", input_dataset=input_dataset, 

4732 batch_size=batch_size, 

4733 padded_shapes=padded_shapes, 

4734 padding_values=padding_values, 

4735 output_shapes=output_shapes, metadata=metadata, 

4736 name=name) 

4737 _result = _outputs[:] 

4738 if _execute.must_record_gradient(): 

4739 _attrs = ("Toutput_types", _op.get_attr("Toutput_types"), "output_shapes", 

4740 _op.get_attr("output_shapes"), "N", _op._get_attr_int("N"), 

4741 "metadata", _op.get_attr("metadata")) 

4742 _inputs_flat = _op.inputs 

4743 _execute.record_gradient( 

4744 "PaddedBatchDataset", _inputs_flat, _attrs, _result) 

4745 _result, = _result 

4746 return _result 

4747 

4748PaddedBatchDataset = tf_export("raw_ops.PaddedBatchDataset")(_ops.to_raw_op(padded_batch_dataset)) 

4749 

4750 

4751def padded_batch_dataset_eager_fallback(input_dataset, batch_size, padded_shapes, padding_values, output_shapes, metadata, name, ctx): 

4752 if not isinstance(padded_shapes, (list, tuple)): 

4753 raise TypeError( 

4754 "Expected list for 'padded_shapes' argument to " 

4755 "'padded_batch_dataset' Op, not %r." % padded_shapes) 

4756 _attr_N = len(padded_shapes) 

4757 if not isinstance(output_shapes, (list, tuple)): 

4758 raise TypeError( 

4759 "Expected list for 'output_shapes' argument to " 

4760 "'padded_batch_dataset' Op, not %r." % output_shapes) 

4761 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

4762 if metadata is None: 

4763 metadata = "" 

4764 metadata = _execute.make_str(metadata, "metadata") 

4765 _attr_Toutput_types, padding_values = _execute.convert_to_mixed_eager_tensors(padding_values, ctx) 

4766 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

4767 batch_size = _ops.convert_to_tensor(batch_size, _dtypes.int64) 

4768 padded_shapes = _ops.convert_n_to_tensor(padded_shapes, _dtypes.int64) 

4769 _inputs_flat = [input_dataset, batch_size] + list(padded_shapes) + list(padding_values) 

4770 _attrs = ("Toutput_types", _attr_Toutput_types, "output_shapes", 

4771 output_shapes, "N", _attr_N, "metadata", metadata) 

4772 _result = _execute.execute(b"PaddedBatchDataset", 1, inputs=_inputs_flat, 

4773 attrs=_attrs, ctx=ctx, name=name) 

4774 if _execute.must_record_gradient(): 

4775 _execute.record_gradient( 

4776 "PaddedBatchDataset", _inputs_flat, _attrs, _result) 

4777 _result, = _result 

4778 return _result 

4779 
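A sketch of padded batching over variable-length elements. The `-1` in `padded_shapes` pads each component to the longest element in the batch, as documented above; the dataset construction and private handle are illustrative assumptions.

import tensorflow as tf

def _padded_batch_sketch():
  # Variable-length int64 vectors: [], [0], [0, 1], [0, 1, 2].
  ds = tf.data.Dataset.range(4).map(tf.range)
  return tf.raw_ops.PaddedBatchDataset(
      input_dataset=ds._variant_tensor,
      batch_size=tf.constant(2, tf.int64),
      padded_shapes=[tf.constant([-1], tf.int64)],  # -1: pad to longest
      padding_values=[tf.constant(0, tf.int64)],
      output_shapes=[tf.TensorShape([None, None])])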

4780 

4781def padded_batch_dataset_v2(input_dataset, batch_size, padded_shapes, padding_values, drop_remainder, output_shapes, parallel_copy=False, metadata="", name=None): 

4782 r"""Creates a dataset that batches and pads `batch_size` elements from the input. 

4783 

4784 Args: 

4785 input_dataset: A `Tensor` of type `variant`. 

4786 batch_size: A `Tensor` of type `int64`. 

4787 A scalar representing the number of elements to accumulate in a 

4788 batch. 

4789 padded_shapes: A list of at least 1 `Tensor` objects with type `int64`. 

4790 A list of int64 tensors representing the desired padded shapes 

4791 of the corresponding output components. These shapes may be partially 

4792 specified, using `-1` to indicate that a particular dimension should be 

4793 padded to the maximum size of all batch elements. 

4794 padding_values: A list of `Tensor` objects. 

4795 A list of scalars containing the padding value to use for 

4796 each of the outputs. 

4797 drop_remainder: A `Tensor` of type `bool`. 

4798 A scalar representing whether the last batch should be dropped in case its size 

4799 is smaller than desired. 

4800 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

4801 parallel_copy: An optional `bool`. Defaults to `False`. 

4802 metadata: An optional `string`. Defaults to `""`. 

4803 name: A name for the operation (optional). 

4804 

4805 Returns: 

4806 A `Tensor` of type `variant`. 

4807 """ 

4808 _ctx = _context._context or _context.context() 

4809 tld = _ctx._thread_local_data 

4810 if tld.is_eager: 

4811 try: 

4812 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

4813 _ctx, "PaddedBatchDatasetV2", name, input_dataset, batch_size, 

4814 padded_shapes, padding_values, drop_remainder, "parallel_copy", 

4815 parallel_copy, "output_shapes", output_shapes, "metadata", metadata) 

4816 return _result 

4817 except _core._NotOkStatusException as e: 

4818 _ops.raise_from_not_ok_status(e, name) 

4819 except _core._FallbackException: 

4820 pass 

4821 try: 

4822 return padded_batch_dataset_v2_eager_fallback( 

4823 input_dataset, batch_size, padded_shapes, padding_values, 

4824 drop_remainder, parallel_copy=parallel_copy, 

4825 output_shapes=output_shapes, metadata=metadata, name=name, ctx=_ctx) 

4826 except _core._SymbolicException: 

4827 pass # Add nodes to the TensorFlow graph. 

4828 # Add nodes to the TensorFlow graph. 

4829 if not isinstance(padded_shapes, (list, tuple)): 

4830 raise TypeError( 

4831 "Expected list for 'padded_shapes' argument to " 

4832 "'padded_batch_dataset_v2' Op, not %r." % padded_shapes) 

4833 _attr_N = len(padded_shapes) 

4834 if not isinstance(output_shapes, (list, tuple)): 

4835 raise TypeError( 

4836 "Expected list for 'output_shapes' argument to " 

4837 "'padded_batch_dataset_v2' Op, not %r." % output_shapes) 

4838 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

4839 if parallel_copy is None: 

4840 parallel_copy = False 

4841 parallel_copy = _execute.make_bool(parallel_copy, "parallel_copy") 

4842 if metadata is None: 

4843 metadata = "" 

4844 metadata = _execute.make_str(metadata, "metadata") 

4845 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

4846 "PaddedBatchDatasetV2", input_dataset=input_dataset, 

4847 batch_size=batch_size, 

4848 padded_shapes=padded_shapes, 

4849 padding_values=padding_values, 

4850 drop_remainder=drop_remainder, 

4851 output_shapes=output_shapes, 

4852 parallel_copy=parallel_copy, 

4853 metadata=metadata, name=name) 

4854 _result = _outputs[:] 

4855 if _execute.must_record_gradient(): 

4856 _attrs = ("parallel_copy", _op._get_attr_bool("parallel_copy"), 

4857 "Toutput_types", _op.get_attr("Toutput_types"), "output_shapes", 

4858 _op.get_attr("output_shapes"), "N", _op._get_attr_int("N"), 

4859 "metadata", _op.get_attr("metadata")) 

4860 _inputs_flat = _op.inputs 

4861 _execute.record_gradient( 

4862 "PaddedBatchDatasetV2", _inputs_flat, _attrs, _result) 

4863 _result, = _result 

4864 return _result 

4865 

4866PaddedBatchDatasetV2 = tf_export("raw_ops.PaddedBatchDatasetV2")(_ops.to_raw_op(padded_batch_dataset_v2)) 

4867 

4868 

4869def padded_batch_dataset_v2_eager_fallback(input_dataset, batch_size, padded_shapes, padding_values, drop_remainder, output_shapes, parallel_copy, metadata, name, ctx): 

4870 if not isinstance(padded_shapes, (list, tuple)): 

4871 raise TypeError( 

4872 "Expected list for 'padded_shapes' argument to " 

4873 "'padded_batch_dataset_v2' Op, not %r." % padded_shapes) 

4874 _attr_N = len(padded_shapes) 

4875 if not isinstance(output_shapes, (list, tuple)): 

4876 raise TypeError( 

4877 "Expected list for 'output_shapes' argument to " 

4878 "'padded_batch_dataset_v2' Op, not %r." % output_shapes) 

4879 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

4880 if parallel_copy is None: 

4881 parallel_copy = False 

4882 parallel_copy = _execute.make_bool(parallel_copy, "parallel_copy") 

4883 if metadata is None: 

4884 metadata = "" 

4885 metadata = _execute.make_str(metadata, "metadata") 

4886 _attr_Toutput_types, padding_values = _execute.convert_to_mixed_eager_tensors(padding_values, ctx) 

4887 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

4888 batch_size = _ops.convert_to_tensor(batch_size, _dtypes.int64) 

4889 padded_shapes = _ops.convert_n_to_tensor(padded_shapes, _dtypes.int64) 

4890 drop_remainder = _ops.convert_to_tensor(drop_remainder, _dtypes.bool) 

4891 _inputs_flat = [input_dataset, batch_size] + list(padded_shapes) + list(padding_values) + [drop_remainder] 

4892 _attrs = ("parallel_copy", parallel_copy, "Toutput_types", 

4893 _attr_Toutput_types, "output_shapes", output_shapes, "N", _attr_N, 

4894 "metadata", metadata) 

4895 _result = _execute.execute(b"PaddedBatchDatasetV2", 1, inputs=_inputs_flat, 

4896 attrs=_attrs, ctx=ctx, name=name) 

4897 if _execute.must_record_gradient(): 

4898 _execute.record_gradient( 

4899 "PaddedBatchDatasetV2", _inputs_flat, _attrs, _result) 

4900 _result, = _result 

4901 return _result 

4902 
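The V2 sketch below differs from the V1 example only in the extra inputs: `drop_remainder` discards the final short batch (so the batch dimension can be declared statically), and `parallel_copy` is a plain attr defaulting to `False`.

import tensorflow as tf

def _padded_batch_v2_sketch():
  ds = tf.data.Dataset.range(5).map(tf.range)
  return tf.raw_ops.PaddedBatchDatasetV2(
      input_dataset=ds._variant_tensor,  # private handle; illustration only
      batch_size=tf.constant(2, tf.int64),
      padded_shapes=[tf.constant([-1], tf.int64)],
      padding_values=[tf.constant(0, tf.int64)],
      drop_remainder=tf.constant(True), parallel_copy=False,
      output_shapes=[tf.TensorShape([2, None])])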

4903 

4904def parallel_batch_dataset(input_dataset, batch_size, num_parallel_calls, drop_remainder, output_types, output_shapes, parallel_copy=False, deterministic="default", metadata="", name=None): 

4905 r"""TODO: add doc. 

4906 

4907 Args: 

4908 input_dataset: A `Tensor` of type `variant`. 

4909 batch_size: A `Tensor` of type `int64`. 

4910 num_parallel_calls: A `Tensor` of type `int64`. 

4911 drop_remainder: A `Tensor` of type `bool`. 

4912 output_types: A list of `tf.DTypes` that has length `>= 1`. 

4913 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

4914 parallel_copy: An optional `bool`. Defaults to `False`. 

4915 deterministic: An optional `string`. Defaults to `"default"`. 

4916 metadata: An optional `string`. Defaults to `""`. 

4917 name: A name for the operation (optional). 

4918 

4919 Returns: 

4920 A `Tensor` of type `variant`. 

4921 """ 

4922 _ctx = _context._context or _context.context() 

4923 tld = _ctx._thread_local_data 

4924 if tld.is_eager: 

4925 try: 

4926 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

4927 _ctx, "ParallelBatchDataset", name, input_dataset, batch_size, 

4928 num_parallel_calls, drop_remainder, "parallel_copy", parallel_copy, 

4929 "output_types", output_types, "output_shapes", output_shapes, 

4930 "deterministic", deterministic, "metadata", metadata) 

4931 return _result 

4932 except _core._NotOkStatusException as e: 

4933 _ops.raise_from_not_ok_status(e, name) 

4934 except _core._FallbackException: 

4935 pass 

4936 try: 

4937 return parallel_batch_dataset_eager_fallback( 

4938 input_dataset, batch_size, num_parallel_calls, drop_remainder, 

4939 parallel_copy=parallel_copy, output_types=output_types, 

4940 output_shapes=output_shapes, deterministic=deterministic, 

4941 metadata=metadata, name=name, ctx=_ctx) 

4942 except _core._SymbolicException: 

4943 pass # Add nodes to the TensorFlow graph. 

4944 # Add nodes to the TensorFlow graph. 

4945 if not isinstance(output_types, (list, tuple)): 

4946 raise TypeError( 

4947 "Expected list for 'output_types' argument to " 

4948 "'parallel_batch_dataset' Op, not %r." % output_types) 

4949 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

4950 if not isinstance(output_shapes, (list, tuple)): 

4951 raise TypeError( 

4952 "Expected list for 'output_shapes' argument to " 

4953 "'parallel_batch_dataset' Op, not %r." % output_shapes) 

4954 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

4955 if parallel_copy is None: 

4956 parallel_copy = False 

4957 parallel_copy = _execute.make_bool(parallel_copy, "parallel_copy") 

4958 if deterministic is None: 

4959 deterministic = "default" 

4960 deterministic = _execute.make_str(deterministic, "deterministic") 

4961 if metadata is None: 

4962 metadata = "" 

4963 metadata = _execute.make_str(metadata, "metadata") 

4964 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

4965 "ParallelBatchDataset", input_dataset=input_dataset, 

4966 batch_size=batch_size, 

4967 num_parallel_calls=num_parallel_calls, 

4968 drop_remainder=drop_remainder, 

4969 output_types=output_types, 

4970 output_shapes=output_shapes, 

4971 parallel_copy=parallel_copy, 

4972 deterministic=deterministic, 

4973 metadata=metadata, name=name) 

4974 _result = _outputs[:] 

4975 if _execute.must_record_gradient(): 

4976 _attrs = ("parallel_copy", _op._get_attr_bool("parallel_copy"), 

4977 "output_types", _op.get_attr("output_types"), "output_shapes", 

4978 _op.get_attr("output_shapes"), "deterministic", 

4979 _op.get_attr("deterministic"), "metadata", 

4980 _op.get_attr("metadata")) 

4981 _inputs_flat = _op.inputs 

4982 _execute.record_gradient( 

4983 "ParallelBatchDataset", _inputs_flat, _attrs, _result) 

4984 _result, = _result 

4985 return _result 

4986 

4987ParallelBatchDataset = tf_export("raw_ops.ParallelBatchDataset")(_ops.to_raw_op(parallel_batch_dataset)) 

4988 

4989 

4990def parallel_batch_dataset_eager_fallback(input_dataset, batch_size, num_parallel_calls, drop_remainder, output_types, output_shapes, parallel_copy, deterministic, metadata, name, ctx): 

4991 if not isinstance(output_types, (list, tuple)): 

4992 raise TypeError( 

4993 "Expected list for 'output_types' argument to " 

4994 "'parallel_batch_dataset' Op, not %r." % output_types) 

4995 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

4996 if not isinstance(output_shapes, (list, tuple)): 

4997 raise TypeError( 

4998 "Expected list for 'output_shapes' argument to " 

4999 "'parallel_batch_dataset' Op, not %r." % output_shapes) 

5000 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

5001 if parallel_copy is None: 

5002 parallel_copy = False 

5003 parallel_copy = _execute.make_bool(parallel_copy, "parallel_copy") 

5004 if deterministic is None: 

5005 deterministic = "default" 

5006 deterministic = _execute.make_str(deterministic, "deterministic") 

5007 if metadata is None: 

5008 metadata = "" 

5009 metadata = _execute.make_str(metadata, "metadata") 

5010 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

5011 batch_size = _ops.convert_to_tensor(batch_size, _dtypes.int64) 

5012 num_parallel_calls = _ops.convert_to_tensor(num_parallel_calls, _dtypes.int64) 

5013 drop_remainder = _ops.convert_to_tensor(drop_remainder, _dtypes.bool) 

5014 _inputs_flat = [input_dataset, batch_size, num_parallel_calls, drop_remainder] 

5015 _attrs = ("parallel_copy", parallel_copy, "output_types", output_types, 

5016 "output_shapes", output_shapes, "deterministic", deterministic, "metadata", 

5017 metadata) 

5018 _result = _execute.execute(b"ParallelBatchDataset", 1, inputs=_inputs_flat, 

5019 attrs=_attrs, ctx=ctx, name=name) 

5020 if _execute.must_record_gradient(): 

5021 _execute.record_gradient( 

5022 "ParallelBatchDataset", _inputs_flat, _attrs, _result) 

5023 _result, = _result 

5024 return _result 

5025 

5026 
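# Usage sketch for the op above, assuming TF >= 2.4 in eager mode: batch()
# with num_parallel_calls set lowers to ParallelBatchDataset. The helper name
# `_example_parallel_batch` is hypothetical, not part of this generated module.
def _example_parallel_batch():  # hypothetical helper, for illustration only
    import tensorflow as tf
    ds = tf.data.Dataset.range(10).batch(
        4, num_parallel_calls=tf.data.AUTOTUNE, drop_remainder=False)
    # drop_remainder=False keeps the final short batch [8, 9].
    return [b.numpy().tolist() for b in ds]  # [[0,1,2,3],[4,5,6,7],[8,9]]
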

5027def parallel_filter_dataset(input_dataset, other_arguments, num_parallel_calls, predicate, output_types, output_shapes, deterministic="default", metadata="", name=None): 

5028 r"""Creates a dataset containing elements of `input_dataset` matching `predicate`. 

5029 

5030 The `predicate` function must return a scalar boolean and accept the 

5031 following arguments: 

5032 

5033 * One tensor for each component of an element of `input_dataset`. 

5034 * One tensor for each value in `other_arguments`. 

5035 

5036 Unlike a "FilterDataset", which applies `predicate` sequentially, this dataset 

5037 invokes up to `num_parallel_calls` copies of `predicate` in parallel. 

5038 

5039 Args: 

5040 input_dataset: A `Tensor` of type `variant`. 

5041 other_arguments: A list of `Tensor` objects. 

5042 A list of tensors, typically values that were captured when 

5043 building a closure for `predicate`. 

5044 num_parallel_calls: A `Tensor` of type `int64`. 

5045 The number of concurrent invocations of `predicate` that process 

5046 elements from `input_dataset` in parallel. 

5047 predicate: A function decorated with @Defun. 

5048 A function returning a scalar boolean. 

5049 output_types: A list of `tf.DTypes` that has length `>= 1`. 

5050 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

5051 deterministic: An optional `string`. Defaults to `"default"`. 

5052 A string indicating the op-level determinism to use. `deterministic` controls

5053 whether the filter is allowed to return elements out of order if the next

5054 element to be returned isn't available, but a later element is. Options are

5055 "true", "false", and "default". "default" indicates that determinism should be 

5056 decided by the `experimental_deterministic` parameter of `tf.data.Options`. 

5057 metadata: An optional `string`. Defaults to `""`. 

5058 name: A name for the operation (optional). 

5059 

5060 Returns: 

5061 A `Tensor` of type `variant`. 

5062 """ 

5063 _ctx = _context._context or _context.context() 

5064 tld = _ctx._thread_local_data 

5065 if tld.is_eager: 

5066 try: 

5067 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

5068 _ctx, "ParallelFilterDataset", name, input_dataset, other_arguments, 

5069 num_parallel_calls, "predicate", predicate, "deterministic", 

5070 deterministic, "output_types", output_types, "output_shapes", 

5071 output_shapes, "metadata", metadata) 

5072 return _result 

5073 except _core._NotOkStatusException as e: 

5074 _ops.raise_from_not_ok_status(e, name) 

5075 except _core._FallbackException: 

5076 pass 

5077 try: 

5078 return parallel_filter_dataset_eager_fallback( 

5079 input_dataset, other_arguments, num_parallel_calls, 

5080 predicate=predicate, deterministic=deterministic, 

5081 output_types=output_types, output_shapes=output_shapes, 

5082 metadata=metadata, name=name, ctx=_ctx) 

5083 except _core._SymbolicException: 

5084 pass

5085 # Add nodes to the TensorFlow graph. 

5086 if not isinstance(output_types, (list, tuple)): 

5087 raise TypeError( 

5088 "Expected list for 'output_types' argument to " 

5089 "'parallel_filter_dataset' Op, not %r." % output_types) 

5090 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

5091 if not isinstance(output_shapes, (list, tuple)): 

5092 raise TypeError( 

5093 "Expected list for 'output_shapes' argument to " 

5094 "'parallel_filter_dataset' Op, not %r." % output_shapes) 

5095 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

5096 if deterministic is None: 

5097 deterministic = "default" 

5098 deterministic = _execute.make_str(deterministic, "deterministic") 

5099 if metadata is None: 

5100 metadata = "" 

5101 metadata = _execute.make_str(metadata, "metadata") 

5102 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

5103 "ParallelFilterDataset", input_dataset=input_dataset, 

5104 other_arguments=other_arguments, 

5105 num_parallel_calls=num_parallel_calls, 

5106 predicate=predicate, 

5107 output_types=output_types, 

5108 output_shapes=output_shapes, 

5109 deterministic=deterministic, 

5110 metadata=metadata, name=name) 

5111 _result = _outputs[:] 

5112 if _execute.must_record_gradient(): 

5113 _attrs = ("predicate", _op.get_attr("predicate"), "deterministic", 

5114 _op.get_attr("deterministic"), "Targuments", 

5115 _op.get_attr("Targuments"), "output_types", 

5116 _op.get_attr("output_types"), "output_shapes", 

5117 _op.get_attr("output_shapes"), "metadata", 

5118 _op.get_attr("metadata")) 

5119 _inputs_flat = _op.inputs 

5120 _execute.record_gradient( 

5121 "ParallelFilterDataset", _inputs_flat, _attrs, _result) 

5122 _result, = _result 

5123 return _result 

5124 

5125ParallelFilterDataset = tf_export("raw_ops.ParallelFilterDataset")(_ops.to_raw_op(parallel_filter_dataset)) 

5126 

5127 

5128def parallel_filter_dataset_eager_fallback(input_dataset, other_arguments, num_parallel_calls, predicate, output_types, output_shapes, deterministic, metadata, name, ctx): 

5129 if not isinstance(output_types, (list, tuple)): 

5130 raise TypeError( 

5131 "Expected list for 'output_types' argument to " 

5132 "'parallel_filter_dataset' Op, not %r." % output_types) 

5133 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

5134 if not isinstance(output_shapes, (list, tuple)): 

5135 raise TypeError( 

5136 "Expected list for 'output_shapes' argument to " 

5137 "'parallel_filter_dataset' Op, not %r." % output_shapes) 

5138 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

5139 if deterministic is None: 

5140 deterministic = "default" 

5141 deterministic = _execute.make_str(deterministic, "deterministic") 

5142 if metadata is None: 

5143 metadata = "" 

5144 metadata = _execute.make_str(metadata, "metadata") 

5145 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx) 

5146 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

5147 num_parallel_calls = _ops.convert_to_tensor(num_parallel_calls, _dtypes.int64) 

5148 _inputs_flat = [input_dataset] + list(other_arguments) + [num_parallel_calls] 

5149 _attrs = ("predicate", predicate, "deterministic", deterministic, 

5150 "Targuments", _attr_Targuments, "output_types", output_types, 

5151 "output_shapes", output_shapes, "metadata", metadata) 

5152 _result = _execute.execute(b"ParallelFilterDataset", 1, inputs=_inputs_flat, 

5153 attrs=_attrs, ctx=ctx, name=name) 

5154 if _execute.must_record_gradient(): 

5155 _execute.record_gradient( 

5156 "ParallelFilterDataset", _inputs_flat, _attrs, _result) 

5157 _result, = _result 

5158 return _result 

5159 

5160 
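# Predicate sketch for the op above. Plain Dataset.filter lowers to the
# sequential FilterDataset; ParallelFilterDataset is its parallel counterpart
# with the same predicate contract (scalar tf.bool output). The helper name
# `_example_filter_predicate` is hypothetical.
def _example_filter_predicate():  # hypothetical helper, for illustration only
    import tensorflow as tf
    ds = tf.data.Dataset.range(10).filter(lambda x: x % 2 == 0)
    return [int(v) for v in ds]  # [0, 2, 4, 6, 8]
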

5161def parallel_interleave_dataset_v2(input_dataset, other_arguments, cycle_length, block_length, num_parallel_calls, f, output_types, output_shapes, sloppy=False, metadata="", name=None): 

5162 r"""Creates a dataset that applies `f` to the outputs of `input_dataset`. 

5163 

5164 The resulting dataset is similar to the `InterleaveDataset`, except that the 

5165 dataset will fetch records from the interleaved datasets in parallel. 

5166 

5167 The `tf.data` Python API creates instances of this op from 

5168 `Dataset.interleave()` when the `num_parallel_calls` parameter of that method 

5169 is set to any value other than `None`. 

5170 

5171 By default, the output of this dataset will be deterministic, which may result 

5172 in the dataset blocking if the next data item to be returned isn't available. 

5173 In order to avoid head-of-line blocking, one can set the 

5174 `experimental_deterministic` parameter of `tf.data.Options` to `False`, 

5175 which can improve performance at the expense of non-determinism. 

5176 

5177 Args: 

5178 input_dataset: A `Tensor` of type `variant`. 

5179 Dataset that produces a stream of arguments for the function `f`. 

5180 other_arguments: A list of `Tensor` objects. 

5181 Additional arguments to pass to `f` beyond those produced by `input_dataset`. 

5182 Evaluated once when the dataset is instantiated. 

5183 cycle_length: A `Tensor` of type `int64`. 

5184 Number of datasets (each created by applying `f` to the elements of 

5185 `input_dataset`) among which the `ParallelInterleaveDatasetV2` will cycle in a 

5186 round-robin fashion. 

5187 block_length: A `Tensor` of type `int64`. 

5188 Number of elements at a time to produce from each interleaved invocation of a 

5189 dataset returned by `f`. 

5190 num_parallel_calls: A `Tensor` of type `int64`. 

5191 Determines the number of threads that should be used for fetching data from 

5192 input datasets in parallel. The Python API `tf.data.experimental.AUTOTUNE` 

5193 constant can be used to indicate that the level of parallelism should be autotuned. 

5194 f: A function decorated with @Defun. 

5195 A function mapping elements of `input_dataset`, concatenated with 

5196 `other_arguments`, to a Dataset variant that contains elements matching 

5197 `output_types` and `output_shapes`. 

5198 output_types: A list of `tf.DTypes` that has length `>= 1`. 

5199 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

5200 sloppy: An optional `bool`. Defaults to `False`. 

5201 metadata: An optional `string`. Defaults to `""`. 

5202 name: A name for the operation (optional). 

5203 

5204 Returns: 

5205 A `Tensor` of type `variant`. 

5206 """ 

5207 _ctx = _context._context or _context.context() 

5208 tld = _ctx._thread_local_data 

5209 if tld.is_eager: 

5210 try: 

5211 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

5212 _ctx, "ParallelInterleaveDatasetV2", name, input_dataset, 

5213 other_arguments, cycle_length, block_length, num_parallel_calls, "f", 

5214 f, "output_types", output_types, "output_shapes", output_shapes, 

5215 "sloppy", sloppy, "metadata", metadata) 

5216 return _result 

5217 except _core._NotOkStatusException as e: 

5218 _ops.raise_from_not_ok_status(e, name) 

5219 except _core._FallbackException: 

5220 pass 

5221 try: 

5222 return parallel_interleave_dataset_v2_eager_fallback( 

5223 input_dataset, other_arguments, cycle_length, block_length, 

5224 num_parallel_calls, f=f, output_types=output_types, 

5225 output_shapes=output_shapes, sloppy=sloppy, metadata=metadata, 

5226 name=name, ctx=_ctx) 

5227 except _core._SymbolicException: 

5228 pass

5229 # Add nodes to the TensorFlow graph. 

5230 if not isinstance(output_types, (list, tuple)): 

5231 raise TypeError( 

5232 "Expected list for 'output_types' argument to " 

5233 "'parallel_interleave_dataset_v2' Op, not %r." % output_types) 

5234 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

5235 if not isinstance(output_shapes, (list, tuple)): 

5236 raise TypeError( 

5237 "Expected list for 'output_shapes' argument to " 

5238 "'parallel_interleave_dataset_v2' Op, not %r." % output_shapes) 

5239 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

5240 if sloppy is None: 

5241 sloppy = False 

5242 sloppy = _execute.make_bool(sloppy, "sloppy") 

5243 if metadata is None: 

5244 metadata = "" 

5245 metadata = _execute.make_str(metadata, "metadata") 

5246 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

5247 "ParallelInterleaveDatasetV2", input_dataset=input_dataset, 

5248 other_arguments=other_arguments, 

5249 cycle_length=cycle_length, 

5250 block_length=block_length, 

5251 num_parallel_calls=num_parallel_calls, 

5252 f=f, output_types=output_types, 

5253 output_shapes=output_shapes, 

5254 sloppy=sloppy, metadata=metadata, 

5255 name=name) 

5256 _result = _outputs[:] 

5257 if _execute.must_record_gradient(): 

5258 _attrs = ("f", _op.get_attr("f"), "Targuments", 

5259 _op.get_attr("Targuments"), "output_types", 

5260 _op.get_attr("output_types"), "output_shapes", 

5261 _op.get_attr("output_shapes"), "sloppy", 

5262 _op._get_attr_bool("sloppy"), "metadata", 

5263 _op.get_attr("metadata")) 

5264 _inputs_flat = _op.inputs 

5265 _execute.record_gradient( 

5266 "ParallelInterleaveDatasetV2", _inputs_flat, _attrs, _result) 

5267 _result, = _result 

5268 return _result 

5269 

5270ParallelInterleaveDatasetV2 = tf_export("raw_ops.ParallelInterleaveDatasetV2")(_ops.to_raw_op(parallel_interleave_dataset_v2)) 

5271 

5272 

5273def parallel_interleave_dataset_v2_eager_fallback(input_dataset, other_arguments, cycle_length, block_length, num_parallel_calls, f, output_types, output_shapes, sloppy, metadata, name, ctx): 

5274 if not isinstance(output_types, (list, tuple)): 

5275 raise TypeError( 

5276 "Expected list for 'output_types' argument to " 

5277 "'parallel_interleave_dataset_v2' Op, not %r." % output_types) 

5278 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

5279 if not isinstance(output_shapes, (list, tuple)): 

5280 raise TypeError( 

5281 "Expected list for 'output_shapes' argument to " 

5282 "'parallel_interleave_dataset_v2' Op, not %r." % output_shapes) 

5283 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

5284 if sloppy is None: 

5285 sloppy = False 

5286 sloppy = _execute.make_bool(sloppy, "sloppy") 

5287 if metadata is None: 

5288 metadata = "" 

5289 metadata = _execute.make_str(metadata, "metadata") 

5290 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx) 

5291 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

5292 cycle_length = _ops.convert_to_tensor(cycle_length, _dtypes.int64) 

5293 block_length = _ops.convert_to_tensor(block_length, _dtypes.int64) 

5294 num_parallel_calls = _ops.convert_to_tensor(num_parallel_calls, _dtypes.int64) 

5295 _inputs_flat = [input_dataset] + list(other_arguments) + [cycle_length, block_length, num_parallel_calls] 

5296 _attrs = ("f", f, "Targuments", _attr_Targuments, "output_types", 

5297 output_types, "output_shapes", output_shapes, "sloppy", sloppy, "metadata", 

5298 metadata) 

5299 _result = _execute.execute(b"ParallelInterleaveDatasetV2", 1, 

5300 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

5301 name=name) 

5302 if _execute.must_record_gradient(): 

5303 _execute.record_gradient( 

5304 "ParallelInterleaveDatasetV2", _inputs_flat, _attrs, _result) 

5305 _result, = _result 

5306 return _result 

5307 

5308 
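# Usage sketch for the op above, assuming TF 2.x eager mode: interleave()
# with num_parallel_calls set lowers to a ParallelInterleaveDataset variant
# (historically V2; newer releases emit V3/V4). Disabling determinism via
# tf.data.Options maps onto the `sloppy` attr. `_example_interleave_sloppy`
# is a hypothetical helper name.
def _example_interleave_sloppy():  # hypothetical helper, for illustration only
    import tensorflow as tf
    ds = tf.data.Dataset.range(4).interleave(
        lambda x: tf.data.Dataset.from_tensors(x).repeat(2),
        cycle_length=2, block_length=1,
        num_parallel_calls=tf.data.AUTOTUNE)
    opts = tf.data.Options()
    # TF >= 2.6; older releases spell this opts.experimental_deterministic.
    opts.deterministic = False  # elements may now come back out of order
    return [int(v) for v in ds.with_options(opts)]
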

5309def parallel_interleave_dataset_v3(input_dataset, other_arguments, cycle_length, block_length, num_parallel_calls, f, output_types, output_shapes, deterministic="default", metadata="", name=None): 

5310 r"""Creates a dataset that applies `f` to the outputs of `input_dataset`. 

5311 

5312 The resulting dataset is similar to the `InterleaveDataset`, except that the 

5313 dataset will fetch records from the interleaved datasets in parallel. 

5314 

5315 The `tf.data` Python API creates instances of this op from 

5316 `Dataset.interleave()` when the `num_parallel_calls` parameter of that method 

5317 is set to any value other than `None`. 

5318 

5319 By default, the output of this dataset will be deterministic, which may result 

5320 in the dataset blocking if the next data item to be returned isn't available. 

5321 In order to avoid head-of-line blocking, one can either set the `deterministic` 

5322 attribute to "false", or leave it as "default" and set the 

5323 `experimental_deterministic` parameter of `tf.data.Options` to `False`. 

5324 This can improve performance at the expense of non-determinism. 

5325 

5326 Args: 

5327 input_dataset: A `Tensor` of type `variant`. 

5328 Dataset that produces a stream of arguments for the function `f`. 

5329 other_arguments: A list of `Tensor` objects. 

5330 Additional arguments to pass to `f` beyond those produced by `input_dataset`. 

5331 Evaluated once when the dataset is instantiated. 

5332 cycle_length: A `Tensor` of type `int64`. 

5333 Number of datasets (each created by applying `f` to the elements of 

5334 `input_dataset`) among which the `ParallelInterleaveDatasetV3` will cycle in a

5335 round-robin fashion. 

5336 block_length: A `Tensor` of type `int64`. 

5337 Number of elements at a time to produce from each interleaved invocation of a 

5338 dataset returned by `f`. 

5339 num_parallel_calls: A `Tensor` of type `int64`. 

5340 Determines the number of threads that should be used for fetching data from 

5341 input datasets in parallel. The Python API `tf.data.experimental.AUTOTUNE` 

5342 constant can be used to indicate that the level of parallelism should be autotuned. 

5343 f: A function decorated with @Defun. 

5344 A function mapping elements of `input_dataset`, concatenated with 

5345 `other_arguments`, to a Dataset variant that contains elements matching 

5346 `output_types` and `output_shapes`. 

5347 output_types: A list of `tf.DTypes` that has length `>= 1`. 

5348 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

5349 deterministic: An optional `string`. Defaults to `"default"`. 

5350 A string indicating the op-level determinism to use. `deterministic` controls

5351 whether the interleave is allowed to return elements out of order if the next 

5352 element to be returned isn't available, but a later element is. Options are 

5353 "true", "false", and "default". "default" indicates that determinism should be 

5354 decided by the `experimental_deterministic` parameter of `tf.data.Options`. 

5355 metadata: An optional `string`. Defaults to `""`. 

5356 name: A name for the operation (optional). 

5357 

5358 Returns: 

5359 A `Tensor` of type `variant`. 

5360 """ 

5361 _ctx = _context._context or _context.context() 

5362 tld = _ctx._thread_local_data 

5363 if tld.is_eager: 

5364 try: 

5365 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

5366 _ctx, "ParallelInterleaveDatasetV3", name, input_dataset, 

5367 other_arguments, cycle_length, block_length, num_parallel_calls, "f", 

5368 f, "deterministic", deterministic, "output_types", output_types, 

5369 "output_shapes", output_shapes, "metadata", metadata) 

5370 return _result 

5371 except _core._NotOkStatusException as e: 

5372 _ops.raise_from_not_ok_status(e, name) 

5373 except _core._FallbackException: 

5374 pass 

5375 try: 

5376 return parallel_interleave_dataset_v3_eager_fallback( 

5377 input_dataset, other_arguments, cycle_length, block_length, 

5378 num_parallel_calls, f=f, deterministic=deterministic, 

5379 output_types=output_types, output_shapes=output_shapes, 

5380 metadata=metadata, name=name, ctx=_ctx) 

5381 except _core._SymbolicException: 

5382 pass

5383 # Add nodes to the TensorFlow graph. 

5384 if not isinstance(output_types, (list, tuple)): 

5385 raise TypeError( 

5386 "Expected list for 'output_types' argument to " 

5387 "'parallel_interleave_dataset_v3' Op, not %r." % output_types) 

5388 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

5389 if not isinstance(output_shapes, (list, tuple)): 

5390 raise TypeError( 

5391 "Expected list for 'output_shapes' argument to " 

5392 "'parallel_interleave_dataset_v3' Op, not %r." % output_shapes) 

5393 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

5394 if deterministic is None: 

5395 deterministic = "default" 

5396 deterministic = _execute.make_str(deterministic, "deterministic") 

5397 if metadata is None: 

5398 metadata = "" 

5399 metadata = _execute.make_str(metadata, "metadata") 

5400 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

5401 "ParallelInterleaveDatasetV3", input_dataset=input_dataset, 

5402 other_arguments=other_arguments, 

5403 cycle_length=cycle_length, 

5404 block_length=block_length, 

5405 num_parallel_calls=num_parallel_calls, 

5406 f=f, output_types=output_types, 

5407 output_shapes=output_shapes, 

5408 deterministic=deterministic, 

5409 metadata=metadata, name=name) 

5410 _result = _outputs[:] 

5411 if _execute.must_record_gradient(): 

5412 _attrs = ("f", _op.get_attr("f"), "deterministic", 

5413 _op.get_attr("deterministic"), "Targuments", 

5414 _op.get_attr("Targuments"), "output_types", 

5415 _op.get_attr("output_types"), "output_shapes", 

5416 _op.get_attr("output_shapes"), "metadata", 

5417 _op.get_attr("metadata")) 

5418 _inputs_flat = _op.inputs 

5419 _execute.record_gradient( 

5420 "ParallelInterleaveDatasetV3", _inputs_flat, _attrs, _result) 

5421 _result, = _result 

5422 return _result 

5423 

5424ParallelInterleaveDatasetV3 = tf_export("raw_ops.ParallelInterleaveDatasetV3")(_ops.to_raw_op(parallel_interleave_dataset_v3)) 

5425 

5426 

5427def parallel_interleave_dataset_v3_eager_fallback(input_dataset, other_arguments, cycle_length, block_length, num_parallel_calls, f, output_types, output_shapes, deterministic, metadata, name, ctx): 

5428 if not isinstance(output_types, (list, tuple)): 

5429 raise TypeError( 

5430 "Expected list for 'output_types' argument to " 

5431 "'parallel_interleave_dataset_v3' Op, not %r." % output_types) 

5432 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

5433 if not isinstance(output_shapes, (list, tuple)): 

5434 raise TypeError( 

5435 "Expected list for 'output_shapes' argument to " 

5436 "'parallel_interleave_dataset_v3' Op, not %r." % output_shapes) 

5437 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

5438 if deterministic is None: 

5439 deterministic = "default" 

5440 deterministic = _execute.make_str(deterministic, "deterministic") 

5441 if metadata is None: 

5442 metadata = "" 

5443 metadata = _execute.make_str(metadata, "metadata") 

5444 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx) 

5445 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

5446 cycle_length = _ops.convert_to_tensor(cycle_length, _dtypes.int64) 

5447 block_length = _ops.convert_to_tensor(block_length, _dtypes.int64) 

5448 num_parallel_calls = _ops.convert_to_tensor(num_parallel_calls, _dtypes.int64) 

5449 _inputs_flat = [input_dataset] + list(other_arguments) + [cycle_length, block_length, num_parallel_calls] 

5450 _attrs = ("f", f, "deterministic", deterministic, "Targuments", 

5451 _attr_Targuments, "output_types", output_types, "output_shapes", 

5452 output_shapes, "metadata", metadata) 

5453 _result = _execute.execute(b"ParallelInterleaveDatasetV3", 1, 

5454 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

5455 name=name) 

5456 if _execute.must_record_gradient(): 

5457 _execute.record_gradient( 

5458 "ParallelInterleaveDatasetV3", _inputs_flat, _attrs, _result) 

5459 _result, = _result 

5460 return _result 

5461 

5462 
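# Usage sketch for the op above, assuming TF >= 2.2: interleave() exposes a
# `deterministic` argument that maps onto this op's string attr
# ("true"/"false"/"default"). `_example_interleave_deterministic` is a
# hypothetical helper name.
def _example_interleave_deterministic():  # hypothetical helper, illustration
    import tensorflow as tf
    ds = tf.data.Dataset.range(3).interleave(
        lambda x: tf.data.Dataset.from_tensors(x).repeat(2),
        cycle_length=2, block_length=1,
        num_parallel_calls=tf.data.AUTOTUNE, deterministic=True)
    return [int(v) for v in ds]  # reproducible order: [0, 1, 0, 1, 2, 2]
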

5463def parallel_interleave_dataset_v4(input_dataset, other_arguments, cycle_length, block_length, buffer_output_elements, prefetch_input_elements, num_parallel_calls, f, output_types, output_shapes, deterministic="default", metadata="", name=None): 

5464 r"""Creates a dataset that applies `f` to the outputs of `input_dataset`. 

5465 

5466 The resulting dataset is similar to the `InterleaveDataset`, except that the 

5467 dataset will fetch records from the interleaved datasets in parallel. 

5468 

5469 The `tf.data` Python API creates instances of this op from 

5470 `Dataset.interleave()` when the `num_parallel_calls` parameter of that method 

5471 is set to any value other than `None`. 

5472 

5473 By default, the output of this dataset will be deterministic, which may result 

5474 in the dataset blocking if the next data item to be returned isn't available. 

5475 In order to avoid head-of-line blocking, one can either set the `deterministic` 

5476 attribute to "false", or leave it as "default" and set the 

5477 `experimental_deterministic` parameter of `tf.data.Options` to `False`. 

5478 This can improve performance at the expense of non-determinism. 

5479 

5480 Args: 

5481 input_dataset: A `Tensor` of type `variant`. 

5482 Dataset that produces a stream of arguments for the function `f`. 

5483 other_arguments: A list of `Tensor` objects. 

5484 Additional arguments to pass to `f` beyond those produced by `input_dataset`. 

5485 Evaluated once when the dataset is instantiated. 

5486 cycle_length: A `Tensor` of type `int64`. 

5487 Number of datasets (each created by applying `f` to the elements of 

5488 `input_dataset`) among which the `ParallelInterleaveDatasetV4` will cycle in a

5489 round-robin fashion. 

5490 block_length: A `Tensor` of type `int64`. 

5491 Number of elements at a time to produce from each interleaved invocation of a 

5492 dataset returned by `f`. 

5493 buffer_output_elements: A `Tensor` of type `int64`. 

5494 The number of elements each iterator being interleaved should buffer (similar 

5495 to the `.prefetch()` transformation for each interleaved iterator). 

5496 prefetch_input_elements: A `Tensor` of type `int64`. 

5497 Determines the number of iterators to prefetch, allowing buffers to warm up and 

5498 data to be pre-fetched without blocking the main thread. 

5499 num_parallel_calls: A `Tensor` of type `int64`. 

5500 Determines the number of threads that should be used for fetching data from 

5501 input datasets in parallel. The Python API `tf.data.experimental.AUTOTUNE` 

5502 constant can be used to indicate that the level of parallelism should be autotuned. 

5503 f: A function decorated with @Defun. 

5504 A function mapping elements of `input_dataset`, concatenated with 

5505 `other_arguments`, to a Dataset variant that contains elements matching 

5506 `output_types` and `output_shapes`. 

5507 output_types: A list of `tf.DTypes` that has length `>= 1`. 

5508 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

5509 deterministic: An optional `string`. Defaults to `"default"`. 

5510 A string indicating the op-level determinism to use. `deterministic` controls

5511 whether the interleave is allowed to return elements out of order if the next 

5512 element to be returned isn't available, but a later element is. Options are 

5513 "true", "false", and "default". "default" indicates that determinism should be 

5514 decided by the `experimental_deterministic` parameter of `tf.data.Options`. 

5515 metadata: An optional `string`. Defaults to `""`. 

5516 name: A name for the operation (optional). 

5517 

5518 Returns: 

5519 A `Tensor` of type `variant`. 

5520 """ 

5521 _ctx = _context._context or _context.context() 

5522 tld = _ctx._thread_local_data 

5523 if tld.is_eager: 

5524 try: 

5525 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

5526 _ctx, "ParallelInterleaveDatasetV4", name, input_dataset, 

5527 other_arguments, cycle_length, block_length, buffer_output_elements, 

5528 prefetch_input_elements, num_parallel_calls, "f", f, "deterministic", 

5529 deterministic, "output_types", output_types, "output_shapes", 

5530 output_shapes, "metadata", metadata) 

5531 return _result 

5532 except _core._NotOkStatusException as e: 

5533 _ops.raise_from_not_ok_status(e, name) 

5534 except _core._FallbackException: 

5535 pass 

5536 try: 

5537 return parallel_interleave_dataset_v4_eager_fallback( 

5538 input_dataset, other_arguments, cycle_length, block_length, 

5539 buffer_output_elements, prefetch_input_elements, num_parallel_calls, 

5540 f=f, deterministic=deterministic, output_types=output_types, 

5541 output_shapes=output_shapes, metadata=metadata, name=name, ctx=_ctx) 

5542 except _core._SymbolicException: 

5543 pass

5544 # Add nodes to the TensorFlow graph. 

5545 if not isinstance(output_types, (list, tuple)): 

5546 raise TypeError( 

5547 "Expected list for 'output_types' argument to " 

5548 "'parallel_interleave_dataset_v4' Op, not %r." % output_types) 

5549 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

5550 if not isinstance(output_shapes, (list, tuple)): 

5551 raise TypeError( 

5552 "Expected list for 'output_shapes' argument to " 

5553 "'parallel_interleave_dataset_v4' Op, not %r." % output_shapes) 

5554 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

5555 if deterministic is None: 

5556 deterministic = "default" 

5557 deterministic = _execute.make_str(deterministic, "deterministic") 

5558 if metadata is None: 

5559 metadata = "" 

5560 metadata = _execute.make_str(metadata, "metadata") 

5561 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

5562 "ParallelInterleaveDatasetV4", input_dataset=input_dataset, 

5563 other_arguments=other_arguments, 

5564 cycle_length=cycle_length, 

5565 block_length=block_length, 

5566 buffer_output_elements=buffer_output_elements, 

5567 prefetch_input_elements=prefetch_input_elements, 

5568 num_parallel_calls=num_parallel_calls, 

5569 f=f, output_types=output_types, 

5570 output_shapes=output_shapes, 

5571 deterministic=deterministic, 

5572 metadata=metadata, name=name) 

5573 _result = _outputs[:] 

5574 if _execute.must_record_gradient(): 

5575 _attrs = ("f", _op.get_attr("f"), "deterministic", 

5576 _op.get_attr("deterministic"), "Targuments", 

5577 _op.get_attr("Targuments"), "output_types", 

5578 _op.get_attr("output_types"), "output_shapes", 

5579 _op.get_attr("output_shapes"), "metadata", 

5580 _op.get_attr("metadata")) 

5581 _inputs_flat = _op.inputs 

5582 _execute.record_gradient( 

5583 "ParallelInterleaveDatasetV4", _inputs_flat, _attrs, _result) 

5584 _result, = _result 

5585 return _result 

5586 

5587ParallelInterleaveDatasetV4 = tf_export("raw_ops.ParallelInterleaveDatasetV4")(_ops.to_raw_op(parallel_interleave_dataset_v4)) 

5588 

5589 

5590def parallel_interleave_dataset_v4_eager_fallback(input_dataset, other_arguments, cycle_length, block_length, buffer_output_elements, prefetch_input_elements, num_parallel_calls, f, output_types, output_shapes, deterministic, metadata, name, ctx): 

5591 if not isinstance(output_types, (list, tuple)): 

5592 raise TypeError( 

5593 "Expected list for 'output_types' argument to " 

5594 "'parallel_interleave_dataset_v4' Op, not %r." % output_types) 

5595 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

5596 if not isinstance(output_shapes, (list, tuple)): 

5597 raise TypeError( 

5598 "Expected list for 'output_shapes' argument to " 

5599 "'parallel_interleave_dataset_v4' Op, not %r." % output_shapes) 

5600 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

5601 if deterministic is None: 

5602 deterministic = "default" 

5603 deterministic = _execute.make_str(deterministic, "deterministic") 

5604 if metadata is None: 

5605 metadata = "" 

5606 metadata = _execute.make_str(metadata, "metadata") 

5607 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx) 

5608 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

5609 cycle_length = _ops.convert_to_tensor(cycle_length, _dtypes.int64) 

5610 block_length = _ops.convert_to_tensor(block_length, _dtypes.int64) 

5611 buffer_output_elements = _ops.convert_to_tensor(buffer_output_elements, _dtypes.int64) 

5612 prefetch_input_elements = _ops.convert_to_tensor(prefetch_input_elements, _dtypes.int64) 

5613 num_parallel_calls = _ops.convert_to_tensor(num_parallel_calls, _dtypes.int64) 

5614 _inputs_flat = [input_dataset] + list(other_arguments) + [cycle_length, block_length, buffer_output_elements, prefetch_input_elements, num_parallel_calls] 

5615 _attrs = ("f", f, "deterministic", deterministic, "Targuments", 

5616 _attr_Targuments, "output_types", output_types, "output_shapes", 

5617 output_shapes, "metadata", metadata) 

5618 _result = _execute.execute(b"ParallelInterleaveDatasetV4", 1, 

5619 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

5620 name=name) 

5621 if _execute.must_record_gradient(): 

5622 _execute.record_gradient( 

5623 "ParallelInterleaveDatasetV4", _inputs_flat, _attrs, _result) 

5624 _result, = _result 

5625 return _result 

5626 

5627 
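# Usage sketch for the op above. Recent TF releases lower interleave() with
# num_parallel_calls to this V4 op; buffer_output_elements and
# prefetch_input_elements are chosen by tf.data itself (typically AUTOTUNE)
# rather than passed by the user. `_example_interleave_v4` is a hypothetical
# helper name, and the file names below are placeholders, never opened.
def _example_interleave_v4():  # hypothetical helper, for illustration only
    import tensorflow as tf
    files = tf.data.Dataset.from_tensor_slices(["a.txt", "b.txt"])
    ds = files.interleave(
        lambda path: tf.data.Dataset.from_tensors(path).repeat(3),
        cycle_length=2, num_parallel_calls=tf.data.AUTOTUNE)
    return [p.numpy() for p in ds]  # interleaved b"a.txt"/b"b.txt" strings
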

5628def parallel_map_dataset(input_dataset, other_arguments, num_parallel_calls, f, output_types, output_shapes, use_inter_op_parallelism=True, sloppy=False, preserve_cardinality=False, metadata="", name=None): 

5629 r"""Creates a dataset that applies `f` to the outputs of `input_dataset`. 

5630 

5631 Unlike a "MapDataset", which applies `f` sequentially, this dataset invokes up 

5632 to `num_parallel_calls` copies of `f` in parallel. 

5633 

5634 Args: 

5635 input_dataset: A `Tensor` of type `variant`. 

5636 other_arguments: A list of `Tensor` objects. 

5637 num_parallel_calls: A `Tensor` of type `int32`. 

5638 The number of concurrent invocations of `f` that process 

5639 elements from `input_dataset` in parallel. 

5640 f: A function decorated with @Defun. 

5641 output_types: A list of `tf.DTypes` that has length `>= 1`. 

5642 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

5643 use_inter_op_parallelism: An optional `bool`. Defaults to `True`. 

5644 sloppy: An optional `bool`. Defaults to `False`. 

5645 preserve_cardinality: An optional `bool`. Defaults to `False`. 

5646 metadata: An optional `string`. Defaults to `""`. 

5647 name: A name for the operation (optional). 

5648 

5649 Returns: 

5650 A `Tensor` of type `variant`. 

5651 """ 

5652 _ctx = _context._context or _context.context() 

5653 tld = _ctx._thread_local_data 

5654 if tld.is_eager: 

5655 try: 

5656 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

5657 _ctx, "ParallelMapDataset", name, input_dataset, other_arguments, 

5658 num_parallel_calls, "f", f, "output_types", output_types, 

5659 "output_shapes", output_shapes, "use_inter_op_parallelism", 

5660 use_inter_op_parallelism, "sloppy", sloppy, "preserve_cardinality", 

5661 preserve_cardinality, "metadata", metadata) 

5662 return _result 

5663 except _core._NotOkStatusException as e: 

5664 _ops.raise_from_not_ok_status(e, name) 

5665 except _core._FallbackException: 

5666 pass 

5667 try: 

5668 return parallel_map_dataset_eager_fallback( 

5669 input_dataset, other_arguments, num_parallel_calls, f=f, 

5670 output_types=output_types, output_shapes=output_shapes, 

5671 use_inter_op_parallelism=use_inter_op_parallelism, sloppy=sloppy, 

5672 preserve_cardinality=preserve_cardinality, metadata=metadata, 

5673 name=name, ctx=_ctx) 

5674 except _core._SymbolicException: 

5675 pass

5676 # Add nodes to the TensorFlow graph. 

5677 if not isinstance(output_types, (list, tuple)): 

5678 raise TypeError( 

5679 "Expected list for 'output_types' argument to " 

5680 "'parallel_map_dataset' Op, not %r." % output_types) 

5681 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

5682 if not isinstance(output_shapes, (list, tuple)): 

5683 raise TypeError( 

5684 "Expected list for 'output_shapes' argument to " 

5685 "'parallel_map_dataset' Op, not %r." % output_shapes) 

5686 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

5687 if use_inter_op_parallelism is None: 

5688 use_inter_op_parallelism = True 

5689 use_inter_op_parallelism = _execute.make_bool(use_inter_op_parallelism, "use_inter_op_parallelism") 

5690 if sloppy is None: 

5691 sloppy = False 

5692 sloppy = _execute.make_bool(sloppy, "sloppy") 

5693 if preserve_cardinality is None: 

5694 preserve_cardinality = False 

5695 preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality") 

5696 if metadata is None: 

5697 metadata = "" 

5698 metadata = _execute.make_str(metadata, "metadata") 

5699 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

5700 "ParallelMapDataset", input_dataset=input_dataset, 

5701 other_arguments=other_arguments, 

5702 num_parallel_calls=num_parallel_calls, f=f, 

5703 output_types=output_types, 

5704 output_shapes=output_shapes, 

5705 use_inter_op_parallelism=use_inter_op_parallelism, 

5706 sloppy=sloppy, 

5707 preserve_cardinality=preserve_cardinality, 

5708 metadata=metadata, name=name) 

5709 _result = _outputs[:] 

5710 if _execute.must_record_gradient(): 

5711 _attrs = ("f", _op.get_attr("f"), "Targuments", 

5712 _op.get_attr("Targuments"), "output_types", 

5713 _op.get_attr("output_types"), "output_shapes", 

5714 _op.get_attr("output_shapes"), "use_inter_op_parallelism", 

5715 _op._get_attr_bool("use_inter_op_parallelism"), "sloppy", 

5716 _op._get_attr_bool("sloppy"), "preserve_cardinality", 

5717 _op._get_attr_bool("preserve_cardinality"), "metadata", 

5718 _op.get_attr("metadata")) 

5719 _inputs_flat = _op.inputs 

5720 _execute.record_gradient( 

5721 "ParallelMapDataset", _inputs_flat, _attrs, _result) 

5722 _result, = _result 

5723 return _result 

5724 

5725ParallelMapDataset = tf_export("raw_ops.ParallelMapDataset")(_ops.to_raw_op(parallel_map_dataset)) 

5726 

5727 

5728def parallel_map_dataset_eager_fallback(input_dataset, other_arguments, num_parallel_calls, f, output_types, output_shapes, use_inter_op_parallelism, sloppy, preserve_cardinality, metadata, name, ctx): 

5729 if not isinstance(output_types, (list, tuple)): 

5730 raise TypeError( 

5731 "Expected list for 'output_types' argument to " 

5732 "'parallel_map_dataset' Op, not %r." % output_types) 

5733 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

5734 if not isinstance(output_shapes, (list, tuple)): 

5735 raise TypeError( 

5736 "Expected list for 'output_shapes' argument to " 

5737 "'parallel_map_dataset' Op, not %r." % output_shapes) 

5738 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

5739 if use_inter_op_parallelism is None: 

5740 use_inter_op_parallelism = True 

5741 use_inter_op_parallelism = _execute.make_bool(use_inter_op_parallelism, "use_inter_op_parallelism") 

5742 if sloppy is None: 

5743 sloppy = False 

5744 sloppy = _execute.make_bool(sloppy, "sloppy") 

5745 if preserve_cardinality is None: 

5746 preserve_cardinality = False 

5747 preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality") 

5748 if metadata is None: 

5749 metadata = "" 

5750 metadata = _execute.make_str(metadata, "metadata") 

5751 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx) 

5752 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

5753 num_parallel_calls = _ops.convert_to_tensor(num_parallel_calls, _dtypes.int32) 

5754 _inputs_flat = [input_dataset] + list(other_arguments) + [num_parallel_calls] 

5755 _attrs = ("f", f, "Targuments", _attr_Targuments, "output_types", 

5756 output_types, "output_shapes", output_shapes, "use_inter_op_parallelism", 

5757 use_inter_op_parallelism, "sloppy", sloppy, "preserve_cardinality", 

5758 preserve_cardinality, "metadata", metadata) 

5759 _result = _execute.execute(b"ParallelMapDataset", 1, inputs=_inputs_flat, 

5760 attrs=_attrs, ctx=ctx, name=name) 

5761 if _execute.must_record_gradient(): 

5762 _execute.record_gradient( 

5763 "ParallelMapDataset", _inputs_flat, _attrs, _result) 

5764 _result, = _result 

5765 return _result 

5766 

5767 
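# Usage sketch for the op above: map() with num_parallel_calls created this
# op in TF 1.x graphs (note the int32 num_parallel_calls input); current
# releases emit ParallelMapDatasetV2 instead, but the call pattern is the
# same. `_example_parallel_map_v1` is a hypothetical helper name.
def _example_parallel_map_v1():  # hypothetical helper, for illustration only
    import tensorflow as tf
    ds = tf.data.Dataset.range(5).map(lambda x: x * 2, num_parallel_calls=2)
    return [int(v) for v in ds]  # [0, 2, 4, 6, 8]
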

5768def parallel_map_dataset_v2(input_dataset, other_arguments, num_parallel_calls, f, output_types, output_shapes, use_inter_op_parallelism=True, deterministic="default", preserve_cardinality=False, metadata="", name=None): 

5769 r"""Creates a dataset that applies `f` to the outputs of `input_dataset`. 

5770 

5771 Unlike a "MapDataset", which applies `f` sequentially, this dataset invokes up 

5772 to `num_parallel_calls` copies of `f` in parallel. 

5773 

5774 Args: 

5775 input_dataset: A `Tensor` of type `variant`. 

5776 other_arguments: A list of `Tensor` objects. 

5777 num_parallel_calls: A `Tensor` of type `int64`. 

5778 The number of concurrent invocations of `f` that process 

5779 elements from `input_dataset` in parallel. 

5780 f: A function decorated with @Defun. 

5781 output_types: A list of `tf.DTypes` that has length `>= 1`. 

5782 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

5783 use_inter_op_parallelism: An optional `bool`. Defaults to `True`. 

5784 deterministic: An optional `string`. Defaults to `"default"`. 

5785 preserve_cardinality: An optional `bool`. Defaults to `False`. 

5786 metadata: An optional `string`. Defaults to `""`. 

5787 name: A name for the operation (optional). 

5788 

5789 Returns: 

5790 A `Tensor` of type `variant`. 

5791 """ 

5792 _ctx = _context._context or _context.context() 

5793 tld = _ctx._thread_local_data 

5794 if tld.is_eager: 

5795 try: 

5796 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

5797 _ctx, "ParallelMapDatasetV2", name, input_dataset, other_arguments, 

5798 num_parallel_calls, "f", f, "output_types", output_types, 

5799 "output_shapes", output_shapes, "use_inter_op_parallelism", 

5800 use_inter_op_parallelism, "deterministic", deterministic, 

5801 "preserve_cardinality", preserve_cardinality, "metadata", metadata) 

5802 return _result 

5803 except _core._NotOkStatusException as e: 

5804 _ops.raise_from_not_ok_status(e, name) 

5805 except _core._FallbackException: 

5806 pass 

5807 try: 

5808 return parallel_map_dataset_v2_eager_fallback( 

5809 input_dataset, other_arguments, num_parallel_calls, f=f, 

5810 output_types=output_types, output_shapes=output_shapes, 

5811 use_inter_op_parallelism=use_inter_op_parallelism, 

5812 deterministic=deterministic, 

5813 preserve_cardinality=preserve_cardinality, metadata=metadata, 

5814 name=name, ctx=_ctx) 

5815 except _core._SymbolicException: 

5816 pass

5817 # Add nodes to the TensorFlow graph. 

5818 if not isinstance(output_types, (list, tuple)): 

5819 raise TypeError( 

5820 "Expected list for 'output_types' argument to " 

5821 "'parallel_map_dataset_v2' Op, not %r." % output_types) 

5822 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

5823 if not isinstance(output_shapes, (list, tuple)): 

5824 raise TypeError( 

5825 "Expected list for 'output_shapes' argument to " 

5826 "'parallel_map_dataset_v2' Op, not %r." % output_shapes) 

5827 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

5828 if use_inter_op_parallelism is None: 

5829 use_inter_op_parallelism = True 

5830 use_inter_op_parallelism = _execute.make_bool(use_inter_op_parallelism, "use_inter_op_parallelism") 

5831 if deterministic is None: 

5832 deterministic = "default" 

5833 deterministic = _execute.make_str(deterministic, "deterministic") 

5834 if preserve_cardinality is None: 

5835 preserve_cardinality = False 

5836 preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality") 

5837 if metadata is None: 

5838 metadata = "" 

5839 metadata = _execute.make_str(metadata, "metadata") 

5840 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

5841 "ParallelMapDatasetV2", input_dataset=input_dataset, 

5842 other_arguments=other_arguments, 

5843 num_parallel_calls=num_parallel_calls, f=f, 

5844 output_types=output_types, 

5845 output_shapes=output_shapes, 

5846 use_inter_op_parallelism=use_inter_op_parallelism, 

5847 deterministic=deterministic, 

5848 preserve_cardinality=preserve_cardinality, 

5849 metadata=metadata, name=name) 

5850 _result = _outputs[:] 

5851 if _execute.must_record_gradient(): 

5852 _attrs = ("f", _op.get_attr("f"), "Targuments", 

5853 _op.get_attr("Targuments"), "output_types", 

5854 _op.get_attr("output_types"), "output_shapes", 

5855 _op.get_attr("output_shapes"), "use_inter_op_parallelism", 

5856 _op._get_attr_bool("use_inter_op_parallelism"), "deterministic", 

5857 _op.get_attr("deterministic"), "preserve_cardinality", 

5858 _op._get_attr_bool("preserve_cardinality"), "metadata", 

5859 _op.get_attr("metadata")) 

5860 _inputs_flat = _op.inputs 

5861 _execute.record_gradient( 

5862 "ParallelMapDatasetV2", _inputs_flat, _attrs, _result) 

5863 _result, = _result 

5864 return _result 

5865 

5866ParallelMapDatasetV2 = tf_export("raw_ops.ParallelMapDatasetV2")(_ops.to_raw_op(parallel_map_dataset_v2)) 

5867 

5868 

5869def parallel_map_dataset_v2_eager_fallback(input_dataset, other_arguments, num_parallel_calls, f, output_types, output_shapes, use_inter_op_parallelism, deterministic, preserve_cardinality, metadata, name, ctx): 

5870 if not isinstance(output_types, (list, tuple)): 

5871 raise TypeError( 

5872 "Expected list for 'output_types' argument to " 

5873 "'parallel_map_dataset_v2' Op, not %r." % output_types) 

5874 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

5875 if not isinstance(output_shapes, (list, tuple)): 

5876 raise TypeError( 

5877 "Expected list for 'output_shapes' argument to " 

5878 "'parallel_map_dataset_v2' Op, not %r." % output_shapes) 

5879 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

5880 if use_inter_op_parallelism is None: 

5881 use_inter_op_parallelism = True 

5882 use_inter_op_parallelism = _execute.make_bool(use_inter_op_parallelism, "use_inter_op_parallelism") 

5883 if deterministic is None: 

5884 deterministic = "default" 

5885 deterministic = _execute.make_str(deterministic, "deterministic") 

5886 if preserve_cardinality is None: 

5887 preserve_cardinality = False 

5888 preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality") 

5889 if metadata is None: 

5890 metadata = "" 

5891 metadata = _execute.make_str(metadata, "metadata") 

5892 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx) 

5893 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

5894 num_parallel_calls = _ops.convert_to_tensor(num_parallel_calls, _dtypes.int64) 

5895 _inputs_flat = [input_dataset] + list(other_arguments) + [num_parallel_calls] 

5896 _attrs = ("f", f, "Targuments", _attr_Targuments, "output_types", 

5897 output_types, "output_shapes", output_shapes, "use_inter_op_parallelism", 

5898 use_inter_op_parallelism, "deterministic", deterministic, 

5899 "preserve_cardinality", preserve_cardinality, "metadata", metadata) 

5900 _result = _execute.execute(b"ParallelMapDatasetV2", 1, inputs=_inputs_flat, 

5901 attrs=_attrs, ctx=ctx, name=name) 

5902 if _execute.must_record_gradient(): 

5903 _execute.record_gradient( 

5904 "ParallelMapDatasetV2", _inputs_flat, _attrs, _result) 

5905 _result, = _result 

5906 return _result 

5907 

5908 
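# Usage sketch for the op above, assuming TF >= 2.2: map() with
# num_parallel_calls (and optionally deterministic=) lowers to this op;
# deterministic=False trades output order for throughput.
# `_example_parallel_map_v2` is a hypothetical helper name.
def _example_parallel_map_v2():  # hypothetical helper, for illustration only
    import tensorflow as tf
    ds = tf.data.Dataset.range(5).map(
        lambda x: x + 1,
        num_parallel_calls=tf.data.AUTOTUNE, deterministic=True)
    return [int(v) for v in ds]  # [1, 2, 3, 4, 5]
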

5909def prefetch_dataset(input_dataset, buffer_size, output_types, output_shapes, slack_period=0, legacy_autotune=True, buffer_size_min=0, metadata="", name=None): 

5910 r"""Creates a dataset that asynchronously prefetches elements from `input_dataset`. 

5911 

5912 Args: 

5913 input_dataset: A `Tensor` of type `variant`. 

5914 buffer_size: A `Tensor` of type `int64`. 

5915 The maximum number of elements to buffer in an iterator over 

5916 this dataset. 

5917 output_types: A list of `tf.DTypes` that has length `>= 1`. 

5918 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

5919 slack_period: An optional `int`. Defaults to `0`. 

5920 legacy_autotune: An optional `bool`. Defaults to `True`. 

5921 buffer_size_min: An optional `int`. Defaults to `0`. 

5922 metadata: An optional `string`. Defaults to `""`. 

5923 name: A name for the operation (optional). 

5924 

5925 Returns: 

5926 A `Tensor` of type `variant`. 

5927 """ 

5928 _ctx = _context._context or _context.context() 

5929 tld = _ctx._thread_local_data 

5930 if tld.is_eager: 

5931 try: 

5932 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

5933 _ctx, "PrefetchDataset", name, input_dataset, buffer_size, 

5934 "output_types", output_types, "output_shapes", output_shapes, 

5935 "slack_period", slack_period, "legacy_autotune", legacy_autotune, 

5936 "buffer_size_min", buffer_size_min, "metadata", metadata) 

5937 return _result 

5938 except _core._NotOkStatusException as e: 

5939 _ops.raise_from_not_ok_status(e, name) 

5940 except _core._FallbackException: 

5941 pass 

5942 try: 

5943 return prefetch_dataset_eager_fallback( 

5944 input_dataset, buffer_size, output_types=output_types, 

5945 output_shapes=output_shapes, slack_period=slack_period, 

5946 legacy_autotune=legacy_autotune, buffer_size_min=buffer_size_min, 

5947 metadata=metadata, name=name, ctx=_ctx) 

5948 except _core._SymbolicException: 

5949 pass

5950 # Add nodes to the TensorFlow graph. 

5951 if not isinstance(output_types, (list, tuple)): 

5952 raise TypeError( 

5953 "Expected list for 'output_types' argument to " 

5954 "'prefetch_dataset' Op, not %r." % output_types) 

5955 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

5956 if not isinstance(output_shapes, (list, tuple)): 

5957 raise TypeError( 

5958 "Expected list for 'output_shapes' argument to " 

5959 "'prefetch_dataset' Op, not %r." % output_shapes) 

5960 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

5961 if slack_period is None: 

5962 slack_period = 0 

5963 slack_period = _execute.make_int(slack_period, "slack_period") 

5964 if legacy_autotune is None: 

5965 legacy_autotune = True 

5966 legacy_autotune = _execute.make_bool(legacy_autotune, "legacy_autotune") 

5967 if buffer_size_min is None: 

5968 buffer_size_min = 0 

5969 buffer_size_min = _execute.make_int(buffer_size_min, "buffer_size_min") 

5970 if metadata is None: 

5971 metadata = "" 

5972 metadata = _execute.make_str(metadata, "metadata") 

5973 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

5974 "PrefetchDataset", input_dataset=input_dataset, 

5975 buffer_size=buffer_size, output_types=output_types, 

5976 output_shapes=output_shapes, 

5977 slack_period=slack_period, 

5978 legacy_autotune=legacy_autotune, 

5979 buffer_size_min=buffer_size_min, metadata=metadata, 

5980 name=name) 

5981 _result = _outputs[:] 

5982 if _execute.must_record_gradient(): 

5983 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

5984 _op.get_attr("output_shapes"), "slack_period", 

5985 _op._get_attr_int("slack_period"), "legacy_autotune", 

5986 _op._get_attr_bool("legacy_autotune"), "buffer_size_min", 

5987 _op._get_attr_int("buffer_size_min"), "metadata", 

5988 _op.get_attr("metadata")) 

5989 _inputs_flat = _op.inputs 

5990 _execute.record_gradient( 

5991 "PrefetchDataset", _inputs_flat, _attrs, _result) 

5992 _result, = _result 

5993 return _result 

5994 

5995PrefetchDataset = tf_export("raw_ops.PrefetchDataset")(_ops.to_raw_op(prefetch_dataset)) 

5996 

5997 

5998def prefetch_dataset_eager_fallback(input_dataset, buffer_size, output_types, output_shapes, slack_period, legacy_autotune, buffer_size_min, metadata, name, ctx): 

5999 if not isinstance(output_types, (list, tuple)): 

6000 raise TypeError( 

6001 "Expected list for 'output_types' argument to " 

6002 "'prefetch_dataset' Op, not %r." % output_types) 

6003 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

6004 if not isinstance(output_shapes, (list, tuple)): 

6005 raise TypeError( 

6006 "Expected list for 'output_shapes' argument to " 

6007 "'prefetch_dataset' Op, not %r." % output_shapes) 

6008 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

6009 if slack_period is None: 

6010 slack_period = 0 

6011 slack_period = _execute.make_int(slack_period, "slack_period") 

6012 if legacy_autotune is None: 

6013 legacy_autotune = True 

6014 legacy_autotune = _execute.make_bool(legacy_autotune, "legacy_autotune") 

6015 if buffer_size_min is None: 

6016 buffer_size_min = 0 

6017 buffer_size_min = _execute.make_int(buffer_size_min, "buffer_size_min") 

6018 if metadata is None: 

6019 metadata = "" 

6020 metadata = _execute.make_str(metadata, "metadata") 

6021 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

6022 buffer_size = _ops.convert_to_tensor(buffer_size, _dtypes.int64) 

6023 _inputs_flat = [input_dataset, buffer_size] 

6024 _attrs = ("output_types", output_types, "output_shapes", output_shapes, 

6025 "slack_period", slack_period, "legacy_autotune", legacy_autotune, 

6026 "buffer_size_min", buffer_size_min, "metadata", metadata) 

6027 _result = _execute.execute(b"PrefetchDataset", 1, inputs=_inputs_flat, 

6028 attrs=_attrs, ctx=ctx, name=name) 

6029 if _execute.must_record_gradient(): 

6030 _execute.record_gradient( 

6031 "PrefetchDataset", _inputs_flat, _attrs, _result) 

6032 _result, = _result 

6033 return _result 

6034 

6035 
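# Usage sketch for the op above: prefetch() lowers to this op, and
# tf.data.AUTOTUNE lets the runtime size the buffer (the legacy_autotune and
# buffer_size_min attrs govern that tuning). `_example_prefetch` is a
# hypothetical helper name.
def _example_prefetch():  # hypothetical helper, for illustration only
    import tensorflow as tf
    ds = tf.data.Dataset.range(4).map(lambda x: x * x).prefetch(
        tf.data.AUTOTUNE)
    return [int(v) for v in ds]  # [0, 1, 4, 9]
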

6036def range_dataset(start, stop, step, output_types, output_shapes, metadata="", replicate_on_split=False, name=None): 

6037 r"""Creates a dataset with a range of values. Corresponds to python's xrange. 

6038 

6039 Args: 

6040 start: A `Tensor` of type `int64`. 

6041 corresponds to `start` in Python's `range()`.

6042 stop: A `Tensor` of type `int64`.

6043 corresponds to `stop` in Python's `range()`.

6044 step: A `Tensor` of type `int64`.

6045 corresponds to `step` in Python's `range()`.

6046 output_types: A list of `tf.DTypes` that has length `>= 1`. 

6047 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

6048 metadata: An optional `string`. Defaults to `""`. 

6049 replicate_on_split: An optional `bool`. Defaults to `False`. 

6050 name: A name for the operation (optional). 

6051 

6052 Returns: 

6053 A `Tensor` of type `variant`. 

6054 """ 

6055 _ctx = _context._context or _context.context() 

6056 tld = _ctx._thread_local_data 

6057 if tld.is_eager: 

6058 try: 

6059 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

6060 _ctx, "RangeDataset", name, start, stop, step, "output_types", 

6061 output_types, "output_shapes", output_shapes, "metadata", metadata, 

6062 "replicate_on_split", replicate_on_split) 

6063 return _result 

6064 except _core._NotOkStatusException as e: 

6065 _ops.raise_from_not_ok_status(e, name) 

6066 except _core._FallbackException: 

6067 pass 

6068 try: 

6069 return range_dataset_eager_fallback( 

6070 start, stop, step, output_types=output_types, 

6071 output_shapes=output_shapes, metadata=metadata, 

6072 replicate_on_split=replicate_on_split, name=name, ctx=_ctx) 

6073 except _core._SymbolicException: 

6074 pass

6075 # Add nodes to the TensorFlow graph. 

6076 if not isinstance(output_types, (list, tuple)): 

6077 raise TypeError( 

6078 "Expected list for 'output_types' argument to " 

6079 "'range_dataset' Op, not %r." % output_types) 

6080 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

6081 if not isinstance(output_shapes, (list, tuple)): 

6082 raise TypeError( 

6083 "Expected list for 'output_shapes' argument to " 

6084 "'range_dataset' Op, not %r." % output_shapes) 

6085 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

6086 if metadata is None: 

6087 metadata = "" 

6088 metadata = _execute.make_str(metadata, "metadata") 

6089 if replicate_on_split is None: 

6090 replicate_on_split = False 

6091 replicate_on_split = _execute.make_bool(replicate_on_split, "replicate_on_split") 

6092 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

6093 "RangeDataset", start=start, stop=stop, step=step, 

6094 output_types=output_types, 

6095 output_shapes=output_shapes, metadata=metadata, 

6096 replicate_on_split=replicate_on_split, name=name) 

6097 _result = _outputs[:] 

6098 if _execute.must_record_gradient(): 

6099 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

6100 _op.get_attr("output_shapes"), "metadata", 

6101 _op.get_attr("metadata"), "replicate_on_split", 

6102 _op._get_attr_bool("replicate_on_split")) 

6103 _inputs_flat = _op.inputs 

6104 _execute.record_gradient( 

6105 "RangeDataset", _inputs_flat, _attrs, _result) 

6106 _result, = _result 

6107 return _result 

6108 

6109RangeDataset = tf_export("raw_ops.RangeDataset")(_ops.to_raw_op(range_dataset)) 

6110 

6111 

6112def range_dataset_eager_fallback(start, stop, step, output_types, output_shapes, metadata, replicate_on_split, name, ctx): 

6113 if not isinstance(output_types, (list, tuple)): 

6114 raise TypeError( 

6115 "Expected list for 'output_types' argument to " 

6116 "'range_dataset' Op, not %r." % output_types) 

6117 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

6118 if not isinstance(output_shapes, (list, tuple)): 

6119 raise TypeError( 

6120 "Expected list for 'output_shapes' argument to " 

6121 "'range_dataset' Op, not %r." % output_shapes) 

6122 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

6123 if metadata is None: 

6124 metadata = "" 

6125 metadata = _execute.make_str(metadata, "metadata") 

6126 if replicate_on_split is None: 

6127 replicate_on_split = False 

6128 replicate_on_split = _execute.make_bool(replicate_on_split, "replicate_on_split") 

6129 start = _ops.convert_to_tensor(start, _dtypes.int64) 

6130 stop = _ops.convert_to_tensor(stop, _dtypes.int64) 

6131 step = _ops.convert_to_tensor(step, _dtypes.int64) 

6132 _inputs_flat = [start, stop, step] 

6133 _attrs = ("output_types", output_types, "output_shapes", output_shapes, 

6134 "metadata", metadata, "replicate_on_split", replicate_on_split) 

6135 _result = _execute.execute(b"RangeDataset", 1, inputs=_inputs_flat, 

6136 attrs=_attrs, ctx=ctx, name=name) 

6137 if _execute.must_record_gradient(): 

6138 _execute.record_gradient( 

6139 "RangeDataset", _inputs_flat, _attrs, _result) 

6140 _result, = _result 

6141 return _result 

6142 

6143 
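# [Editor's example] Hypothetical usage sketch -- not part of this machine-
# generated file. RangeDataset is the op behind `tf.data.Dataset.range`; a
# variant tensor from the raw op can be rewrapped as a regular dataset with
# tf.data.experimental.from_variant.
import tensorflow as tf

variant = tf.raw_ops.RangeDataset(
    start=1, stop=10, step=3,
    output_types=[tf.int64], output_shapes=[[]])
ds = tf.data.experimental.from_variant(
    variant, structure=tf.TensorSpec(shape=[], dtype=tf.int64))
print([int(x) for x in ds])  # [1, 4, 7]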

6144def reduce_dataset(input_dataset, initial_state, other_arguments, f, output_types, output_shapes, use_inter_op_parallelism=True, metadata="", name=None): 

6145 r"""Reduces the input dataset to a singleton using a reduce function. 

6146 

6147 Args: 

6148 input_dataset: A `Tensor` of type `variant`. 

6149 A variant tensor representing the input dataset. 

6150 initial_state: A list of `Tensor` objects. 

6151 A nested structure of tensors, representing the initial state of the 

6152 transformation. 

6153 other_arguments: A list of `Tensor` objects. 

6154 f: A function decorated with @Defun. 

6155 A function that maps `(old_state, input_element)` to `new_state`. It must take 

6156 two arguments and return a nested structure of tensors. The structure of 

6157 `new_state` must match the structure of `initial_state`. 

6158 output_types: A list of `tf.DTypes` that has length `>= 1`. 

6159 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

6160 use_inter_op_parallelism: An optional `bool`. Defaults to `True`. 

6161 metadata: An optional `string`. Defaults to `""`. 

6162 name: A name for the operation (optional). 

6163 

6164 Returns: 

6165 A list of `Tensor` objects of type `output_types`. 

6166 """ 

6167 _ctx = _context._context or _context.context() 

6168 tld = _ctx._thread_local_data 

6169 if tld.is_eager: 

6170 try: 

6171 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

6172 _ctx, "ReduceDataset", name, input_dataset, initial_state, 

6173 other_arguments, "f", f, "output_types", output_types, 

6174 "output_shapes", output_shapes, "use_inter_op_parallelism", 

6175 use_inter_op_parallelism, "metadata", metadata) 

6176 return _result 

6177 except _core._NotOkStatusException as e: 

6178 _ops.raise_from_not_ok_status(e, name) 

6179 except _core._FallbackException: 

6180 pass 

6181 try: 

6182 return reduce_dataset_eager_fallback( 

6183 input_dataset, initial_state, other_arguments, f=f, 

6184 output_types=output_types, output_shapes=output_shapes, 

6185 use_inter_op_parallelism=use_inter_op_parallelism, 

6186 metadata=metadata, name=name, ctx=_ctx) 

6187 except _core._SymbolicException: 

6188 pass # Add nodes to the TensorFlow graph. 

6189 # Add nodes to the TensorFlow graph. 

6190 if not isinstance(output_types, (list, tuple)): 

6191 raise TypeError( 

6192 "Expected list for 'output_types' argument to " 

6193 "'reduce_dataset' Op, not %r." % output_types) 

6194 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

6195 if not isinstance(output_shapes, (list, tuple)): 

6196 raise TypeError( 

6197 "Expected list for 'output_shapes' argument to " 

6198 "'reduce_dataset' Op, not %r." % output_shapes) 

6199 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

6200 if use_inter_op_parallelism is None: 

6201 use_inter_op_parallelism = True 

6202 use_inter_op_parallelism = _execute.make_bool(use_inter_op_parallelism, "use_inter_op_parallelism") 

6203 if metadata is None: 

6204 metadata = "" 

6205 metadata = _execute.make_str(metadata, "metadata") 

6206 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

6207 "ReduceDataset", input_dataset=input_dataset, 

6208 initial_state=initial_state, 

6209 other_arguments=other_arguments, f=f, 

6210 output_types=output_types, 

6211 output_shapes=output_shapes, 

6212 use_inter_op_parallelism=use_inter_op_parallelism, 

6213 metadata=metadata, name=name) 

6214 _result = _outputs[:] 

6215 if not _result: 

6216 return _op 

6217 if _execute.must_record_gradient(): 

6218 _attrs = ("f", _op.get_attr("f"), "Tstate", _op.get_attr("Tstate"), 

6219 "Targuments", _op.get_attr("Targuments"), "output_types", 

6220 _op.get_attr("output_types"), "output_shapes", 

6221 _op.get_attr("output_shapes"), "use_inter_op_parallelism", 

6222 _op._get_attr_bool("use_inter_op_parallelism"), "metadata", 

6223 _op.get_attr("metadata")) 

6224 _inputs_flat = _op.inputs 

6225 _execute.record_gradient( 

6226 "ReduceDataset", _inputs_flat, _attrs, _result) 

6227 return _result 

6228 

6229ReduceDataset = tf_export("raw_ops.ReduceDataset")(_ops.to_raw_op(reduce_dataset)) 

6230 

6231 

6232def reduce_dataset_eager_fallback(input_dataset, initial_state, other_arguments, f, output_types, output_shapes, use_inter_op_parallelism, metadata, name, ctx): 

6233 if not isinstance(output_types, (list, tuple)): 

6234 raise TypeError( 

6235 "Expected list for 'output_types' argument to " 

6236 "'reduce_dataset' Op, not %r." % output_types) 

6237 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

6238 if not isinstance(output_shapes, (list, tuple)): 

6239 raise TypeError( 

6240 "Expected list for 'output_shapes' argument to " 

6241 "'reduce_dataset' Op, not %r." % output_shapes) 

6242 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

6243 if use_inter_op_parallelism is None: 

6244 use_inter_op_parallelism = True 

6245 use_inter_op_parallelism = _execute.make_bool(use_inter_op_parallelism, "use_inter_op_parallelism") 

6246 if metadata is None: 

6247 metadata = "" 

6248 metadata = _execute.make_str(metadata, "metadata") 

6249 _attr_Tstate, initial_state = _execute.convert_to_mixed_eager_tensors(initial_state, ctx) 

6250 _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, ctx) 

6251 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

6252 _inputs_flat = [input_dataset] + list(initial_state) + list(other_arguments) 

6253 _attrs = ("f", f, "Tstate", _attr_Tstate, "Targuments", _attr_Targuments, 

6254 "output_types", output_types, "output_shapes", output_shapes, 

6255 "use_inter_op_parallelism", use_inter_op_parallelism, "metadata", metadata) 

6256 _result = _execute.execute(b"ReduceDataset", len(output_types), 

6257 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

6258 name=name) 

6259 if _execute.must_record_gradient(): 

6260 _execute.record_gradient( 

6261 "ReduceDataset", _inputs_flat, _attrs, _result) 

6262 return _result 

6263 

6264 
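# [Editor's example] Hypothetical usage sketch -- not part of this machine-
# generated file. ReduceDataset backs the public `tf.data.Dataset.reduce`,
# which is the easier way to exercise it: the lambda becomes the traced
# reduce function `f`, and the first argument is `initial_state`.
import numpy as np
import tensorflow as tf

total = tf.data.Dataset.range(5).reduce(
    np.int64(0), lambda state, element: state + element)
print(int(total))  # 0 + 1 + 2 + 3 + 4 = 10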

6265def repeat_dataset(input_dataset, count, output_types, output_shapes, metadata="", name=None): 

6266 r"""Creates a dataset that emits the outputs of `input_dataset` `count` times. 

6267 

6268 Args: 

6269 input_dataset: A `Tensor` of type `variant`. 

6270 count: A `Tensor` of type `int64`. 

6271 A scalar representing the number of times that `input_dataset` should 

6272 be repeated. A value of `-1` indicates that it should be repeated infinitely. 

6273 output_types: A list of `tf.DTypes` that has length `>= 1`. 

6274 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

6275 metadata: An optional `string`. Defaults to `""`. 

6276 name: A name for the operation (optional). 

6277 

6278 Returns: 

6279 A `Tensor` of type `variant`. 

6280 """ 

6281 _ctx = _context._context or _context.context() 

6282 tld = _ctx._thread_local_data 

6283 if tld.is_eager: 

6284 try: 

6285 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

6286 _ctx, "RepeatDataset", name, input_dataset, count, "output_types", 

6287 output_types, "output_shapes", output_shapes, "metadata", metadata) 

6288 return _result 

6289 except _core._NotOkStatusException as e: 

6290 _ops.raise_from_not_ok_status(e, name) 

6291 except _core._FallbackException: 

6292 pass 

6293 try: 

6294 return repeat_dataset_eager_fallback( 

6295 input_dataset, count, output_types=output_types, 

6296 output_shapes=output_shapes, metadata=metadata, name=name, ctx=_ctx) 

6297 except _core._SymbolicException: 

6298 pass # Add nodes to the TensorFlow graph. 

6299 # Add nodes to the TensorFlow graph. 

6300 if not isinstance(output_types, (list, tuple)): 

6301 raise TypeError( 

6302 "Expected list for 'output_types' argument to " 

6303 "'repeat_dataset' Op, not %r." % output_types) 

6304 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

6305 if not isinstance(output_shapes, (list, tuple)): 

6306 raise TypeError( 

6307 "Expected list for 'output_shapes' argument to " 

6308 "'repeat_dataset' Op, not %r." % output_shapes) 

6309 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

6310 if metadata is None: 

6311 metadata = "" 

6312 metadata = _execute.make_str(metadata, "metadata") 

6313 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

6314 "RepeatDataset", input_dataset=input_dataset, count=count, 

6315 output_types=output_types, 

6316 output_shapes=output_shapes, metadata=metadata, 

6317 name=name) 

6318 _result = _outputs[:] 

6319 if _execute.must_record_gradient(): 

6320 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

6321 _op.get_attr("output_shapes"), "metadata", 

6322 _op.get_attr("metadata")) 

6323 _inputs_flat = _op.inputs 

6324 _execute.record_gradient( 

6325 "RepeatDataset", _inputs_flat, _attrs, _result) 

6326 _result, = _result 

6327 return _result 

6328 

6329RepeatDataset = tf_export("raw_ops.RepeatDataset")(_ops.to_raw_op(repeat_dataset)) 

6330 

6331 

6332def repeat_dataset_eager_fallback(input_dataset, count, output_types, output_shapes, metadata, name, ctx): 

6333 if not isinstance(output_types, (list, tuple)): 

6334 raise TypeError( 

6335 "Expected list for 'output_types' argument to " 

6336 "'repeat_dataset' Op, not %r." % output_types) 

6337 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

6338 if not isinstance(output_shapes, (list, tuple)): 

6339 raise TypeError( 

6340 "Expected list for 'output_shapes' argument to " 

6341 "'repeat_dataset' Op, not %r." % output_shapes) 

6342 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

6343 if metadata is None: 

6344 metadata = "" 

6345 metadata = _execute.make_str(metadata, "metadata") 

6346 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

6347 count = _ops.convert_to_tensor(count, _dtypes.int64) 

6348 _inputs_flat = [input_dataset, count] 

6349 _attrs = ("output_types", output_types, "output_shapes", output_shapes, 

6350 "metadata", metadata) 

6351 _result = _execute.execute(b"RepeatDataset", 1, inputs=_inputs_flat, 

6352 attrs=_attrs, ctx=ctx, name=name) 

6353 if _execute.must_record_gradient(): 

6354 _execute.record_gradient( 

6355 "RepeatDataset", _inputs_flat, _attrs, _result) 

6356 _result, = _result 

6357 return _result 

6358 

6359 
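# [Editor's example] Hypothetical usage sketch -- not part of this machine-
# generated file. RepeatDataset backs `tf.data.Dataset.repeat`; a count of
# -1 (the public API's default) repeats indefinitely.
import tensorflow as tf

ds = tf.data.Dataset.range(3).repeat(2)
print([int(x) for x in ds])  # [0, 1, 2, 0, 1, 2]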

6360def rewrite_dataset(input_dataset, rewrite_name, output_types, output_shapes, name=None): 

6361 r"""TODO: add doc. 

6362 

6363 Args: 

6364 input_dataset: A `Tensor` of type `variant`. 

6365 rewrite_name: A `Tensor` of type `string`. 

6366 output_types: A list of `tf.DTypes` that has length `>= 1`. 

6367 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

6368 name: A name for the operation (optional). 

6369 

6370 Returns: 

6371 A `Tensor` of type `variant`. 

6372 """ 

6373 _ctx = _context._context or _context.context() 

6374 tld = _ctx._thread_local_data 

6375 if tld.is_eager: 

6376 try: 

6377 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

6378 _ctx, "RewriteDataset", name, input_dataset, rewrite_name, 

6379 "output_types", output_types, "output_shapes", output_shapes) 

6380 return _result 

6381 except _core._NotOkStatusException as e: 

6382 _ops.raise_from_not_ok_status(e, name) 

6383 except _core._FallbackException: 

6384 pass 

6385 try: 

6386 return rewrite_dataset_eager_fallback( 

6387 input_dataset, rewrite_name, output_types=output_types, 

6388 output_shapes=output_shapes, name=name, ctx=_ctx) 

6389 except _core._SymbolicException: 

6390 pass # Add nodes to the TensorFlow graph. 

6391 # Add nodes to the TensorFlow graph. 

6392 if not isinstance(output_types, (list, tuple)): 

6393 raise TypeError( 

6394 "Expected list for 'output_types' argument to " 

6395 "'rewrite_dataset' Op, not %r." % output_types) 

6396 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

6397 if not isinstance(output_shapes, (list, tuple)): 

6398 raise TypeError( 

6399 "Expected list for 'output_shapes' argument to " 

6400 "'rewrite_dataset' Op, not %r." % output_shapes) 

6401 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

6402 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

6403 "RewriteDataset", input_dataset=input_dataset, 

6404 rewrite_name=rewrite_name, 

6405 output_types=output_types, 

6406 output_shapes=output_shapes, name=name) 

6407 _result = _outputs[:] 

6408 if _execute.must_record_gradient(): 

6409 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

6410 _op.get_attr("output_shapes")) 

6411 _inputs_flat = _op.inputs 

6412 _execute.record_gradient( 

6413 "RewriteDataset", _inputs_flat, _attrs, _result) 

6414 _result, = _result 

6415 return _result 

6416 

6417RewriteDataset = tf_export("raw_ops.RewriteDataset")(_ops.to_raw_op(rewrite_dataset)) 

6418 

6419 

6420def rewrite_dataset_eager_fallback(input_dataset, rewrite_name, output_types, output_shapes, name, ctx): 

6421 if not isinstance(output_types, (list, tuple)): 

6422 raise TypeError( 

6423 "Expected list for 'output_types' argument to " 

6424 "'rewrite_dataset' Op, not %r." % output_types) 

6425 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

6426 if not isinstance(output_shapes, (list, tuple)): 

6427 raise TypeError( 

6428 "Expected list for 'output_shapes' argument to " 

6429 "'rewrite_dataset' Op, not %r." % output_shapes) 

6430 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

6431 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

6432 rewrite_name = _ops.convert_to_tensor(rewrite_name, _dtypes.string) 

6433 _inputs_flat = [input_dataset, rewrite_name] 

6434 _attrs = ("output_types", output_types, "output_shapes", output_shapes) 

6435 _result = _execute.execute(b"RewriteDataset", 1, inputs=_inputs_flat, 

6436 attrs=_attrs, ctx=ctx, name=name) 

6437 if _execute.must_record_gradient(): 

6438 _execute.record_gradient( 

6439 "RewriteDataset", _inputs_flat, _attrs, _result) 

6440 _result, = _result 

6441 return _result 

6442 

6443 
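# [Editor's example] Hypothetical usage sketch -- not part of this machine-
# generated file. RewriteDataset applies a named tf.data graph rewrite;
# "noop_elimination" is one of the standard rewrite names (assumed available
# in this build). `_variant_tensor` is a private attribute, used here only
# for illustration.
import tensorflow as tf

variant = tf.data.Dataset.range(4)._variant_tensor
rewritten = tf.raw_ops.RewriteDataset(
    input_dataset=variant, rewrite_name="noop_elimination",
    output_types=[tf.int64], output_shapes=[[]])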

6444def serialize_iterator(resource_handle, external_state_policy=0, name=None): 

6445 r"""Converts the given `resource_handle` representing an iterator to a variant tensor. 

6446 

6447 Args: 

6448 resource_handle: A `Tensor` of type `resource`. 

6449 A handle to an iterator resource. 

6450 external_state_policy: An optional `int`. Defaults to `0`. 

6451 name: A name for the operation (optional). 

6452 

6453 Returns: 

6454 A `Tensor` of type `variant`. 

6455 """ 

6456 _ctx = _context._context or _context.context() 

6457 tld = _ctx._thread_local_data 

6458 if tld.is_eager: 

6459 try: 

6460 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

6461 _ctx, "SerializeIterator", name, resource_handle, 

6462 "external_state_policy", external_state_policy) 

6463 return _result 

6464 except _core._NotOkStatusException as e: 

6465 _ops.raise_from_not_ok_status(e, name) 

6466 except _core._FallbackException: 

6467 pass 

6468 try: 

6469 return serialize_iterator_eager_fallback( 

6470 resource_handle, external_state_policy=external_state_policy, 

6471 name=name, ctx=_ctx) 

6472 except _core._SymbolicException: 

6473 pass # Add nodes to the TensorFlow graph. 

6474 # Add nodes to the TensorFlow graph. 

6475 if external_state_policy is None: 

6476 external_state_policy = 0 

6477 external_state_policy = _execute.make_int(external_state_policy, "external_state_policy") 

6478 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

6479 "SerializeIterator", resource_handle=resource_handle, 

6480 external_state_policy=external_state_policy, 

6481 name=name) 

6482 _result = _outputs[:] 

6483 if _execute.must_record_gradient(): 

6484 _attrs = ("external_state_policy", 

6485 _op._get_attr_int("external_state_policy")) 

6486 _inputs_flat = _op.inputs 

6487 _execute.record_gradient( 

6488 "SerializeIterator", _inputs_flat, _attrs, _result) 

6489 _result, = _result 

6490 return _result 

6491 

6492SerializeIterator = tf_export("raw_ops.SerializeIterator")(_ops.to_raw_op(serialize_iterator)) 

6493 

6494 

6495def serialize_iterator_eager_fallback(resource_handle, external_state_policy, name, ctx): 

6496 if external_state_policy is None: 

6497 external_state_policy = 0 

6498 external_state_policy = _execute.make_int(external_state_policy, "external_state_policy") 

6499 resource_handle = _ops.convert_to_tensor(resource_handle, _dtypes.resource) 

6500 _inputs_flat = [resource_handle] 

6501 _attrs = ("external_state_policy", external_state_policy) 

6502 _result = _execute.execute(b"SerializeIterator", 1, inputs=_inputs_flat, 

6503 attrs=_attrs, ctx=ctx, name=name) 

6504 if _execute.must_record_gradient(): 

6505 _execute.record_gradient( 

6506 "SerializeIterator", _inputs_flat, _attrs, _result) 

6507 _result, = _result 

6508 return _result 

6509 

6510 
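# [Editor's example] Hypothetical usage sketch -- not part of this machine-
# generated file. SerializeIterator is what iterator checkpointing uses
# under the hood; the supported surface is tf.train.Checkpoint on an
# iterator object. The checkpoint path below is an assumption.
import tensorflow as tf

it = iter(tf.data.Dataset.range(5))
next(it)                                 # consume element 0
ckpt = tf.train.Checkpoint(iterator=it)
path = ckpt.write("/tmp/iter_ckpt")      # assumes /tmp is writable
ckpt.restore(path)
print(int(next(it)))                     # resumes at 1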

6511def shard_dataset(input_dataset, num_shards, index, output_types, output_shapes, require_non_empty=False, metadata="", name=None): 

6512 r"""Creates a `Dataset` that includes only 1/`num_shards` of this dataset. 

6513 

6514 Args: 

6515 input_dataset: A `Tensor` of type `variant`. 

6516 num_shards: A `Tensor` of type `int64`. 

6517 An integer representing the number of shards operating in parallel. 

6518 index: A `Tensor` of type `int64`. 

6519 An integer representing the current worker index. 

6520 output_types: A list of `tf.DTypes` that has length `>= 1`. 

6521 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

6522 require_non_empty: An optional `bool`. Defaults to `False`. 

6523 metadata: An optional `string`. Defaults to `""`. 

6524 name: A name for the operation (optional). 

6525 

6526 Returns: 

6527 A `Tensor` of type `variant`. 

6528 """ 

6529 _ctx = _context._context or _context.context() 

6530 tld = _ctx._thread_local_data 

6531 if tld.is_eager: 

6532 try: 

6533 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

6534 _ctx, "ShardDataset", name, input_dataset, num_shards, index, 

6535 "require_non_empty", require_non_empty, "output_types", output_types, 

6536 "output_shapes", output_shapes, "metadata", metadata) 

6537 return _result 

6538 except _core._NotOkStatusException as e: 

6539 _ops.raise_from_not_ok_status(e, name) 

6540 except _core._FallbackException: 

6541 pass 

6542 try: 

6543 return shard_dataset_eager_fallback( 

6544 input_dataset, num_shards, index, 

6545 require_non_empty=require_non_empty, output_types=output_types, 

6546 output_shapes=output_shapes, metadata=metadata, name=name, ctx=_ctx) 

6547 except _core._SymbolicException: 

6548 pass # Add nodes to the TensorFlow graph. 

6549 # Add nodes to the TensorFlow graph. 

6550 if not isinstance(output_types, (list, tuple)): 

6551 raise TypeError( 

6552 "Expected list for 'output_types' argument to " 

6553 "'shard_dataset' Op, not %r." % output_types) 

6554 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

6555 if not isinstance(output_shapes, (list, tuple)): 

6556 raise TypeError( 

6557 "Expected list for 'output_shapes' argument to " 

6558 "'shard_dataset' Op, not %r." % output_shapes) 

6559 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

6560 if require_non_empty is None: 

6561 require_non_empty = False 

6562 require_non_empty = _execute.make_bool(require_non_empty, "require_non_empty") 

6563 if metadata is None: 

6564 metadata = "" 

6565 metadata = _execute.make_str(metadata, "metadata") 

6566 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

6567 "ShardDataset", input_dataset=input_dataset, num_shards=num_shards, 

6568 index=index, output_types=output_types, 

6569 output_shapes=output_shapes, 

6570 require_non_empty=require_non_empty, 

6571 metadata=metadata, name=name) 

6572 _result = _outputs[:] 

6573 if _execute.must_record_gradient(): 

6574 _attrs = ("require_non_empty", _op._get_attr_bool("require_non_empty"), 

6575 "output_types", _op.get_attr("output_types"), "output_shapes", 

6576 _op.get_attr("output_shapes"), "metadata", 

6577 _op.get_attr("metadata")) 

6578 _inputs_flat = _op.inputs 

6579 _execute.record_gradient( 

6580 "ShardDataset", _inputs_flat, _attrs, _result) 

6581 _result, = _result 

6582 return _result 

6583 

6584ShardDataset = tf_export("raw_ops.ShardDataset")(_ops.to_raw_op(shard_dataset)) 

6585 

6586 

6587def shard_dataset_eager_fallback(input_dataset, num_shards, index, output_types, output_shapes, require_non_empty, metadata, name, ctx): 

6588 if not isinstance(output_types, (list, tuple)): 

6589 raise TypeError( 

6590 "Expected list for 'output_types' argument to " 

6591 "'shard_dataset' Op, not %r." % output_types) 

6592 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

6593 if not isinstance(output_shapes, (list, tuple)): 

6594 raise TypeError( 

6595 "Expected list for 'output_shapes' argument to " 

6596 "'shard_dataset' Op, not %r." % output_shapes) 

6597 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

6598 if require_non_empty is None: 

6599 require_non_empty = False 

6600 require_non_empty = _execute.make_bool(require_non_empty, "require_non_empty") 

6601 if metadata is None: 

6602 metadata = "" 

6603 metadata = _execute.make_str(metadata, "metadata") 

6604 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

6605 num_shards = _ops.convert_to_tensor(num_shards, _dtypes.int64) 

6606 index = _ops.convert_to_tensor(index, _dtypes.int64) 

6607 _inputs_flat = [input_dataset, num_shards, index] 

6608 _attrs = ("require_non_empty", require_non_empty, "output_types", 

6609 output_types, "output_shapes", output_shapes, "metadata", metadata) 

6610 _result = _execute.execute(b"ShardDataset", 1, inputs=_inputs_flat, 

6611 attrs=_attrs, ctx=ctx, name=name) 

6612 if _execute.must_record_gradient(): 

6613 _execute.record_gradient( 

6614 "ShardDataset", _inputs_flat, _attrs, _result) 

6615 _result, = _result 

6616 return _result 

6617 

6618 
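# [Editor's example] Hypothetical usage sketch -- not part of this machine-
# generated file. ShardDataset backs `tf.data.Dataset.shard`: worker `index`
# keeps every `num_shards`-th element, starting at its own index.
import tensorflow as tf

ds = tf.data.Dataset.range(10).shard(num_shards=3, index=1)
print([int(x) for x in ds])  # [1, 4, 7]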

6619def shuffle_and_repeat_dataset(input_dataset, buffer_size, seed, seed2, count, output_types, output_shapes, reshuffle_each_iteration=True, metadata="", name=None): 

6620 r"""Creates a dataset that shuffles and repeats elements from `input_dataset` 

6621 

6622 pseudorandomly. 

6623 

6624 Args: 

6625 input_dataset: A `Tensor` of type `variant`. 

6626 buffer_size: A `Tensor` of type `int64`. 

6627 The number of output elements to buffer in an iterator over 

6628 this dataset. Compare with the `min_after_dequeue` attr when creating a 

6629 `RandomShuffleQueue`. 

6630 seed: A `Tensor` of type `int64`. 

6631 A scalar seed for the random number generator. If either `seed` or 

6632 `seed2` is set to be non-zero, the random number generator is seeded 

6633 by the given seed. Otherwise, a random seed is used. 

6634 seed2: A `Tensor` of type `int64`. 

6635 A second scalar seed to avoid seed collision. 

6636 count: A `Tensor` of type `int64`. 

6637 A scalar representing the number of times the underlying dataset 

6638 should be repeated. The default is `-1`, which results in infinite repetition. 

6639 output_types: A list of `tf.DTypes` that has length `>= 1`. 

6640 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

6641 reshuffle_each_iteration: An optional `bool`. Defaults to `True`. 

6642 metadata: An optional `string`. Defaults to `""`. 

6643 name: A name for the operation (optional). 

6644 

6645 Returns: 

6646 A `Tensor` of type `variant`. 

6647 """ 

6648 _ctx = _context._context or _context.context() 

6649 tld = _ctx._thread_local_data 

6650 if tld.is_eager: 

6651 try: 

6652 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

6653 _ctx, "ShuffleAndRepeatDataset", name, input_dataset, buffer_size, 

6654 seed, seed2, count, "output_types", output_types, "output_shapes", 

6655 output_shapes, "reshuffle_each_iteration", reshuffle_each_iteration, 

6656 "metadata", metadata) 

6657 return _result 

6658 except _core._NotOkStatusException as e: 

6659 _ops.raise_from_not_ok_status(e, name) 

6660 except _core._FallbackException: 

6661 pass 

6662 try: 

6663 return shuffle_and_repeat_dataset_eager_fallback( 

6664 input_dataset, buffer_size, seed, seed2, count, 

6665 output_types=output_types, output_shapes=output_shapes, 

6666 reshuffle_each_iteration=reshuffle_each_iteration, 

6667 metadata=metadata, name=name, ctx=_ctx) 

6668 except _core._SymbolicException: 

6669 pass # Add nodes to the TensorFlow graph. 

6670 # Add nodes to the TensorFlow graph. 

6671 if not isinstance(output_types, (list, tuple)): 

6672 raise TypeError( 

6673 "Expected list for 'output_types' argument to " 

6674 "'shuffle_and_repeat_dataset' Op, not %r." % output_types) 

6675 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

6676 if not isinstance(output_shapes, (list, tuple)): 

6677 raise TypeError( 

6678 "Expected list for 'output_shapes' argument to " 

6679 "'shuffle_and_repeat_dataset' Op, not %r." % output_shapes) 

6680 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

6681 if reshuffle_each_iteration is None: 

6682 reshuffle_each_iteration = True 

6683 reshuffle_each_iteration = _execute.make_bool(reshuffle_each_iteration, "reshuffle_each_iteration") 

6684 if metadata is None: 

6685 metadata = "" 

6686 metadata = _execute.make_str(metadata, "metadata") 

6687 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

6688 "ShuffleAndRepeatDataset", input_dataset=input_dataset, 

6689 buffer_size=buffer_size, seed=seed, 

6690 seed2=seed2, count=count, 

6691 output_types=output_types, 

6692 output_shapes=output_shapes, 

6693 reshuffle_each_iteration=reshuffle_each_iteration, 

6694 metadata=metadata, name=name) 

6695 _result = _outputs[:] 

6696 if _execute.must_record_gradient(): 

6697 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

6698 _op.get_attr("output_shapes"), "reshuffle_each_iteration", 

6699 _op._get_attr_bool("reshuffle_each_iteration"), "metadata", 

6700 _op.get_attr("metadata")) 

6701 _inputs_flat = _op.inputs 

6702 _execute.record_gradient( 

6703 "ShuffleAndRepeatDataset", _inputs_flat, _attrs, _result) 

6704 _result, = _result 

6705 return _result 

6706 

6707ShuffleAndRepeatDataset = tf_export("raw_ops.ShuffleAndRepeatDataset")(_ops.to_raw_op(shuffle_and_repeat_dataset)) 

6708 

6709 

6710def shuffle_and_repeat_dataset_eager_fallback(input_dataset, buffer_size, seed, seed2, count, output_types, output_shapes, reshuffle_each_iteration, metadata, name, ctx): 

6711 if not isinstance(output_types, (list, tuple)): 

6712 raise TypeError( 

6713 "Expected list for 'output_types' argument to " 

6714 "'shuffle_and_repeat_dataset' Op, not %r." % output_types) 

6715 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

6716 if not isinstance(output_shapes, (list, tuple)): 

6717 raise TypeError( 

6718 "Expected list for 'output_shapes' argument to " 

6719 "'shuffle_and_repeat_dataset' Op, not %r." % output_shapes) 

6720 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

6721 if reshuffle_each_iteration is None: 

6722 reshuffle_each_iteration = True 

6723 reshuffle_each_iteration = _execute.make_bool(reshuffle_each_iteration, "reshuffle_each_iteration") 

6724 if metadata is None: 

6725 metadata = "" 

6726 metadata = _execute.make_str(metadata, "metadata") 

6727 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

6728 buffer_size = _ops.convert_to_tensor(buffer_size, _dtypes.int64) 

6729 seed = _ops.convert_to_tensor(seed, _dtypes.int64) 

6730 seed2 = _ops.convert_to_tensor(seed2, _dtypes.int64) 

6731 count = _ops.convert_to_tensor(count, _dtypes.int64) 

6732 _inputs_flat = [input_dataset, buffer_size, seed, seed2, count] 

6733 _attrs = ("output_types", output_types, "output_shapes", output_shapes, 

6734 "reshuffle_each_iteration", reshuffle_each_iteration, "metadata", metadata) 

6735 _result = _execute.execute(b"ShuffleAndRepeatDataset", 1, 

6736 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

6737 name=name) 

6738 if _execute.must_record_gradient(): 

6739 _execute.record_gradient( 

6740 "ShuffleAndRepeatDataset", _inputs_flat, _attrs, _result) 

6741 _result, = _result 

6742 return _result 

6743 

6744 
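# [Editor's example] Hypothetical usage sketch -- not part of this machine-
# generated file. The fused ShuffleAndRepeatDataset has the same semantics
# as shuffle-then-repeat; the public spelling below lowers to it when the
# shuffle_and_repeat fusion optimization is enabled.
import tensorflow as tf

ds = tf.data.Dataset.range(5).shuffle(buffer_size=5, seed=42).repeat(2)
print(len(list(ds)))  # 10 elements; each epoch is a permutation of 0..4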

6745def shuffle_and_repeat_dataset_v2(input_dataset, buffer_size, seed, seed2, count, seed_generator, output_types, output_shapes, reshuffle_each_iteration=True, metadata="", name=None): 

6746 r"""TODO: add doc. 

6747 

6748 Args: 

6749 input_dataset: A `Tensor` of type `variant`. 

6750 buffer_size: A `Tensor` of type `int64`. 

6751 seed: A `Tensor` of type `int64`. 

6752 seed2: A `Tensor` of type `int64`. 

6753 count: A `Tensor` of type `int64`. 

6754 seed_generator: A `Tensor` of type `resource`. 

6755 output_types: A list of `tf.DTypes` that has length `>= 1`. 

6756 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

6757 reshuffle_each_iteration: An optional `bool`. Defaults to `True`. 

6758 metadata: An optional `string`. Defaults to `""`. 

6759 name: A name for the operation (optional). 

6760 

6761 Returns: 

6762 A `Tensor` of type `variant`. 

6763 """ 

6764 _ctx = _context._context or _context.context() 

6765 tld = _ctx._thread_local_data 

6766 if tld.is_eager: 

6767 try: 

6768 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

6769 _ctx, "ShuffleAndRepeatDatasetV2", name, input_dataset, buffer_size, 

6770 seed, seed2, count, seed_generator, "reshuffle_each_iteration", 

6771 reshuffle_each_iteration, "output_types", output_types, 

6772 "output_shapes", output_shapes, "metadata", metadata) 

6773 return _result 

6774 except _core._NotOkStatusException as e: 

6775 _ops.raise_from_not_ok_status(e, name) 

6776 except _core._FallbackException: 

6777 pass 

6778 try: 

6779 return shuffle_and_repeat_dataset_v2_eager_fallback( 

6780 input_dataset, buffer_size, seed, seed2, count, seed_generator, 

6781 reshuffle_each_iteration=reshuffle_each_iteration, 

6782 output_types=output_types, output_shapes=output_shapes, 

6783 metadata=metadata, name=name, ctx=_ctx) 

6784 except _core._SymbolicException: 

6785 pass # Add nodes to the TensorFlow graph. 

6786 # Add nodes to the TensorFlow graph. 

6787 if not isinstance(output_types, (list, tuple)): 

6788 raise TypeError( 

6789 "Expected list for 'output_types' argument to " 

6790 "'shuffle_and_repeat_dataset_v2' Op, not %r." % output_types) 

6791 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

6792 if not isinstance(output_shapes, (list, tuple)): 

6793 raise TypeError( 

6794 "Expected list for 'output_shapes' argument to " 

6795 "'shuffle_and_repeat_dataset_v2' Op, not %r." % output_shapes) 

6796 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

6797 if reshuffle_each_iteration is None: 

6798 reshuffle_each_iteration = True 

6799 reshuffle_each_iteration = _execute.make_bool(reshuffle_each_iteration, "reshuffle_each_iteration") 

6800 if metadata is None: 

6801 metadata = "" 

6802 metadata = _execute.make_str(metadata, "metadata") 

6803 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

6804 "ShuffleAndRepeatDatasetV2", input_dataset=input_dataset, 

6805 buffer_size=buffer_size, seed=seed, 

6806 seed2=seed2, count=count, 

6807 seed_generator=seed_generator, 

6808 output_types=output_types, 

6809 output_shapes=output_shapes, 

6810 reshuffle_each_iteration=reshuffle_each_iteration, 

6811 metadata=metadata, name=name) 

6812 _result = _outputs[:] 

6813 if _execute.must_record_gradient(): 

6814 _attrs = ("reshuffle_each_iteration", 

6815 _op._get_attr_bool("reshuffle_each_iteration"), "output_types", 

6816 _op.get_attr("output_types"), "output_shapes", 

6817 _op.get_attr("output_shapes"), "metadata", 

6818 _op.get_attr("metadata")) 

6819 _inputs_flat = _op.inputs 

6820 _execute.record_gradient( 

6821 "ShuffleAndRepeatDatasetV2", _inputs_flat, _attrs, _result) 

6822 _result, = _result 

6823 return _result 

6824 

6825ShuffleAndRepeatDatasetV2 = tf_export("raw_ops.ShuffleAndRepeatDatasetV2")(_ops.to_raw_op(shuffle_and_repeat_dataset_v2)) 

6826 

6827 

6828def shuffle_and_repeat_dataset_v2_eager_fallback(input_dataset, buffer_size, seed, seed2, count, seed_generator, output_types, output_shapes, reshuffle_each_iteration, metadata, name, ctx): 

6829 if not isinstance(output_types, (list, tuple)): 

6830 raise TypeError( 

6831 "Expected list for 'output_types' argument to " 

6832 "'shuffle_and_repeat_dataset_v2' Op, not %r." % output_types) 

6833 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

6834 if not isinstance(output_shapes, (list, tuple)): 

6835 raise TypeError( 

6836 "Expected list for 'output_shapes' argument to " 

6837 "'shuffle_and_repeat_dataset_v2' Op, not %r." % output_shapes) 

6838 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

6839 if reshuffle_each_iteration is None: 

6840 reshuffle_each_iteration = True 

6841 reshuffle_each_iteration = _execute.make_bool(reshuffle_each_iteration, "reshuffle_each_iteration") 

6842 if metadata is None: 

6843 metadata = "" 

6844 metadata = _execute.make_str(metadata, "metadata") 

6845 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

6846 buffer_size = _ops.convert_to_tensor(buffer_size, _dtypes.int64) 

6847 seed = _ops.convert_to_tensor(seed, _dtypes.int64) 

6848 seed2 = _ops.convert_to_tensor(seed2, _dtypes.int64) 

6849 count = _ops.convert_to_tensor(count, _dtypes.int64) 

6850 seed_generator = _ops.convert_to_tensor(seed_generator, _dtypes.resource) 

6851 _inputs_flat = [input_dataset, buffer_size, seed, seed2, count, seed_generator] 

6852 _attrs = ("reshuffle_each_iteration", reshuffle_each_iteration, 

6853 "output_types", output_types, "output_shapes", output_shapes, "metadata", 

6854 metadata) 

6855 _result = _execute.execute(b"ShuffleAndRepeatDatasetV2", 1, 

6856 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

6857 name=name) 

6858 if _execute.must_record_gradient(): 

6859 _execute.record_gradient( 

6860 "ShuffleAndRepeatDatasetV2", _inputs_flat, _attrs, _result) 

6861 _result, = _result 

6862 return _result 

6863 

6864 

6865def shuffle_dataset(input_dataset, buffer_size, seed, seed2, output_types, output_shapes, reshuffle_each_iteration=True, metadata="", name=None): 

6866 r"""Creates a dataset that shuffles elements from `input_dataset` pseudorandomly. 

6867 

6868 Args: 

6869 input_dataset: A `Tensor` of type `variant`. 

6870 buffer_size: A `Tensor` of type `int64`. 

6871 The number of output elements to buffer in an iterator over 

6872 this dataset. Compare with the `min_after_dequeue` attr when creating a 

6873 `RandomShuffleQueue`. 

6874 seed: A `Tensor` of type `int64`. 

6875 A scalar seed for the random number generator. If either `seed` or 

6876 `seed2` is set to be non-zero, the random number generator is seeded 

6877 by the given seed. Otherwise, a random seed is used. 

6878 seed2: A `Tensor` of type `int64`. 

6879 A second scalar seed to avoid seed collision. 

6880 output_types: A list of `tf.DTypes` that has length `>= 1`. 

6881 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

6882 reshuffle_each_iteration: An optional `bool`. Defaults to `True`. 

6883 If true, each iterator over this dataset will be given 

6884 a different pseudorandomly generated seed, based on a sequence seeded by the 

6885 `seed` and `seed2` inputs. If false, each iterator will be given the same 

6886 seed, and repeated iteration over this dataset will yield the exact same 

6887 sequence of results. 

6888 metadata: An optional `string`. Defaults to `""`. 

6889 name: A name for the operation (optional). 

6890 

6891 Returns: 

6892 A `Tensor` of type `variant`. 

6893 """ 

6894 _ctx = _context._context or _context.context() 

6895 tld = _ctx._thread_local_data 

6896 if tld.is_eager: 

6897 try: 

6898 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

6899 _ctx, "ShuffleDataset", name, input_dataset, buffer_size, seed, seed2, 

6900 "reshuffle_each_iteration", reshuffle_each_iteration, "output_types", 

6901 output_types, "output_shapes", output_shapes, "metadata", metadata) 

6902 return _result 

6903 except _core._NotOkStatusException as e: 

6904 _ops.raise_from_not_ok_status(e, name) 

6905 except _core._FallbackException: 

6906 pass 

6907 try: 

6908 return shuffle_dataset_eager_fallback( 

6909 input_dataset, buffer_size, seed, seed2, 

6910 reshuffle_each_iteration=reshuffle_each_iteration, 

6911 output_types=output_types, output_shapes=output_shapes, 

6912 metadata=metadata, name=name, ctx=_ctx) 

6913 except _core._SymbolicException: 

6914 pass # Add nodes to the TensorFlow graph. 

6915 # Add nodes to the TensorFlow graph. 

6916 if not isinstance(output_types, (list, tuple)): 

6917 raise TypeError( 

6918 "Expected list for 'output_types' argument to " 

6919 "'shuffle_dataset' Op, not %r." % output_types) 

6920 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

6921 if not isinstance(output_shapes, (list, tuple)): 

6922 raise TypeError( 

6923 "Expected list for 'output_shapes' argument to " 

6924 "'shuffle_dataset' Op, not %r." % output_shapes) 

6925 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

6926 if reshuffle_each_iteration is None: 

6927 reshuffle_each_iteration = True 

6928 reshuffle_each_iteration = _execute.make_bool(reshuffle_each_iteration, "reshuffle_each_iteration") 

6929 if metadata is None: 

6930 metadata = "" 

6931 metadata = _execute.make_str(metadata, "metadata") 

6932 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

6933 "ShuffleDataset", input_dataset=input_dataset, 

6934 buffer_size=buffer_size, seed=seed, seed2=seed2, 

6935 output_types=output_types, 

6936 output_shapes=output_shapes, 

6937 reshuffle_each_iteration=reshuffle_each_iteration, 

6938 metadata=metadata, name=name) 

6939 _result = _outputs[:] 

6940 if _execute.must_record_gradient(): 

6941 _attrs = ("reshuffle_each_iteration", 

6942 _op._get_attr_bool("reshuffle_each_iteration"), "output_types", 

6943 _op.get_attr("output_types"), "output_shapes", 

6944 _op.get_attr("output_shapes"), "metadata", 

6945 _op.get_attr("metadata")) 

6946 _inputs_flat = _op.inputs 

6947 _execute.record_gradient( 

6948 "ShuffleDataset", _inputs_flat, _attrs, _result) 

6949 _result, = _result 

6950 return _result 

6951 

6952ShuffleDataset = tf_export("raw_ops.ShuffleDataset")(_ops.to_raw_op(shuffle_dataset)) 

6953 

6954 

6955def shuffle_dataset_eager_fallback(input_dataset, buffer_size, seed, seed2, output_types, output_shapes, reshuffle_each_iteration, metadata, name, ctx): 

6956 if not isinstance(output_types, (list, tuple)): 

6957 raise TypeError( 

6958 "Expected list for 'output_types' argument to " 

6959 "'shuffle_dataset' Op, not %r." % output_types) 

6960 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

6961 if not isinstance(output_shapes, (list, tuple)): 

6962 raise TypeError( 

6963 "Expected list for 'output_shapes' argument to " 

6964 "'shuffle_dataset' Op, not %r." % output_shapes) 

6965 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

6966 if reshuffle_each_iteration is None: 

6967 reshuffle_each_iteration = True 

6968 reshuffle_each_iteration = _execute.make_bool(reshuffle_each_iteration, "reshuffle_each_iteration") 

6969 if metadata is None: 

6970 metadata = "" 

6971 metadata = _execute.make_str(metadata, "metadata") 

6972 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

6973 buffer_size = _ops.convert_to_tensor(buffer_size, _dtypes.int64) 

6974 seed = _ops.convert_to_tensor(seed, _dtypes.int64) 

6975 seed2 = _ops.convert_to_tensor(seed2, _dtypes.int64) 

6976 _inputs_flat = [input_dataset, buffer_size, seed, seed2] 

6977 _attrs = ("reshuffle_each_iteration", reshuffle_each_iteration, 

6978 "output_types", output_types, "output_shapes", output_shapes, "metadata", 

6979 metadata) 

6980 _result = _execute.execute(b"ShuffleDataset", 1, inputs=_inputs_flat, 

6981 attrs=_attrs, ctx=ctx, name=name) 

6982 if _execute.must_record_gradient(): 

6983 _execute.record_gradient( 

6984 "ShuffleDataset", _inputs_flat, _attrs, _result) 

6985 _result, = _result 

6986 return _result 

6987 

6988 
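# [Editor's example] Hypothetical usage sketch -- not part of this machine-
# generated file. With reshuffle_each_iteration=True (the default) each epoch
# is reseeded, so two passes usually differ; with False the order replays.
import tensorflow as tf

ds = tf.data.Dataset.range(5).shuffle(
    buffer_size=5, seed=7, reshuffle_each_iteration=False)
first = [int(x) for x in ds]
second = [int(x) for x in ds]
assert first == second  # identical order on every pass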

6989def shuffle_dataset_v2(input_dataset, buffer_size, seed_generator, output_types, output_shapes, metadata="", name=None): 

6990 r"""TODO: add doc. 

6991 

6992 Args: 

6993 input_dataset: A `Tensor` of type `variant`. 

6994 buffer_size: A `Tensor` of type `int64`. 

6995 seed_generator: A `Tensor` of type `resource`. 

6996 output_types: A list of `tf.DTypes` that has length `>= 1`. 

6997 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

6998 metadata: An optional `string`. Defaults to `""`. 

6999 name: A name for the operation (optional). 

7000 

7001 Returns: 

7002 A `Tensor` of type `variant`. 

7003 """ 

7004 _ctx = _context._context or _context.context() 

7005 tld = _ctx._thread_local_data 

7006 if tld.is_eager: 

7007 try: 

7008 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

7009 _ctx, "ShuffleDatasetV2", name, input_dataset, buffer_size, 

7010 seed_generator, "output_types", output_types, "output_shapes", 

7011 output_shapes, "metadata", metadata) 

7012 return _result 

7013 except _core._NotOkStatusException as e: 

7014 _ops.raise_from_not_ok_status(e, name) 

7015 except _core._FallbackException: 

7016 pass 

7017 try: 

7018 return shuffle_dataset_v2_eager_fallback( 

7019 input_dataset, buffer_size, seed_generator, 

7020 output_types=output_types, output_shapes=output_shapes, 

7021 metadata=metadata, name=name, ctx=_ctx) 

7022 except _core._SymbolicException: 

7023 pass # Add nodes to the TensorFlow graph. 

7024 # Add nodes to the TensorFlow graph. 

7025 if not isinstance(output_types, (list, tuple)): 

7026 raise TypeError( 

7027 "Expected list for 'output_types' argument to " 

7028 "'shuffle_dataset_v2' Op, not %r." % output_types) 

7029 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

7030 if not isinstance(output_shapes, (list, tuple)): 

7031 raise TypeError( 

7032 "Expected list for 'output_shapes' argument to " 

7033 "'shuffle_dataset_v2' Op, not %r." % output_shapes) 

7034 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

7035 if metadata is None: 

7036 metadata = "" 

7037 metadata = _execute.make_str(metadata, "metadata") 

7038 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

7039 "ShuffleDatasetV2", input_dataset=input_dataset, 

7040 buffer_size=buffer_size, 

7041 seed_generator=seed_generator, 

7042 output_types=output_types, 

7043 output_shapes=output_shapes, metadata=metadata, 

7044 name=name) 

7045 _result = _outputs[:] 

7046 if _execute.must_record_gradient(): 

7047 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

7048 _op.get_attr("output_shapes"), "metadata", 

7049 _op.get_attr("metadata")) 

7050 _inputs_flat = _op.inputs 

7051 _execute.record_gradient( 

7052 "ShuffleDatasetV2", _inputs_flat, _attrs, _result) 

7053 _result, = _result 

7054 return _result 

7055 

7056ShuffleDatasetV2 = tf_export("raw_ops.ShuffleDatasetV2")(_ops.to_raw_op(shuffle_dataset_v2)) 

7057 

7058 

7059def shuffle_dataset_v2_eager_fallback(input_dataset, buffer_size, seed_generator, output_types, output_shapes, metadata, name, ctx): 

7060 if not isinstance(output_types, (list, tuple)): 

7061 raise TypeError( 

7062 "Expected list for 'output_types' argument to " 

7063 "'shuffle_dataset_v2' Op, not %r." % output_types) 

7064 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

7065 if not isinstance(output_shapes, (list, tuple)): 

7066 raise TypeError( 

7067 "Expected list for 'output_shapes' argument to " 

7068 "'shuffle_dataset_v2' Op, not %r." % output_shapes) 

7069 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

7070 if metadata is None: 

7071 metadata = "" 

7072 metadata = _execute.make_str(metadata, "metadata") 

7073 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

7074 buffer_size = _ops.convert_to_tensor(buffer_size, _dtypes.int64) 

7075 seed_generator = _ops.convert_to_tensor(seed_generator, _dtypes.resource) 

7076 _inputs_flat = [input_dataset, buffer_size, seed_generator] 

7077 _attrs = ("output_types", output_types, "output_shapes", output_shapes, 

7078 "metadata", metadata) 

7079 _result = _execute.execute(b"ShuffleDatasetV2", 1, inputs=_inputs_flat, 

7080 attrs=_attrs, ctx=ctx, name=name) 

7081 if _execute.must_record_gradient(): 

7082 _execute.record_gradient( 

7083 "ShuffleDatasetV2", _inputs_flat, _attrs, _result) 

7084 _result, = _result 

7085 return _result 

7086 

7087 

7088def shuffle_dataset_v3(input_dataset, buffer_size, seed, seed2, seed_generator, output_types, output_shapes, reshuffle_each_iteration=True, metadata="", name=None): 

7089 r"""TODO: add doc. 

7090 

7091 Args: 

7092 input_dataset: A `Tensor` of type `variant`. 

7093 buffer_size: A `Tensor` of type `int64`. 

7094 seed: A `Tensor` of type `int64`. 

7095 seed2: A `Tensor` of type `int64`. 

7096 seed_generator: A `Tensor` of type `resource`. 

7097 output_types: A list of `tf.DTypes` that has length `>= 1`. 

7098 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

7099 reshuffle_each_iteration: An optional `bool`. Defaults to `True`. 

7100 metadata: An optional `string`. Defaults to `""`. 

7101 name: A name for the operation (optional). 

7102 

7103 Returns: 

7104 A `Tensor` of type `variant`. 

7105 """ 

7106 _ctx = _context._context or _context.context() 

7107 tld = _ctx._thread_local_data 

7108 if tld.is_eager: 

7109 try: 

7110 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

7111 _ctx, "ShuffleDatasetV3", name, input_dataset, buffer_size, seed, 

7112 seed2, seed_generator, "reshuffle_each_iteration", 

7113 reshuffle_each_iteration, "output_types", output_types, 

7114 "output_shapes", output_shapes, "metadata", metadata) 

7115 return _result 

7116 except _core._NotOkStatusException as e: 

7117 _ops.raise_from_not_ok_status(e, name) 

7118 except _core._FallbackException: 

7119 pass 

7120 try: 

7121 return shuffle_dataset_v3_eager_fallback( 

7122 input_dataset, buffer_size, seed, seed2, seed_generator, 

7123 reshuffle_each_iteration=reshuffle_each_iteration, 

7124 output_types=output_types, output_shapes=output_shapes, 

7125 metadata=metadata, name=name, ctx=_ctx) 

7126 except _core._SymbolicException: 

7127 pass # Add nodes to the TensorFlow graph. 

7128 # Add nodes to the TensorFlow graph. 

7129 if not isinstance(output_types, (list, tuple)): 

7130 raise TypeError( 

7131 "Expected list for 'output_types' argument to " 

7132 "'shuffle_dataset_v3' Op, not %r." % output_types) 

7133 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

7134 if not isinstance(output_shapes, (list, tuple)): 

7135 raise TypeError( 

7136 "Expected list for 'output_shapes' argument to " 

7137 "'shuffle_dataset_v3' Op, not %r." % output_shapes) 

7138 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

7139 if reshuffle_each_iteration is None: 

7140 reshuffle_each_iteration = True 

7141 reshuffle_each_iteration = _execute.make_bool(reshuffle_each_iteration, "reshuffle_each_iteration") 

7142 if metadata is None: 

7143 metadata = "" 

7144 metadata = _execute.make_str(metadata, "metadata") 

7145 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

7146 "ShuffleDatasetV3", input_dataset=input_dataset, 

7147 buffer_size=buffer_size, seed=seed, seed2=seed2, 

7148 seed_generator=seed_generator, 

7149 output_types=output_types, 

7150 output_shapes=output_shapes, 

7151 reshuffle_each_iteration=reshuffle_each_iteration, 

7152 metadata=metadata, name=name) 

7153 _result = _outputs[:] 

7154 if _execute.must_record_gradient(): 

7155 _attrs = ("reshuffle_each_iteration", 

7156 _op._get_attr_bool("reshuffle_each_iteration"), "output_types", 

7157 _op.get_attr("output_types"), "output_shapes", 

7158 _op.get_attr("output_shapes"), "metadata", 

7159 _op.get_attr("metadata")) 

7160 _inputs_flat = _op.inputs 

7161 _execute.record_gradient( 

7162 "ShuffleDatasetV3", _inputs_flat, _attrs, _result) 

7163 _result, = _result 

7164 return _result 

7165 

7166ShuffleDatasetV3 = tf_export("raw_ops.ShuffleDatasetV3")(_ops.to_raw_op(shuffle_dataset_v3)) 

7167 

7168 

7169def shuffle_dataset_v3_eager_fallback(input_dataset, buffer_size, seed, seed2, seed_generator, output_types, output_shapes, reshuffle_each_iteration, metadata, name, ctx): 

7170 if not isinstance(output_types, (list, tuple)): 

7171 raise TypeError( 

7172 "Expected list for 'output_types' argument to " 

7173 "'shuffle_dataset_v3' Op, not %r." % output_types) 

7174 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

7175 if not isinstance(output_shapes, (list, tuple)): 

7176 raise TypeError( 

7177 "Expected list for 'output_shapes' argument to " 

7178 "'shuffle_dataset_v3' Op, not %r." % output_shapes) 

7179 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

7180 if reshuffle_each_iteration is None: 

7181 reshuffle_each_iteration = True 

7182 reshuffle_each_iteration = _execute.make_bool(reshuffle_each_iteration, "reshuffle_each_iteration") 

7183 if metadata is None: 

7184 metadata = "" 

7185 metadata = _execute.make_str(metadata, "metadata") 

7186 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

7187 buffer_size = _ops.convert_to_tensor(buffer_size, _dtypes.int64) 

7188 seed = _ops.convert_to_tensor(seed, _dtypes.int64) 

7189 seed2 = _ops.convert_to_tensor(seed2, _dtypes.int64) 

7190 seed_generator = _ops.convert_to_tensor(seed_generator, _dtypes.resource) 

7191 _inputs_flat = [input_dataset, buffer_size, seed, seed2, seed_generator] 

7192 _attrs = ("reshuffle_each_iteration", reshuffle_each_iteration, 

7193 "output_types", output_types, "output_shapes", output_shapes, "metadata", 

7194 metadata) 

7195 _result = _execute.execute(b"ShuffleDatasetV3", 1, inputs=_inputs_flat, 

7196 attrs=_attrs, ctx=ctx, name=name) 

7197 if _execute.must_record_gradient(): 

7198 _execute.record_gradient( 

7199 "ShuffleDatasetV3", _inputs_flat, _attrs, _result) 

7200 _result, = _result 

7201 return _result 

7202 

7203 

7204def skip_dataset(input_dataset, count, output_types, output_shapes, metadata="", name=None): 

7205 r"""Creates a dataset that skips `count` elements from the `input_dataset`. 

7206 

7207 Args: 

7208 input_dataset: A `Tensor` of type `variant`. 

7209 count: A `Tensor` of type `int64`. 

7210 A scalar representing the number of elements from the `input_dataset` 

7211 that should be skipped. If `count` is -1, skips everything. 

7212 output_types: A list of `tf.DTypes` that has length `>= 1`. 

7213 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

7214 metadata: An optional `string`. Defaults to `""`. 

7215 name: A name for the operation (optional). 

7216 

7217 Returns: 

7218 A `Tensor` of type `variant`. 

7219 """ 

7220 _ctx = _context._context or _context.context() 

7221 tld = _ctx._thread_local_data 

7222 if tld.is_eager: 

7223 try: 

7224 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

7225 _ctx, "SkipDataset", name, input_dataset, count, "output_types", 

7226 output_types, "output_shapes", output_shapes, "metadata", metadata) 

7227 return _result 

7228 except _core._NotOkStatusException as e: 

7229 _ops.raise_from_not_ok_status(e, name) 

7230 except _core._FallbackException: 

7231 pass 

7232 try: 

7233 return skip_dataset_eager_fallback( 

7234 input_dataset, count, output_types=output_types, 

7235 output_shapes=output_shapes, metadata=metadata, name=name, ctx=_ctx) 

7236 except _core._SymbolicException: 

7237 pass # Add nodes to the TensorFlow graph. 

7238 # Add nodes to the TensorFlow graph. 

7239 if not isinstance(output_types, (list, tuple)): 

7240 raise TypeError( 

7241 "Expected list for 'output_types' argument to " 

7242 "'skip_dataset' Op, not %r." % output_types) 

7243 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

7244 if not isinstance(output_shapes, (list, tuple)): 

7245 raise TypeError( 

7246 "Expected list for 'output_shapes' argument to " 

7247 "'skip_dataset' Op, not %r." % output_shapes) 

7248 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

7249 if metadata is None: 

7250 metadata = "" 

7251 metadata = _execute.make_str(metadata, "metadata") 

7252 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

7253 "SkipDataset", input_dataset=input_dataset, count=count, 

7254 output_types=output_types, output_shapes=output_shapes, 

7255 metadata=metadata, name=name) 

7256 _result = _outputs[:] 

7257 if _execute.must_record_gradient(): 

7258 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

7259 _op.get_attr("output_shapes"), "metadata", 

7260 _op.get_attr("metadata")) 

7261 _inputs_flat = _op.inputs 

7262 _execute.record_gradient( 

7263 "SkipDataset", _inputs_flat, _attrs, _result) 

7264 _result, = _result 

7265 return _result 

7266 

7267SkipDataset = tf_export("raw_ops.SkipDataset")(_ops.to_raw_op(skip_dataset)) 
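
# --- Editorial sketch (not machine generated): driving SkipDataset through
# `tf.raw_ops.SkipDataset`, the alias exported just above. `_variant_tensor`
# is a private handle on tf.data datasets, used here purely for illustration;
# assumes an eager TF2 runtime.
def _skip_dataset_sketch():  # never called; illustrative only
  import tensorflow as tf
  base = tf.data.Dataset.range(10)
  variant = tf.raw_ops.SkipDataset(
      input_dataset=base._variant_tensor,  # private handle; illustrative only
      count=tf.constant(3, tf.int64),
      output_types=[tf.int64],
      output_shapes=[tf.TensorShape([])])
  return variant  # a variant tensor representing elements 3..9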

7268 

7269 

7270def skip_dataset_eager_fallback(input_dataset, count, output_types, output_shapes, metadata, name, ctx): 

7271 if not isinstance(output_types, (list, tuple)): 

7272 raise TypeError( 

7273 "Expected list for 'output_types' argument to " 

7274 "'skip_dataset' Op, not %r." % output_types) 

7275 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

7276 if not isinstance(output_shapes, (list, tuple)): 

7277 raise TypeError( 

7278 "Expected list for 'output_shapes' argument to " 

7279 "'skip_dataset' Op, not %r." % output_shapes) 

7280 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

7281 if metadata is None: 

7282 metadata = "" 

7283 metadata = _execute.make_str(metadata, "metadata") 

7284 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

7285 count = _ops.convert_to_tensor(count, _dtypes.int64) 

7286 _inputs_flat = [input_dataset, count] 

7287 _attrs = ("output_types", output_types, "output_shapes", output_shapes, 

7288 "metadata", metadata) 

7289 _result = _execute.execute(b"SkipDataset", 1, inputs=_inputs_flat, 

7290 attrs=_attrs, ctx=ctx, name=name) 

7291 if _execute.must_record_gradient(): 

7292 _execute.record_gradient( 

7293 "SkipDataset", _inputs_flat, _attrs, _result) 

7294 _result, = _result 

7295 return _result 

7296 

7297 

7298def sparse_tensor_slice_dataset(indices, values, dense_shape, name=None): 

7299 r"""Creates a dataset that splits a SparseTensor into elements row-wise. 

7300 

7301 Args: 

7302 indices: A `Tensor` of type `int64`. 

7303 values: A `Tensor`. 

7304 dense_shape: A `Tensor` of type `int64`. 

7305 name: A name for the operation (optional). 

7306 

7307 Returns: 

7308 A `Tensor` of type `variant`. 

7309 """ 

7310 _ctx = _context._context or _context.context() 

7311 tld = _ctx._thread_local_data 

7312 if tld.is_eager: 

7313 try: 

7314 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

7315 _ctx, "SparseTensorSliceDataset", name, indices, values, dense_shape) 

7316 return _result 

7317 except _core._NotOkStatusException as e: 

7318 _ops.raise_from_not_ok_status(e, name) 

7319 except _core._FallbackException: 

7320 pass 

7321 try: 

7322 return sparse_tensor_slice_dataset_eager_fallback( 

7323 indices, values, dense_shape, name=name, ctx=_ctx) 

7324 except _core._SymbolicException: 

7325 pass # Add nodes to the TensorFlow graph. 

7326 # Add nodes to the TensorFlow graph. 

7327 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

7328 "SparseTensorSliceDataset", indices=indices, values=values, 

7329 dense_shape=dense_shape, name=name) 

7330 _result = _outputs[:] 

7331 if _execute.must_record_gradient(): 

7332 _attrs = ("Tvalues", _op._get_attr_type("Tvalues")) 

7333 _inputs_flat = _op.inputs 

7334 _execute.record_gradient( 

7335 "SparseTensorSliceDataset", _inputs_flat, _attrs, _result) 

7336 _result, = _result 

7337 return _result 

7338 

7339SparseTensorSliceDataset = tf_export("raw_ops.SparseTensorSliceDataset")(_ops.to_raw_op(sparse_tensor_slice_dataset)) 
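
# --- Editorial sketch (not machine generated): SparseTensorSliceDataset via
# the `tf.raw_ops` alias above, slicing a 3x4 SparseTensor into three row
# elements. Assumes an eager TF2 runtime.
def _sparse_tensor_slice_dataset_sketch():  # never called; illustrative only
  import tensorflow as tf
  variant = tf.raw_ops.SparseTensorSliceDataset(
      indices=tf.constant([[0, 0], [1, 2], [2, 3]], tf.int64),
      values=tf.constant([1.0, 2.0, 3.0]),
      dense_shape=tf.constant([3, 4], tf.int64))
  return variant  # each element is one row: (indices, values, dense_shape)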

7340 

7341 

7342def sparse_tensor_slice_dataset_eager_fallback(indices, values, dense_shape, name, ctx): 

7343 _attr_Tvalues, (values,) = _execute.args_to_matching_eager([values], ctx, []) 

7344 indices = _ops.convert_to_tensor(indices, _dtypes.int64) 

7345 dense_shape = _ops.convert_to_tensor(dense_shape, _dtypes.int64) 

7346 _inputs_flat = [indices, values, dense_shape] 

7347 _attrs = ("Tvalues", _attr_Tvalues) 

7348 _result = _execute.execute(b"SparseTensorSliceDataset", 1, 

7349 inputs=_inputs_flat, attrs=_attrs, ctx=ctx, 

7350 name=name) 

7351 if _execute.must_record_gradient(): 

7352 _execute.record_gradient( 

7353 "SparseTensorSliceDataset", _inputs_flat, _attrs, _result) 

7354 _result, = _result 

7355 return _result 

7356 

7357 

7358def tf_record_dataset(filenames, compression_type, buffer_size, metadata="", name=None): 

7359 r"""Creates a dataset that emits the records from one or more TFRecord files. 

7360 

7361 Args: 

7362 filenames: A `Tensor` of type `string`. 

7363 A scalar or vector containing the name(s) of the file(s) to be 

7364 read. 

7365 compression_type: A `Tensor` of type `string`. 

7366 A scalar containing either (i) the empty string (no 

7367 compression), (ii) "ZLIB", or (iii) "GZIP". 

7368 buffer_size: A `Tensor` of type `int64`. 

7369 A scalar representing the number of bytes to buffer. A value of 

7370 0 means no buffering will be performed. 

7371 metadata: An optional `string`. Defaults to `""`. 

7372 name: A name for the operation (optional). 

7373 

7374 Returns: 

7375 A `Tensor` of type `variant`. 

7376 """ 

7377 _ctx = _context._context or _context.context() 

7378 tld = _ctx._thread_local_data 

7379 if tld.is_eager: 

7380 try: 

7381 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

7382 _ctx, "TFRecordDataset", name, filenames, compression_type, 

7383 buffer_size, "metadata", metadata) 

7384 return _result 

7385 except _core._NotOkStatusException as e: 

7386 _ops.raise_from_not_ok_status(e, name) 

7387 except _core._FallbackException: 

7388 pass 

7389 try: 

7390 return tf_record_dataset_eager_fallback( 

7391 filenames, compression_type, buffer_size, metadata=metadata, 

7392 name=name, ctx=_ctx) 

7393 except _core._SymbolicException: 

7394 pass # Add nodes to the TensorFlow graph. 

7395 # Add nodes to the TensorFlow graph. 

7396 if metadata is None: 

7397 metadata = "" 

7398 metadata = _execute.make_str(metadata, "metadata") 

7399 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

7400 "TFRecordDataset", filenames=filenames, 

7401 compression_type=compression_type, 

7402 buffer_size=buffer_size, metadata=metadata, 

7403 name=name) 

7404 _result = _outputs[:] 

7405 if _execute.must_record_gradient(): 

7406 _attrs = ("metadata", _op.get_attr("metadata")) 

7407 _inputs_flat = _op.inputs 

7408 _execute.record_gradient( 

7409 "TFRecordDataset", _inputs_flat, _attrs, _result) 

7410 _result, = _result 

7411 return _result 

7412 

7413TFRecordDataset = tf_export("raw_ops.TFRecordDataset")(_ops.to_raw_op(tf_record_dataset)) 
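
# --- Editorial sketch (not machine generated): TFRecordDataset via the
# `tf.raw_ops` alias above. "/tmp/example.tfrecord" is a hypothetical path;
# the file only needs to exist once the dataset is iterated, not when the op
# is constructed. Assumes an eager TF2 runtime.
def _tf_record_dataset_sketch():  # never called; illustrative only
  import tensorflow as tf
  variant = tf.raw_ops.TFRecordDataset(
      filenames=tf.constant(["/tmp/example.tfrecord"]),  # hypothetical path
      compression_type=tf.constant(""),  # "" | "ZLIB" | "GZIP"
      buffer_size=tf.constant(262144, tf.int64))  # 256 KiB read buffer
  return variant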

7414 

7415 

7416def tf_record_dataset_eager_fallback(filenames, compression_type, buffer_size, metadata, name, ctx): 

7417 if metadata is None: 

7418 metadata = "" 

7419 metadata = _execute.make_str(metadata, "metadata") 

7420 filenames = _ops.convert_to_tensor(filenames, _dtypes.string) 

7421 compression_type = _ops.convert_to_tensor(compression_type, _dtypes.string) 

7422 buffer_size = _ops.convert_to_tensor(buffer_size, _dtypes.int64) 

7423 _inputs_flat = [filenames, compression_type, buffer_size] 

7424 _attrs = ("metadata", metadata) 

7425 _result = _execute.execute(b"TFRecordDataset", 1, inputs=_inputs_flat, 

7426 attrs=_attrs, ctx=ctx, name=name) 

7427 if _execute.must_record_gradient(): 

7428 _execute.record_gradient( 

7429 "TFRecordDataset", _inputs_flat, _attrs, _result) 

7430 _result, = _result 

7431 return _result 

7432 

7433 

7434def take_dataset(input_dataset, count, output_types, output_shapes, metadata="", name=None): 

7435 r"""Creates a dataset that contains `count` elements from the `input_dataset`. 

7436 

7437 Args: 

7438 input_dataset: A `Tensor` of type `variant`. 

7439 count: A `Tensor` of type `int64`. 

7440 A scalar representing the number of elements from the `input_dataset` 

7441 that should be taken. A value of `-1` indicates that all of `input_dataset` 

7442 is taken. 

7443 output_types: A list of `tf.DTypes` that has length `>= 1`. 

7444 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

7445 metadata: An optional `string`. Defaults to `""`. 

7446 name: A name for the operation (optional). 

7447 

7448 Returns: 

7449 A `Tensor` of type `variant`. 

7450 """ 

7451 _ctx = _context._context or _context.context() 

7452 tld = _ctx._thread_local_data 

7453 if tld.is_eager: 

7454 try: 

7455 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

7456 _ctx, "TakeDataset", name, input_dataset, count, "output_types", 

7457 output_types, "output_shapes", output_shapes, "metadata", metadata) 

7458 return _result 

7459 except _core._NotOkStatusException as e: 

7460 _ops.raise_from_not_ok_status(e, name) 

7461 except _core._FallbackException: 

7462 pass 

7463 try: 

7464 return take_dataset_eager_fallback( 

7465 input_dataset, count, output_types=output_types, 

7466 output_shapes=output_shapes, metadata=metadata, name=name, ctx=_ctx) 

7467 except _core._SymbolicException: 

7468 pass # Add nodes to the TensorFlow graph. 

7469 # Add nodes to the TensorFlow graph. 

7470 if not isinstance(output_types, (list, tuple)): 

7471 raise TypeError( 

7472 "Expected list for 'output_types' argument to " 

7473 "'take_dataset' Op, not %r." % output_types) 

7474 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

7475 if not isinstance(output_shapes, (list, tuple)): 

7476 raise TypeError( 

7477 "Expected list for 'output_shapes' argument to " 

7478 "'take_dataset' Op, not %r." % output_shapes) 

7479 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

7480 if metadata is None: 

7481 metadata = "" 

7482 metadata = _execute.make_str(metadata, "metadata") 

7483 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

7484 "TakeDataset", input_dataset=input_dataset, count=count, 

7485 output_types=output_types, output_shapes=output_shapes, 

7486 metadata=metadata, name=name) 

7487 _result = _outputs[:] 

7488 if _execute.must_record_gradient(): 

7489 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

7490 _op.get_attr("output_shapes"), "metadata", 

7491 _op.get_attr("metadata")) 

7492 _inputs_flat = _op.inputs 

7493 _execute.record_gradient( 

7494 "TakeDataset", _inputs_flat, _attrs, _result) 

7495 _result, = _result 

7496 return _result 

7497 

7498TakeDataset = tf_export("raw_ops.TakeDataset")(_ops.to_raw_op(take_dataset)) 
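
# --- Editorial sketch (not machine generated): TakeDataset is the complement
# of SkipDataset above; the high-level spelling is `ds.take(count)`. Uses the
# private `_variant_tensor` handle for illustration; assumes eager TF2.
def _take_dataset_sketch():  # never called; illustrative only
  import tensorflow as tf
  base = tf.data.Dataset.range(10)
  variant = tf.raw_ops.TakeDataset(
      input_dataset=base._variant_tensor,  # private handle; illustrative only
      count=tf.constant(3, tf.int64),      # -1 would take everything
      output_types=[tf.int64],
      output_shapes=[tf.TensorShape([])])
  return variant  # a variant tensor representing elements 0..2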

7499 

7500 

7501def take_dataset_eager_fallback(input_dataset, count, output_types, output_shapes, metadata, name, ctx): 

7502 if not isinstance(output_types, (list, tuple)): 

7503 raise TypeError( 

7504 "Expected list for 'output_types' argument to " 

7505 "'take_dataset' Op, not %r." % output_types) 

7506 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

7507 if not isinstance(output_shapes, (list, tuple)): 

7508 raise TypeError( 

7509 "Expected list for 'output_shapes' argument to " 

7510 "'take_dataset' Op, not %r." % output_shapes) 

7511 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

7512 if metadata is None: 

7513 metadata = "" 

7514 metadata = _execute.make_str(metadata, "metadata") 

7515 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

7516 count = _ops.convert_to_tensor(count, _dtypes.int64) 

7517 _inputs_flat = [input_dataset, count] 

7518 _attrs = ("output_types", output_types, "output_shapes", output_shapes, 

7519 "metadata", metadata) 

7520 _result = _execute.execute(b"TakeDataset", 1, inputs=_inputs_flat, 

7521 attrs=_attrs, ctx=ctx, name=name) 

7522 if _execute.must_record_gradient(): 

7523 _execute.record_gradient( 

7524 "TakeDataset", _inputs_flat, _attrs, _result) 

7525 _result, = _result 

7526 return _result 

7527 

7528 

7529def tensor_dataset(components, output_shapes, metadata="", name=None): 

7530 r"""Creates a dataset that emits `components` as a tuple of tensors once. 

7531 

7532 Args: 

7533 components: A list of `Tensor` objects. 

7534 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

7535 metadata: An optional `string`. Defaults to `""`. 

7536 name: A name for the operation (optional). 

7537 

7538 Returns: 

7539 A `Tensor` of type `variant`. 

7540 """ 

7541 _ctx = _context._context or _context.context() 

7542 tld = _ctx._thread_local_data 

7543 if tld.is_eager: 

7544 try: 

7545 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

7546 _ctx, "TensorDataset", name, components, "output_shapes", 

7547 output_shapes, "metadata", metadata) 

7548 return _result 

7549 except _core._NotOkStatusException as e: 

7550 _ops.raise_from_not_ok_status(e, name) 

7551 except _core._FallbackException: 

7552 pass 

7553 try: 

7554 return tensor_dataset_eager_fallback( 

7555 components, output_shapes=output_shapes, metadata=metadata, 

7556 name=name, ctx=_ctx) 

7557 except _core._SymbolicException: 

7558 pass # Add nodes to the TensorFlow graph. 

7559 # Add nodes to the TensorFlow graph. 

7560 if not isinstance(output_shapes, (list, tuple)): 

7561 raise TypeError( 

7562 "Expected list for 'output_shapes' argument to " 

7563 "'tensor_dataset' Op, not %r." % output_shapes) 

7564 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

7565 if metadata is None: 

7566 metadata = "" 

7567 metadata = _execute.make_str(metadata, "metadata") 

7568 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

7569 "TensorDataset", components=components, output_shapes=output_shapes, 

7570 metadata=metadata, name=name) 

7571 _result = _outputs[:] 

7572 if _execute.must_record_gradient(): 

7573 _attrs = ("Toutput_types", _op.get_attr("Toutput_types"), "output_shapes", 

7574 _op.get_attr("output_shapes"), "metadata", 

7575 _op.get_attr("metadata")) 

7576 _inputs_flat = _op.inputs 

7577 _execute.record_gradient( 

7578 "TensorDataset", _inputs_flat, _attrs, _result) 

7579 _result, = _result 

7580 return _result 

7581 

7582TensorDataset = tf_export("raw_ops.TensorDataset")(_ops.to_raw_op(tensor_dataset)) 
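
# --- Editorial sketch (not machine generated): TensorDataset emits its
# `components` exactly once as a single tuple element; the high-level spelling
# is `tf.data.Dataset.from_tensors(...)`. Assumes an eager TF2 runtime.
def _tensor_dataset_sketch():  # never called; illustrative only
  import tensorflow as tf
  variant = tf.raw_ops.TensorDataset(
      components=[tf.constant([1, 2, 3]), tf.constant("tag")],
      output_shapes=[tf.TensorShape([3]), tf.TensorShape([])])
  return variant  # one element: (length-3 int32 vector, scalar string)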

7583 

7584 

7585def tensor_dataset_eager_fallback(components, output_shapes, metadata, name, ctx): 

7586 if not isinstance(output_shapes, (list, tuple)): 

7587 raise TypeError( 

7588 "Expected list for 'output_shapes' argument to " 

7589 "'tensor_dataset' Op, not %r." % output_shapes) 

7590 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

7591 if metadata is None: 

7592 metadata = "" 

7593 metadata = _execute.make_str(metadata, "metadata") 

7594 _attr_Toutput_types, components = _execute.convert_to_mixed_eager_tensors(components, ctx) 

7595 _inputs_flat = list(components) 

7596 _attrs = ("Toutput_types", _attr_Toutput_types, "output_shapes", 

7597 output_shapes, "metadata", metadata) 

7598 _result = _execute.execute(b"TensorDataset", 1, inputs=_inputs_flat, 

7599 attrs=_attrs, ctx=ctx, name=name) 

7600 if _execute.must_record_gradient(): 

7601 _execute.record_gradient( 

7602 "TensorDataset", _inputs_flat, _attrs, _result) 

7603 _result, = _result 

7604 return _result 

7605 

7606 

7607def tensor_slice_dataset(components, output_shapes, is_files=False, metadata="", replicate_on_split=False, name=None): 

7608 r"""Creates a dataset that emits each dim-0 slice of `components` once. 

7609 

7610 Args: 

7611 components: A list of `Tensor` objects. 

7612 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

7613 is_files: An optional `bool`. Defaults to `False`. 

7614 metadata: An optional `string`. Defaults to `""`. 

7615 replicate_on_split: An optional `bool`. Defaults to `False`. 

7616 name: A name for the operation (optional). 

7617 

7618 Returns: 

7619 A `Tensor` of type `variant`. 

7620 """ 

7621 _ctx = _context._context or _context.context() 

7622 tld = _ctx._thread_local_data 

7623 if tld.is_eager: 

7624 try: 

7625 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

7626 _ctx, "TensorSliceDataset", name, components, "output_shapes", 

7627 output_shapes, "is_files", is_files, "metadata", metadata, 

7628 "replicate_on_split", replicate_on_split) 

7629 return _result 

7630 except _core._NotOkStatusException as e: 

7631 _ops.raise_from_not_ok_status(e, name) 

7632 except _core._FallbackException: 

7633 pass 

7634 try: 

7635 return tensor_slice_dataset_eager_fallback( 

7636 components, output_shapes=output_shapes, is_files=is_files, 

7637 metadata=metadata, replicate_on_split=replicate_on_split, name=name, 

7638 ctx=_ctx) 

7639 except _core._SymbolicException: 

7640 pass # Add nodes to the TensorFlow graph. 

7641 # Add nodes to the TensorFlow graph. 

7642 if not isinstance(output_shapes, (list, tuple)): 

7643 raise TypeError( 

7644 "Expected list for 'output_shapes' argument to " 

7645 "'tensor_slice_dataset' Op, not %r." % output_shapes) 

7646 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

7647 if is_files is None: 

7648 is_files = False 

7649 is_files = _execute.make_bool(is_files, "is_files") 

7650 if metadata is None: 

7651 metadata = "" 

7652 metadata = _execute.make_str(metadata, "metadata") 

7653 if replicate_on_split is None: 

7654 replicate_on_split = False 

7655 replicate_on_split = _execute.make_bool(replicate_on_split, "replicate_on_split") 

7656 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

7657 "TensorSliceDataset", components=components, 

7658 output_shapes=output_shapes, is_files=is_files, 

7659 metadata=metadata, 

7660 replicate_on_split=replicate_on_split, 

7661 name=name) 

7662 _result = _outputs[:] 

7663 if _execute.must_record_gradient(): 

7664 _attrs = ("Toutput_types", _op.get_attr("Toutput_types"), "output_shapes", 

7665 _op.get_attr("output_shapes"), "is_files", 

7666 _op._get_attr_bool("is_files"), "metadata", 

7667 _op.get_attr("metadata"), "replicate_on_split", 

7668 _op._get_attr_bool("replicate_on_split")) 

7669 _inputs_flat = _op.inputs 

7670 _execute.record_gradient( 

7671 "TensorSliceDataset", _inputs_flat, _attrs, _result) 

7672 _result, = _result 

7673 return _result 

7674 

7675TensorSliceDataset = tf_export("raw_ops.TensorSliceDataset")(_ops.to_raw_op(tensor_slice_dataset)) 
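
# --- Editorial sketch (not machine generated): TensorSliceDataset emits one
# element per dim-0 slice of `components`; the high-level spelling is
# `tf.data.Dataset.from_tensor_slices(...)`. Assumes an eager TF2 runtime.
def _tensor_slice_dataset_sketch():  # never called; illustrative only
  import tensorflow as tf
  variant = tf.raw_ops.TensorSliceDataset(
      components=[tf.constant([[1, 2], [3, 4], [5, 6]])],
      output_shapes=[tf.TensorShape([2])])  # shape of each slice, not input
  return variant  # three elements: [1, 2], [3, 4], [5, 6]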

7676 

7677 

7678def tensor_slice_dataset_eager_fallback(components, output_shapes, is_files, metadata, replicate_on_split, name, ctx): 

7679 if not isinstance(output_shapes, (list, tuple)): 

7680 raise TypeError( 

7681 "Expected list for 'output_shapes' argument to " 

7682 "'tensor_slice_dataset' Op, not %r." % output_shapes) 

7683 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

7684 if is_files is None: 

7685 is_files = False 

7686 is_files = _execute.make_bool(is_files, "is_files") 

7687 if metadata is None: 

7688 metadata = "" 

7689 metadata = _execute.make_str(metadata, "metadata") 

7690 if replicate_on_split is None: 

7691 replicate_on_split = False 

7692 replicate_on_split = _execute.make_bool(replicate_on_split, "replicate_on_split") 

7693 _attr_Toutput_types, components = _execute.convert_to_mixed_eager_tensors(components, ctx) 

7694 _inputs_flat = list(components) 

7695 _attrs = ("Toutput_types", _attr_Toutput_types, "output_shapes", 

7696 output_shapes, "is_files", is_files, "metadata", metadata, 

7697 "replicate_on_split", replicate_on_split) 

7698 _result = _execute.execute(b"TensorSliceDataset", 1, inputs=_inputs_flat, 

7699 attrs=_attrs, ctx=ctx, name=name) 

7700 if _execute.must_record_gradient(): 

7701 _execute.record_gradient( 

7702 "TensorSliceDataset", _inputs_flat, _attrs, _result) 

7703 _result, = _result 

7704 return _result 

7705 

7706 

7707def text_line_dataset(filenames, compression_type, buffer_size, metadata="", name=None): 

7708 r"""Creates a dataset that emits the lines of one or more text files. 

7709 

7710 Args: 

7711 filenames: A `Tensor` of type `string`. 

7712 A scalar or a vector containing the name(s) of the file(s) to be 

7713 read. 

7714 compression_type: A `Tensor` of type `string`. 

7715 A scalar containing either (i) the empty string (no 

7716 compression), (ii) "ZLIB", or (iii) "GZIP". 

7717 buffer_size: A `Tensor` of type `int64`. 

7718 A scalar containing the number of bytes to buffer. 

7719 metadata: An optional `string`. Defaults to `""`. 

7720 name: A name for the operation (optional). 

7721 

7722 Returns: 

7723 A `Tensor` of type `variant`. 

7724 """ 

7725 _ctx = _context._context or _context.context() 

7726 tld = _ctx._thread_local_data 

7727 if tld.is_eager: 

7728 try: 

7729 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

7730 _ctx, "TextLineDataset", name, filenames, compression_type, 

7731 buffer_size, "metadata", metadata) 

7732 return _result 

7733 except _core._NotOkStatusException as e: 

7734 _ops.raise_from_not_ok_status(e, name) 

7735 except _core._FallbackException: 

7736 pass 

7737 try: 

7738 return text_line_dataset_eager_fallback( 

7739 filenames, compression_type, buffer_size, metadata=metadata, 

7740 name=name, ctx=_ctx) 

7741 except _core._SymbolicException: 

7742 pass # Add nodes to the TensorFlow graph. 

7743 # Add nodes to the TensorFlow graph. 

7744 if metadata is None: 

7745 metadata = "" 

7746 metadata = _execute.make_str(metadata, "metadata") 

7747 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

7748 "TextLineDataset", filenames=filenames, 

7749 compression_type=compression_type, 

7750 buffer_size=buffer_size, metadata=metadata, 

7751 name=name) 

7752 _result = _outputs[:] 

7753 if _execute.must_record_gradient(): 

7754 _attrs = ("metadata", _op.get_attr("metadata")) 

7755 _inputs_flat = _op.inputs 

7756 _execute.record_gradient( 

7757 "TextLineDataset", _inputs_flat, _attrs, _result) 

7758 _result, = _result 

7759 return _result 

7760 

7761TextLineDataset = tf_export("raw_ops.TextLineDataset")(_ops.to_raw_op(text_line_dataset)) 
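
# --- Editorial sketch (not machine generated): TextLineDataset parallels
# TFRecordDataset above but yields one string per line of the input files.
# "/tmp/example.txt" is a hypothetical path; assumes an eager TF2 runtime.
def _text_line_dataset_sketch():  # never called; illustrative only
  import tensorflow as tf
  variant = tf.raw_ops.TextLineDataset(
      filenames=tf.constant(["/tmp/example.txt"]),  # hypothetical path
      compression_type=tf.constant(""),  # "" | "ZLIB" | "GZIP"
      buffer_size=tf.constant(262144, tf.int64))  # read-ahead buffer, bytes
  return variant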

7762 

7763 

7764def text_line_dataset_eager_fallback(filenames, compression_type, buffer_size, metadata, name, ctx): 

7765 if metadata is None: 

7766 metadata = "" 

7767 metadata = _execute.make_str(metadata, "metadata") 

7768 filenames = _ops.convert_to_tensor(filenames, _dtypes.string) 

7769 compression_type = _ops.convert_to_tensor(compression_type, _dtypes.string) 

7770 buffer_size = _ops.convert_to_tensor(buffer_size, _dtypes.int64) 

7771 _inputs_flat = [filenames, compression_type, buffer_size] 

7772 _attrs = ("metadata", metadata) 

7773 _result = _execute.execute(b"TextLineDataset", 1, inputs=_inputs_flat, 

7774 attrs=_attrs, ctx=ctx, name=name) 

7775 if _execute.must_record_gradient(): 

7776 _execute.record_gradient( 

7777 "TextLineDataset", _inputs_flat, _attrs, _result) 

7778 _result, = _result 

7779 return _result 

7780 

7781 

7782def unwrap_dataset_variant(input_handle, name=None): 

7783 r"""TODO: add doc. 

7784 

7785 Args: 

7786 input_handle: A `Tensor` of type `variant`. 

7787 name: A name for the operation (optional). 

7788 

7789 Returns: 

7790 A `Tensor` of type `variant`. 

7791 """ 

7792 _ctx = _context._context or _context.context() 

7793 tld = _ctx._thread_local_data 

7794 if tld.is_eager: 

7795 try: 

7796 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

7797 _ctx, "UnwrapDatasetVariant", name, input_handle) 

7798 return _result 

7799 except _core._NotOkStatusException as e: 

7800 _ops.raise_from_not_ok_status(e, name) 

7801 except _core._FallbackException: 

7802 pass 

7803 try: 

7804 return unwrap_dataset_variant_eager_fallback( 

7805 input_handle, name=name, ctx=_ctx) 

7806 except _core._SymbolicException: 

7807 pass # Add nodes to the TensorFlow graph. 

7808 # Add nodes to the TensorFlow graph. 

7809 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

7810 "UnwrapDatasetVariant", input_handle=input_handle, name=name) 

7811 _result = _outputs[:] 

7812 if _execute.must_record_gradient(): 

7813 _attrs = () 

7814 _inputs_flat = _op.inputs 

7815 _execute.record_gradient( 

7816 "UnwrapDatasetVariant", _inputs_flat, _attrs, _result) 

7817 _result, = _result 

7818 return _result 

7819 

7820UnwrapDatasetVariant = tf_export("raw_ops.UnwrapDatasetVariant")(_ops.to_raw_op(unwrap_dataset_variant)) 

7821 

7822 

7823def unwrap_dataset_variant_eager_fallback(input_handle, name, ctx): 

7824 input_handle = _ops.convert_to_tensor(input_handle, _dtypes.variant) 

7825 _inputs_flat = [input_handle] 

7826 _attrs = None 

7827 _result = _execute.execute(b"UnwrapDatasetVariant", 1, inputs=_inputs_flat, 

7828 attrs=_attrs, ctx=ctx, name=name) 

7829 if _execute.must_record_gradient(): 

7830 _execute.record_gradient( 

7831 "UnwrapDatasetVariant", _inputs_flat, _attrs, _result) 

7832 _result, = _result 

7833 return _result 

7834 

7835 

7836def window_dataset(input_dataset, size, shift, stride, drop_remainder, output_types, output_shapes, metadata="", name=None): 

7837 r""" Combines (nests of) input elements into a dataset of (nests of) windows. 

7838 

7839 A "window" is a finite dataset of flat elements of size `size` (or possibly 

7840 fewer if there are not enough input elements to fill the window and 

7841 `drop_remainder` evaluates to false). 

7842 

7843 The `shift` argument determines the number of input elements by which 

7844 the window moves on each iteration. The first element in the `k`th window 

7845 will be element 

7846 

7847 ``` 

7848 1 + (k-1) * shift 

7849 ``` 

7850 

7851 of the input dataset, counting elements from 1 (0-based index `(k-1) * shift`). 

7852 In particular, the first window always starts at the first input element. 

7853 

7854 If the `stride` parameter is greater than 1, then each window will skip 

7855 `(stride - 1)` input elements between each element that appears in the 

7856 window. Output windows will still contain `size` elements regardless of 

7857 the value of `stride`. 

7858 

7859 In short, `shift` controls how far the window advances between iterations, 

7860 while `stride` controls the spacing of the elements within each window. 

7861 

7862 For example, letting `{...}` represent a Dataset: 

7863 

7864 - `tf.data.Dataset.range(7).window(2)` produces 

7865 `{{0, 1}, {2, 3}, {4, 5}, {6}}` 

7866 - `tf.data.Dataset.range(7).window(3, 2, 1, True)` produces 

7867 `{{0, 1, 2}, {2, 3, 4}, {4, 5, 6}}` 

7868 - `tf.data.Dataset.range(7).window(3, 1, 2, True)` produces 

7869 `{{0, 2, 4}, {1, 3, 5}, {2, 4, 6}}` 

7870 

7871 Note that when the `window` transformation is applied to a dataset of 

7872 nested elements, it produces a dataset of nested windows. 

7873 

7874 For example: 

7875 

7876 - `tf.data.Dataset.from_tensor_slices((range(4), range(4))).window(2)` 

7877 produces `{({0, 1}, {0, 1}), ({2, 3}, {2, 3})}` 

7878 - `tf.data.Dataset.from_tensor_slices({"a": range(4)}).window(2)` 

7879 produces `{{"a": {0, 1}}, {"a": {2, 3}}}` 

7880 

7881 Args: 

7882 input_dataset: A `Tensor` of type `variant`. 

7883 size: A `Tensor` of type `int64`. 

7884 An integer scalar, representing the number of elements 

7885 of the input dataset to combine into a window. Must be positive. 

7886 shift: A `Tensor` of type `int64`. 

7887 An integer scalar, representing the number of input elements 

7888 by which the window moves in each iteration. Defaults to `size`. 

7889 Must be positive. 

7890 stride: A `Tensor` of type `int64`. 

7891 An integer scalar, representing the stride of the input elements 

7892 in the sliding window. Must be positive. The default value of 1 means 

7893 "retain every input element". 

7894 drop_remainder: A `Tensor` of type `bool`. 

7895 A Boolean scalar, representing whether the last window should be 

7896 dropped if its size is smaller than `size`. 

7897 output_types: A list of `tf.DTypes` that has length `>= 1`. 

7898 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

7899 metadata: An optional `string`. Defaults to `""`. 

7900 name: A name for the operation (optional). 

7901 

7902 Returns: 

7903 A `Tensor` of type `variant`. 

7904 """ 

7905 _ctx = _context._context or _context.context() 

7906 tld = _ctx._thread_local_data 

7907 if tld.is_eager: 

7908 try: 

7909 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

7910 _ctx, "WindowDataset", name, input_dataset, size, shift, stride, 

7911 drop_remainder, "output_types", output_types, "output_shapes", 

7912 output_shapes, "metadata", metadata) 

7913 return _result 

7914 except _core._NotOkStatusException as e: 

7915 _ops.raise_from_not_ok_status(e, name) 

7916 except _core._FallbackException: 

7917 pass 

7918 try: 

7919 return window_dataset_eager_fallback( 

7920 input_dataset, size, shift, stride, drop_remainder, 

7921 output_types=output_types, output_shapes=output_shapes, 

7922 metadata=metadata, name=name, ctx=_ctx) 

7923 except _core._SymbolicException: 

7924 pass # Add nodes to the TensorFlow graph. 

7925 # Add nodes to the TensorFlow graph. 

7926 if not isinstance(output_types, (list, tuple)): 

7927 raise TypeError( 

7928 "Expected list for 'output_types' argument to " 

7929 "'window_dataset' Op, not %r." % output_types) 

7930 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

7931 if not isinstance(output_shapes, (list, tuple)): 

7932 raise TypeError( 

7933 "Expected list for 'output_shapes' argument to " 

7934 "'window_dataset' Op, not %r." % output_shapes) 

7935 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

7936 if metadata is None: 

7937 metadata = "" 

7938 metadata = _execute.make_str(metadata, "metadata") 

7939 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

7940 "WindowDataset", input_dataset=input_dataset, size=size, shift=shift, 

7941 stride=stride, drop_remainder=drop_remainder, 

7942 output_types=output_types, 

7943 output_shapes=output_shapes, metadata=metadata, 

7944 name=name) 

7945 _result = _outputs[:] 

7946 if _execute.must_record_gradient(): 

7947 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

7948 _op.get_attr("output_shapes"), "metadata", 

7949 _op.get_attr("metadata")) 

7950 _inputs_flat = _op.inputs 

7951 _execute.record_gradient( 

7952 "WindowDataset", _inputs_flat, _attrs, _result) 

7953 _result, = _result 

7954 return _result 

7955 

7956WindowDataset = tf_export("raw_ops.WindowDataset")(_ops.to_raw_op(window_dataset)) 
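
# --- Editorial sketch (not machine generated): the docstring examples above,
# written with the high-level `Dataset.window`, which lowers to WindowDataset.
# Assumes an eager TF2 runtime.
def _window_dataset_sketch():  # never called; illustrative only
  import tensorflow as tf
  windows = tf.data.Dataset.range(7).window(
      size=3, shift=2, stride=1, drop_remainder=True)
  # Each element is itself a dataset; flatten the windows into dense batches:
  batches = windows.flat_map(lambda w: w.batch(3))
  return list(batches.as_numpy_iterator())  # [[0,1,2], [2,3,4], [4,5,6]]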

7957 

7958 

7959def window_dataset_eager_fallback(input_dataset, size, shift, stride, drop_remainder, output_types, output_shapes, metadata, name, ctx): 

7960 if not isinstance(output_types, (list, tuple)): 

7961 raise TypeError( 

7962 "Expected list for 'output_types' argument to " 

7963 "'window_dataset' Op, not %r." % output_types) 

7964 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

7965 if not isinstance(output_shapes, (list, tuple)): 

7966 raise TypeError( 

7967 "Expected list for 'output_shapes' argument to " 

7968 "'window_dataset' Op, not %r." % output_shapes) 

7969 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

7970 if metadata is None: 

7971 metadata = "" 

7972 metadata = _execute.make_str(metadata, "metadata") 

7973 input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant) 

7974 size = _ops.convert_to_tensor(size, _dtypes.int64) 

7975 shift = _ops.convert_to_tensor(shift, _dtypes.int64) 

7976 stride = _ops.convert_to_tensor(stride, _dtypes.int64) 

7977 drop_remainder = _ops.convert_to_tensor(drop_remainder, _dtypes.bool) 

7978 _inputs_flat = [input_dataset, size, shift, stride, drop_remainder] 

7979 _attrs = ("output_types", output_types, "output_shapes", output_shapes, 

7980 "metadata", metadata) 

7981 _result = _execute.execute(b"WindowDataset", 1, inputs=_inputs_flat, 

7982 attrs=_attrs, ctx=ctx, name=name) 

7983 if _execute.must_record_gradient(): 

7984 _execute.record_gradient( 

7985 "WindowDataset", _inputs_flat, _attrs, _result) 

7986 _result, = _result 

7987 return _result 

7988 

7989 

7990def window_op(inputs, output_types, output_shapes, name=None): 

7991 r"""TODO: add doc. 

7992 

7993 Args: 

7994 inputs: A list of `Tensor` objects. 

7995 output_types: A list of `tf.DTypes` that has length `>= 1`. 

7996 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

7997 name: A name for the operation (optional). 

7998 

7999 Returns: 

8000 A `Tensor` of type `variant`. 

8001 """ 

8002 _ctx = _context._context or _context.context() 

8003 tld = _ctx._thread_local_data 

8004 if tld.is_eager: 

8005 try: 

8006 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

8007 _ctx, "WindowOp", name, inputs, "output_types", output_types, 

8008 "output_shapes", output_shapes) 

8009 return _result 

8010 except _core._NotOkStatusException as e: 

8011 _ops.raise_from_not_ok_status(e, name) 

8012 except _core._FallbackException: 

8013 pass 

8014 try: 

8015 return window_op_eager_fallback( 

8016 inputs, output_types=output_types, output_shapes=output_shapes, 

8017 name=name, ctx=_ctx) 

8018 except _core._SymbolicException: 

8019 pass # Add nodes to the TensorFlow graph. 

8020 # Add nodes to the TensorFlow graph. 

8021 if not isinstance(output_types, (list, tuple)): 

8022 raise TypeError( 

8023 "Expected list for 'output_types' argument to " 

8024 "'window_op' Op, not %r." % output_types) 

8025 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

8026 if not isinstance(output_shapes, (list, tuple)): 

8027 raise TypeError( 

8028 "Expected list for 'output_shapes' argument to " 

8029 "'window_op' Op, not %r." % output_shapes) 

8030 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

8031 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

8032 "WindowOp", inputs=inputs, output_types=output_types, 

8033 output_shapes=output_shapes, name=name) 

8034 _result = _outputs[:] 

8035 if _execute.must_record_gradient(): 

8036 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

8037 _op.get_attr("output_shapes"), "Tinputs", 

8038 _op.get_attr("Tinputs")) 

8039 _inputs_flat = _op.inputs 

8040 _execute.record_gradient( 

8041 "WindowOp", _inputs_flat, _attrs, _result) 

8042 _result, = _result 

8043 return _result 

8044 

8045WindowOp = tf_export("raw_ops.WindowOp")(_ops.to_raw_op(window_op)) 

8046 

8047 

8048def window_op_eager_fallback(inputs, output_types, output_shapes, name, ctx): 

8049 if not isinstance(output_types, (list, tuple)): 

8050 raise TypeError( 

8051 "Expected list for 'output_types' argument to " 

8052 "'window_op' Op, not %r." % output_types) 

8053 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

8054 if not isinstance(output_shapes, (list, tuple)): 

8055 raise TypeError( 

8056 "Expected list for 'output_shapes' argument to " 

8057 "'window_op' Op, not %r." % output_shapes) 

8058 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

8059 _attr_Tinputs, inputs = _execute.convert_to_mixed_eager_tensors(inputs, ctx) 

8060 _inputs_flat = list(inputs) 

8061 _attrs = ("output_types", output_types, "output_shapes", output_shapes, 

8062 "Tinputs", _attr_Tinputs) 

8063 _result = _execute.execute(b"WindowOp", 1, inputs=_inputs_flat, 

8064 attrs=_attrs, ctx=ctx, name=name) 

8065 if _execute.must_record_gradient(): 

8066 _execute.record_gradient( 

8067 "WindowOp", _inputs_flat, _attrs, _result) 

8068 _result, = _result 

8069 return _result 

8070 

8071 

8072def wrap_dataset_variant(input_handle, name=None): 

8073 r"""TODO: add doc. 

8074 

8075 Args: 

8076 input_handle: A `Tensor` of type `variant`. 

8077 name: A name for the operation (optional). 

8078 

8079 Returns: 

8080 A `Tensor` of type `variant`. 

8081 """ 

8082 _ctx = _context._context or _context.context() 

8083 tld = _ctx._thread_local_data 

8084 if tld.is_eager: 

8085 try: 

8086 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

8087 _ctx, "WrapDatasetVariant", name, input_handle) 

8088 return _result 

8089 except _core._NotOkStatusException as e: 

8090 _ops.raise_from_not_ok_status(e, name) 

8091 except _core._FallbackException: 

8092 pass 

8093 try: 

8094 return wrap_dataset_variant_eager_fallback( 

8095 input_handle, name=name, ctx=_ctx) 

8096 except _core._SymbolicException: 

8097 pass # Add nodes to the TensorFlow graph. 

8098 # Add nodes to the TensorFlow graph. 

8099 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

8100 "WrapDatasetVariant", input_handle=input_handle, name=name) 

8101 _result = _outputs[:] 

8102 if _execute.must_record_gradient(): 

8103 _attrs = () 

8104 _inputs_flat = _op.inputs 

8105 _execute.record_gradient( 

8106 "WrapDatasetVariant", _inputs_flat, _attrs, _result) 

8107 _result, = _result 

8108 return _result 

8109 

8110WrapDatasetVariant = tf_export("raw_ops.WrapDatasetVariant")(_ops.to_raw_op(wrap_dataset_variant)) 
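
# --- Editorial sketch (not machine generated): WrapDatasetVariant and
# UnwrapDatasetVariant above are inverses, so a wrap/unwrap round trip yields
# an equivalent dataset variant. The usual motivation (making the handle
# transferable between devices) is inferred, not documented in this file.
# Uses the private `_variant_tensor` handle; assumes an eager TF2 runtime.
def _wrap_unwrap_roundtrip_sketch():  # never called; illustrative only
  import tensorflow as tf
  variant = tf.data.Dataset.range(3)._variant_tensor  # private handle
  wrapped = tf.raw_ops.WrapDatasetVariant(input_handle=variant)
  return tf.raw_ops.UnwrapDatasetVariant(input_handle=wrapped)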

8111 

8112 

8113def wrap_dataset_variant_eager_fallback(input_handle, name, ctx): 

8114 input_handle = _ops.convert_to_tensor(input_handle, _dtypes.variant) 

8115 _inputs_flat = [input_handle] 

8116 _attrs = None 

8117 _result = _execute.execute(b"WrapDatasetVariant", 1, inputs=_inputs_flat, 

8118 attrs=_attrs, ctx=ctx, name=name) 

8119 if _execute.must_record_gradient(): 

8120 _execute.record_gradient( 

8121 "WrapDatasetVariant", _inputs_flat, _attrs, _result) 

8122 _result, = _result 

8123 return _result 

8124 

8125 

8126def zip_dataset(input_datasets, output_types, output_shapes, metadata="", name=None): 

8127 r"""Creates a dataset that zips together `input_datasets`. 

8128 

8129 The elements of the resulting dataset are created by zipping corresponding 

8130 elements from each of the input datasets. 

8131 

8132 The size of the resulting dataset will match the size of the smallest input 

8133 dataset, and no error will be raised if input datasets have different sizes. 

8134 

8135 Args: 

8136 input_datasets: A list of at least 1 `Tensor` objects with type `variant`. 

8137 List of `N` variant Tensors representing datasets to be zipped together. 

8138 output_types: A list of `tf.DTypes` that has length `>= 1`. 

8139 output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. 

8140 metadata: An optional `string`. Defaults to `""`. 

8141 name: A name for the operation (optional). 

8142 

8143 Returns: 

8144 A `Tensor` of type `variant`. 

8145 """ 

8146 _ctx = _context._context or _context.context() 

8147 tld = _ctx._thread_local_data 

8148 if tld.is_eager: 

8149 try: 

8150 _result = pywrap_tfe.TFE_Py_FastPathExecute( 

8151 _ctx, "ZipDataset", name, input_datasets, "output_types", 

8152 output_types, "output_shapes", output_shapes, "metadata", metadata) 

8153 return _result 

8154 except _core._NotOkStatusException as e: 

8155 _ops.raise_from_not_ok_status(e, name) 

8156 except _core._FallbackException: 

8157 pass 

8158 try: 

8159 return zip_dataset_eager_fallback( 

8160 input_datasets, output_types=output_types, 

8161 output_shapes=output_shapes, metadata=metadata, name=name, ctx=_ctx) 

8162 except _core._SymbolicException: 

8163 pass # Add nodes to the TensorFlow graph. 

8164 # Add nodes to the TensorFlow graph. 

8165 if not isinstance(input_datasets, (list, tuple)): 

8166 raise TypeError( 

8167 "Expected list for 'input_datasets' argument to " 

8168 "'zip_dataset' Op, not %r." % input_datasets) 

8169 _attr_N = len(input_datasets) 

8170 if not isinstance(output_types, (list, tuple)): 

8171 raise TypeError( 

8172 "Expected list for 'output_types' argument to " 

8173 "'zip_dataset' Op, not %r." % output_types) 

8174 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

8175 if not isinstance(output_shapes, (list, tuple)): 

8176 raise TypeError( 

8177 "Expected list for 'output_shapes' argument to " 

8178 "'zip_dataset' Op, not %r." % output_shapes) 

8179 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

8180 if metadata is None: 

8181 metadata = "" 

8182 metadata = _execute.make_str(metadata, "metadata") 

8183 _, _, _op, _outputs = _op_def_library._apply_op_helper( 

8184 "ZipDataset", input_datasets=input_datasets, 

8185 output_types=output_types, output_shapes=output_shapes, 

8186 metadata=metadata, name=name) 

8187 _result = _outputs[:] 

8188 if _execute.must_record_gradient(): 

8189 _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes", 

8190 _op.get_attr("output_shapes"), "N", _op._get_attr_int("N"), 

8191 "metadata", _op.get_attr("metadata")) 

8192 _inputs_flat = _op.inputs 

8193 _execute.record_gradient( 

8194 "ZipDataset", _inputs_flat, _attrs, _result) 

8195 _result, = _result 

8196 return _result 

8197 

8198ZipDataset = tf_export("raw_ops.ZipDataset")(_ops.to_raw_op(zip_dataset)) 
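
# --- Editorial sketch (not machine generated): ZipDataset via the
# `tf.raw_ops` alias above; the high-level spelling is
# `tf.data.Dataset.zip(...)`. As documented, the result is as long as the
# shortest input (two elements here). Assumes an eager TF2 runtime.
def _zip_dataset_sketch():  # never called; illustrative only
  import tensorflow as tf
  a = tf.data.Dataset.range(2)
  b = tf.data.Dataset.range(5)
  variant = tf.raw_ops.ZipDataset(
      input_datasets=[a._variant_tensor, b._variant_tensor],  # private handles
      output_types=[tf.int64, tf.int64],
      output_shapes=[tf.TensorShape([]), tf.TensorShape([])])
  return variant  # elements: (0, 0), (1, 1)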

8199 

8200 

8201def zip_dataset_eager_fallback(input_datasets, output_types, output_shapes, metadata, name, ctx): 

8202 if not isinstance(input_datasets, (list, tuple)): 

8203 raise TypeError( 

8204 "Expected list for 'input_datasets' argument to " 

8205 "'zip_dataset' Op, not %r." % input_datasets) 

8206 _attr_N = len(input_datasets) 

8207 if not isinstance(output_types, (list, tuple)): 

8208 raise TypeError( 

8209 "Expected list for 'output_types' argument to " 

8210 "'zip_dataset' Op, not %r." % output_types) 

8211 output_types = [_execute.make_type(_t, "output_types") for _t in output_types] 

8212 if not isinstance(output_shapes, (list, tuple)): 

8213 raise TypeError( 

8214 "Expected list for 'output_shapes' argument to " 

8215 "'zip_dataset' Op, not %r." % output_shapes) 

8216 output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] 

8217 if metadata is None: 

8218 metadata = "" 

8219 metadata = _execute.make_str(metadata, "metadata") 

8220 input_datasets = _ops.convert_n_to_tensor(input_datasets, _dtypes.variant) 

8221 _inputs_flat = list(input_datasets) 

8222 _attrs = ("output_types", output_types, "output_shapes", output_shapes, "N", 

8223 _attr_N, "metadata", metadata) 

8224 _result = _execute.execute(b"ZipDataset", 1, inputs=_inputs_flat, 

8225 attrs=_attrs, ctx=ctx, name=name) 

8226 if _execute.must_record_gradient(): 

8227 _execute.record_gradient( 

8228 "ZipDataset", _inputs_flat, _attrs, _result) 

8229 _result, = _result 

8230 return _result 

8231