Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/tensorflow/python/training/training.py: 100%
112 statements
coverage.py v7.4.0, created at 2024-01-03 07:57 +0000
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

"""Support for training models.

See the [Training](https://tensorflow.org/api_guides/python/train) guide.
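
As a quick orientation, the following is a minimal sketch of a graph-mode
training loop wired together from pieces of this module. It assumes the
`tf.compat.v1` API surface of TensorFlow 2.x and a toy model invented for the
example:

```
import tensorflow.compat.v1 as tf
tf.disable_eager_execution()

# A toy linear model and squared loss (illustrative only).
x = tf.placeholder(tf.float32, shape=[None, 1])
y = tf.placeholder(tf.float32, shape=[None, 1])
w = tf.Variable([[0.0]])
loss = tf.reduce_mean(tf.square(tf.matmul(x, w) - y))

# Pieces from tf.compat.v1.train: global step, optimizer, monitored session.
step = tf.train.get_or_create_global_step()
train_op = tf.train.GradientDescentOptimizer(0.1).minimize(
    loss, global_step=step)

with tf.train.MonitoredTrainingSession() as sess:
  for _ in range(10):
    sess.run(train_op, feed_dict={x: [[1.0]], y: [[2.0]]})
```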
"""

# Optimizers.
# pylint: disable=g-bad-import-order,unused-import
from tensorflow.python.ops.sdca_ops import sdca_optimizer
from tensorflow.python.ops.sdca_ops import sdca_fprint
from tensorflow.python.ops.sdca_ops import sdca_shrink_l1
from tensorflow.python.training.adadelta import AdadeltaOptimizer
from tensorflow.python.training.adagrad import AdagradOptimizer
from tensorflow.python.training.adagrad_da import AdagradDAOptimizer
from tensorflow.python.training.proximal_adagrad import ProximalAdagradOptimizer
from tensorflow.python.training.adam import AdamOptimizer
from tensorflow.python.training.ftrl import FtrlOptimizer
from tensorflow.python.training.experimental.loss_scale_optimizer import MixedPrecisionLossScaleOptimizer
from tensorflow.python.training.experimental.mixed_precision import enable_mixed_precision_graph_rewrite_v1
from tensorflow.python.training.momentum import MomentumOptimizer
from tensorflow.python.training.moving_averages import ExponentialMovingAverage
from tensorflow.python.training.optimizer import Optimizer
from tensorflow.python.training.rmsprop import RMSPropOptimizer
from tensorflow.python.training.gradient_descent import GradientDescentOptimizer
from tensorflow.python.training.proximal_gradient_descent import ProximalGradientDescentOptimizer
from tensorflow.python.training.sync_replicas_optimizer import SyncReplicasOptimizer

# Utility classes for training.
from tensorflow.python.training.coordinator import Coordinator
from tensorflow.python.training.coordinator import LooperThread
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.training.queue_runner import *

# For the module level doc.
from tensorflow.python.training import input as _input
from tensorflow.python.training.input import *  # pylint: disable=redefined-builtin
# pylint: enable=wildcard-import

from tensorflow.python.training.basic_session_run_hooks import get_or_create_steps_per_run_variable
from tensorflow.python.training.basic_session_run_hooks import SecondOrStepTimer
from tensorflow.python.training.basic_session_run_hooks import LoggingTensorHook
from tensorflow.python.training.basic_session_run_hooks import StopAtStepHook
from tensorflow.python.training.basic_session_run_hooks import CheckpointSaverHook
from tensorflow.python.training.basic_session_run_hooks import CheckpointSaverListener
from tensorflow.python.training.basic_session_run_hooks import StepCounterHook
from tensorflow.python.training.basic_session_run_hooks import NanLossDuringTrainingError
from tensorflow.python.training.basic_session_run_hooks import NanTensorHook
from tensorflow.python.training.basic_session_run_hooks import SummarySaverHook
from tensorflow.python.training.basic_session_run_hooks import GlobalStepWaiterHook
from tensorflow.python.training.basic_session_run_hooks import FinalOpsHook
from tensorflow.python.training.basic_session_run_hooks import FeedFnHook
from tensorflow.python.training.basic_session_run_hooks import ProfilerHook
from tensorflow.python.training.basic_loops import basic_train_loop
from tensorflow.python.trackable.python_state import PythonState
from tensorflow.python.checkpoint.checkpoint import Checkpoint
from tensorflow.python.checkpoint.checkpoint_view import CheckpointView
from tensorflow.python.training.checkpoint_utils import init_from_checkpoint
from tensorflow.python.training.checkpoint_utils import list_variables
from tensorflow.python.training.checkpoint_utils import load_checkpoint
from tensorflow.python.training.checkpoint_utils import load_variable

from tensorflow.python.training.device_setter import replica_device_setter
from tensorflow.python.training.monitored_session import Scaffold
from tensorflow.python.training.monitored_session import MonitoredTrainingSession
from tensorflow.python.training.monitored_session import SessionCreator
from tensorflow.python.training.monitored_session import ChiefSessionCreator
from tensorflow.python.training.monitored_session import WorkerSessionCreator
from tensorflow.python.training.monitored_session import MonitoredSession
from tensorflow.python.training.monitored_session import SingularMonitoredSession
from tensorflow.python.training.saver import Saver
from tensorflow.python.checkpoint.checkpoint_management import checkpoint_exists
from tensorflow.python.checkpoint.checkpoint_management import generate_checkpoint_state_proto
from tensorflow.python.checkpoint.checkpoint_management import get_checkpoint_mtimes
from tensorflow.python.checkpoint.checkpoint_management import get_checkpoint_state
from tensorflow.python.checkpoint.checkpoint_management import latest_checkpoint
from tensorflow.python.checkpoint.checkpoint_management import update_checkpoint_state
from tensorflow.python.training.saver import export_meta_graph
from tensorflow.python.training.saver import import_meta_graph
from tensorflow.python.training.saving import saveable_object_util
from tensorflow.python.training.session_run_hook import SessionRunHook
from tensorflow.python.training.session_run_hook import SessionRunArgs
from tensorflow.python.training.session_run_hook import SessionRunContext
from tensorflow.python.training.session_run_hook import SessionRunValues
from tensorflow.python.training.session_manager import SessionManager
from tensorflow.python.training.summary_io import summary_iterator
from tensorflow.python.training.supervisor import Supervisor
from tensorflow.python.training.training_util import write_graph
from tensorflow.python.training.training_util import global_step
from tensorflow.python.training.training_util import get_global_step
from tensorflow.python.training.training_util import assert_global_step
from tensorflow.python.training.training_util import create_global_step
from tensorflow.python.training.training_util import get_or_create_global_step
from tensorflow.python.training.warm_starting_util import VocabInfo
from tensorflow.python.training.warm_starting_util import warm_start
from tensorflow.python.training.py_checkpoint_reader import NewCheckpointReader
from tensorflow.python.util.tf_export import tf_export

# pylint: disable=wildcard-import
# Training data protos.
from tensorflow.core.example.example_pb2 import *
from tensorflow.core.example.feature_pb2 import *
from tensorflow.core.protobuf.saver_pb2 import *

# Utility op. Open Source. TODO(touts): move to nn?
from tensorflow.python.training.learning_rate_decay import *
# pylint: enable=wildcard-import

# Distributed computing support.
from tensorflow.core.protobuf.cluster_pb2 import ClusterDef
from tensorflow.core.protobuf.cluster_pb2 import JobDef
from tensorflow.core.protobuf.tensorflow_server_pb2 import ServerDef
from tensorflow.python.training.server_lib import ClusterSpec
from tensorflow.python.training.server_lib import Server

# pylint: disable=undefined-variable
tf_export("train.BytesList")(BytesList)
tf_export("train.ClusterDef")(ClusterDef)
tf_export("train.Example")(Example)
tf_export("train.Feature")(Feature)
tf_export("train.Features")(Features)
tf_export("train.FeatureList")(FeatureList)
tf_export("train.FeatureLists")(FeatureLists)
tf_export("train.FloatList")(FloatList)
tf_export("train.Int64List")(Int64List)
tf_export("train.JobDef")(JobDef)
tf_export(v1=["train.SaverDef"])(SaverDef)
tf_export("train.SequenceExample")(SequenceExample)
tf_export("train.ServerDef")(ServerDef)

BytesList.__doc__ = """\
Used in `tf.train.Example` protos. Holds a list of byte-strings.

An `Example` proto is a representation of the following python type:

```
Dict[str,
     Union[List[bytes],
           List[int64],
           List[float]]]
```

This proto implements the `List[bytes]` portion.
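
As a minimal construction sketch, a `BytesList` can also be built directly from
Python byte-strings and wrapped in a `tf.train.Feature`:

```
bytes_list = tf.train.BytesList(value=[b"abc", b"12345"])
feature = tf.train.Feature(bytes_list=bytes_list)
```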

>>> from google.protobuf import text_format
>>> example = text_format.Parse('''
...   features {
...     feature {key: "my_feature"
...              value {bytes_list {value: ['abc', '12345' ]}}}
...   }''',
...   tf.train.Example())
>>>
>>> example.features.feature['my_feature'].bytes_list.value
[b'abc', b'12345']

Use `tf.io.parse_example` to extract tensors from a serialized `Example` proto:

>>> tf.io.parse_example(
...     example.SerializeToString(),
...     features = {'my_feature': tf.io.RaggedFeature(dtype=tf.string)})
{'my_feature': <tf.Tensor: shape=(2,), dtype=string,
 numpy=array([b'abc', b'12345'], dtype=object)>}


See the [`tf.train.Example`](https://www.tensorflow.org/tutorials/load_data/tfrecord#tftrainexample)
guide for usage details.
"""

FloatList.__doc__ = """\
Used in `tf.train.Example` protos. Holds a list of floats.

An `Example` proto is a representation of the following python type:

```
Dict[str,
     Union[List[bytes],
           List[int64],
           List[float]]]
```

This proto implements the `List[float]` portion.
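
As a minimal construction sketch, a `FloatList` can also be built directly and
wrapped in a `tf.train.Feature`:

```
float_list = tf.train.FloatList(value=[1.0, 2.0, 3.0, 4.0])
feature = tf.train.Feature(float_list=float_list)
```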

>>> from google.protobuf import text_format
>>> example = text_format.Parse('''
...   features {
...     feature {key: "my_feature"
...              value {float_list {value: [1., 2., 3., 4. ]}}}
...   }''',
...   tf.train.Example())
>>>
>>> example.features.feature['my_feature'].float_list.value
[1.0, 2.0, 3.0, 4.0]

Use `tf.io.parse_example` to extract tensors from a serialized `Example` proto:

>>> tf.io.parse_example(
...     example.SerializeToString(),
...     features = {'my_feature': tf.io.RaggedFeature(dtype=tf.float32)})
{'my_feature': <tf.Tensor: shape=(4,), dtype=float32,
 numpy=array([1., 2., 3., 4.], dtype=float32)>}

See the [`tf.train.Example`](https://www.tensorflow.org/tutorials/load_data/tfrecord#tftrainexample)
guide for usage details.
"""

Int64List.__doc__ = """\
Used in `tf.train.Example` protos. Holds a list of Int64s.

An `Example` proto is a representation of the following python type:

```
Dict[str,
     Union[List[bytes],
           List[int64],
           List[float]]]
```

This proto implements the `List[int64]` portion.
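
As a minimal construction sketch, an `Int64List` can also be built directly and
wrapped in a `tf.train.Feature`:

```
int64_list = tf.train.Int64List(value=[1, 2, 3, 4])
feature = tf.train.Feature(int64_list=int64_list)
```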

>>> from google.protobuf import text_format
>>> example = text_format.Parse('''
...   features {
...     feature {key: "my_feature"
...              value {int64_list {value: [1, 2, 3, 4]}}}
...   }''',
...   tf.train.Example())
>>>
>>> example.features.feature['my_feature'].int64_list.value
[1, 2, 3, 4]

Use `tf.io.parse_example` to extract tensors from a serialized `Example` proto:

>>> tf.io.parse_example(
...     example.SerializeToString(),
...     features = {'my_feature': tf.io.RaggedFeature(dtype=tf.int64)})
{'my_feature': <tf.Tensor: shape=(4,), dtype=int64,
 numpy=array([1, 2, 3, 4])>}

See the [`tf.train.Example`](https://www.tensorflow.org/tutorials/load_data/tfrecord#tftrainexample)
guide for usage details.
"""

Feature.__doc__ = """\
Used in `tf.train.Example` protos. Contains a list of values.

An `Example` proto is a representation of the following python type:

```
Dict[str,
     Union[List[bytes],
           List[int64],
           List[float]]]
```

This proto implements the `Union`.

The contained list can be one of three types:

 - `tf.train.BytesList`
 - `tf.train.FloatList`
 - `tf.train.Int64List`

>>> int_feature = tf.train.Feature(
...     int64_list=tf.train.Int64List(value=[1, 2, 3, 4]))
>>> float_feature = tf.train.Feature(
...     float_list=tf.train.FloatList(value=[1., 2., 3., 4.]))
>>> bytes_feature = tf.train.Feature(
...     bytes_list=tf.train.BytesList(value=[b"abc", b"1234"]))
>>>
>>> example = tf.train.Example(
...     features=tf.train.Features(feature={
...         'my_ints': int_feature,
...         'my_floats': float_feature,
...         'my_bytes': bytes_feature,
...     }))

Use `tf.io.parse_example` to extract tensors from a serialized `Example` proto:

>>> tf.io.parse_example(
...     example.SerializeToString(),
...     features = {
...         'my_ints': tf.io.RaggedFeature(dtype=tf.int64),
...         'my_floats': tf.io.RaggedFeature(dtype=tf.float32),
...         'my_bytes': tf.io.RaggedFeature(dtype=tf.string)})
{'my_bytes': <tf.Tensor: shape=(2,), dtype=string,
 numpy=array([b'abc', b'1234'], dtype=object)>,
 'my_floats': <tf.Tensor: shape=(4,), dtype=float32,
 numpy=array([1., 2., 3., 4.], dtype=float32)>,
 'my_ints': <tf.Tensor: shape=(4,), dtype=int64,
 numpy=array([1, 2, 3, 4])>}

"""

Features.__doc__ = """\
Used in `tf.train.Example` protos. Contains the mapping from keys to `Feature`.

An `Example` proto is a representation of the following python type:

```
Dict[str,
     Union[List[bytes],
           List[int64],
           List[float]]]
```

This proto implements the `Dict`.

>>> int_feature = tf.train.Feature(
...     int64_list=tf.train.Int64List(value=[1, 2, 3, 4]))
>>> float_feature = tf.train.Feature(
...     float_list=tf.train.FloatList(value=[1., 2., 3., 4.]))
>>> bytes_feature = tf.train.Feature(
...     bytes_list=tf.train.BytesList(value=[b"abc", b"1234"]))
>>>
>>> example = tf.train.Example(
...     features=tf.train.Features(feature={
...         'my_ints': int_feature,
...         'my_floats': float_feature,
...         'my_bytes': bytes_feature,
...     }))

Use `tf.io.parse_example` to extract tensors from a serialized `Example` proto:

>>> tf.io.parse_example(
...     example.SerializeToString(),
...     features = {
...         'my_ints': tf.io.RaggedFeature(dtype=tf.int64),
...         'my_floats': tf.io.RaggedFeature(dtype=tf.float32),
...         'my_bytes': tf.io.RaggedFeature(dtype=tf.string)})
{'my_bytes': <tf.Tensor: shape=(2,), dtype=string,
 numpy=array([b'abc', b'1234'], dtype=object)>,
 'my_floats': <tf.Tensor: shape=(4,), dtype=float32,
 numpy=array([1., 2., 3., 4.], dtype=float32)>,
 'my_ints': <tf.Tensor: shape=(4,), dtype=int64,
 numpy=array([1, 2, 3, 4])>}

"""

FeatureList.__doc__ = """\
Mainly used as part of a `tf.train.SequenceExample`.

Contains a list of `tf.train.Feature`s.

The `tf.train.SequenceExample` proto can be thought of as a
proto implementation of the following python type:

```
# tf.train.Feature
Feature = Union[List[bytes],
                List[int64],
                List[float]]

# tf.train.FeatureList
FeatureList = List[Feature]

# tf.train.FeatureLists
FeatureLists = Dict[str, FeatureList]

class SequenceExample(typing.NamedTuple):
  context: Dict[str, Feature]
  feature_lists: FeatureLists
```

This proto implements the `List[Feature]` portion.
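
As a minimal construction sketch, a `FeatureList` wraps a repeated field of
`tf.train.Feature` messages:

```
movie_ratings = tf.train.FeatureList(feature=[
    tf.train.Feature(float_list=tf.train.FloatList(value=[4.5])),
    tf.train.Feature(float_list=tf.train.FloatList(value=[5.0])),
])
```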

"""

FeatureLists.__doc__ = """\
Mainly used as part of a `tf.train.SequenceExample`.

Contains a mapping from keys to `tf.train.FeatureList` messages.

The `tf.train.SequenceExample` proto can be thought of as a
proto implementation of the following python type:

```
# tf.train.Feature
Feature = Union[List[bytes],
                List[int64],
                List[float]]

# tf.train.FeatureList
FeatureList = List[Feature]

# tf.train.FeatureLists
FeatureLists = Dict[str, FeatureList]

class SequenceExample(typing.NamedTuple):
  context: Dict[str, Feature]
  feature_lists: FeatureLists
```

This proto implements the `Dict[str, FeatureList]` portion.
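
As a minimal construction sketch, a `FeatureLists` message maps string keys to
`tf.train.FeatureList` values:

```
ratings = tf.train.FeatureList(feature=[
    tf.train.Feature(float_list=tf.train.FloatList(value=[4.5]))])
names = tf.train.FeatureList(feature=[
    tf.train.Feature(bytes_list=tf.train.BytesList(value=[b"Fight Club"]))])
feature_lists = tf.train.FeatureLists(
    feature_list={'movie_ratings': ratings, 'movie_names': names})
```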
"""


Example.__doc__ = """\
An `Example` is a standard proto storing data for training and inference.

An `Example` proto is a representation of the following python type:

```
Dict[str,
     Union[List[bytes],
           List[int64],
           List[float]]]
```

It contains a key-value store `Example.features` where each key (string) maps
to a `tf.train.Feature` message which contains a fixed-type list. This flexible
and compact format allows the storage of large amounts of typed data, but
requires that the data shape and use be determined by the configuration files
and parsers that are used to read and write this format (refer to
`tf.io.parse_example` for details).

>>> from google.protobuf import text_format
>>> example = text_format.Parse('''
...   features {
...     feature {key: "my_feature"
...              value {int64_list {value: [1, 2, 3, 4]}}}
...   }''',
...   tf.train.Example())

Use `tf.io.parse_example` to extract tensors from a serialized `Example` proto:

>>> tf.io.parse_example(
...     example.SerializeToString(),
...     features = {'my_feature': tf.io.RaggedFeature(dtype=tf.int64)})
{'my_feature': <tf.Tensor: shape=(4,), dtype=int64,
 numpy=array([1, 2, 3, 4])>}

While the list of keys, and the contents of each key _could_ be different for
every `Example`, TensorFlow expects a fixed list of keys, each with a fixed
`tf.dtype`. A conformant `Example` dataset obeys the following conventions:

 - If a Feature `K` exists in one example with data type `T`, it must be of
   type `T` in all other examples when present. It may be omitted.
 - The number of instances of Feature `K` list data may vary across examples,
   depending on the requirements of the model.
 - If a Feature `K` doesn't exist in an example, a `K`-specific default will be
   used, if configured.
 - If a Feature `K` exists in an example but contains no items, the intent
   is considered to be an empty tensor and no default will be used.
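
As a sketch of the defaults convention above (using the `example` message built
earlier in this docstring, plus a hypothetical absent key named
`'other_feature'`), a parser configuration can supply a per-key default through
`tf.io.FixedLenFeature`:

```
parsed = tf.io.parse_example(
    [example.SerializeToString()],
    features={
        # Present in the example: parsed from the int64_list above.
        'my_feature': tf.io.FixedLenFeature([4], dtype=tf.int64),
        # Absent from the example: filled with the configured default_value.
        'other_feature': tf.io.FixedLenFeature(
            [1], dtype=tf.int64, default_value=[0]),
    })
```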

"""

SequenceExample.__doc__ = """\
A `SequenceExample` represents a sequence of features and some context.

It can be thought of as a proto-implementation of the following python type:

```
Feature = Union[List[bytes],
                List[int64],
                List[float]]

class SequenceExample(typing.NamedTuple):
  context: Dict[str, Feature]
  feature_lists: Dict[str, List[Feature]]
```

To implement this as protos it's broken up into sub-messages as follows:

```
# tf.train.Feature
Feature = Union[List[bytes],
                List[int64],
                List[float]]

# tf.train.FeatureList
FeatureList = List[Feature]

# tf.train.FeatureLists
FeatureLists = Dict[str, FeatureList]

# tf.train.SequenceExample
class SequenceExample(typing.NamedTuple):
  context: Dict[str, Feature]
  feature_lists: FeatureLists
```

To parse a `SequenceExample` in TensorFlow refer to the
`tf.io.parse_sequence_example` function.

The `context` contains features which apply to the entire
example. The `feature_lists` contain a key/value map where each key is
associated with a repeated set of `tf.train.Feature`s (a `tf.train.FeatureList`).
A `FeatureList` represents the values of a feature identified by its key
over time / frames.

Below is a `SequenceExample` for a movie recommendation application recording a
sequence of ratings by a user. The time-independent features ("locale",
"age", "favorites") describing the user are part of the context. The sequence
of movies the user rated is part of the feature_lists. For each movie in the
sequence we have information on its name and actors and the user's rating.
This information is recorded in three separate `feature_list`s.
In the example below there are only two movies. All three `feature_list`s,
namely "movie_ratings", "movie_names", and "actors" have a feature value for
both movies. Note that "actors" is itself a `bytes_list` with multiple
strings per movie.

```
context: {
  feature: {
    key : "locale"
    value: {
      bytes_list: {
        value: [ "pt_BR" ]
      }
    }
  }
  feature: {
    key : "age"
    value: {
      float_list: {
        value: [ 19.0 ]
      }
    }
  }
  feature: {
    key : "favorites"
    value: {
      bytes_list: {
        value: [ "Majesty Rose", "Savannah Outen", "One Direction" ]
      }
    }
  }
}
feature_lists: {
  feature_list: {
    key : "movie_ratings"
    value: {
      feature: {
        float_list: {
          value: [ 4.5 ]
        }
      }
      feature: {
        float_list: {
          value: [ 5.0 ]
        }
      }
    }
  }
  feature_list: {
    key : "movie_names"
    value: {
      feature: {
        bytes_list: {
          value: [ "The Shawshank Redemption" ]
        }
      }
      feature: {
        bytes_list: {
          value: [ "Fight Club" ]
        }
      }
    }
  }
  feature_list: {
    key : "actors"
    value: {
      feature: {
        bytes_list: {
          value: [ "Tim Robbins", "Morgan Freeman" ]
        }
      }
      feature: {
        bytes_list: {
          value: [ "Brad Pitt", "Edward Norton", "Helena Bonham Carter" ]
        }
      }
    }
  }
}
```
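
As an illustrative sketch (the variable names below are invented for the
example), part of the message above can be assembled with the `tf.train`
wrappers and parsed back with `tf.io.parse_single_sequence_example`; only the
"locale", "age" and "movie_ratings" entries are included to keep it short:

```
context = tf.train.Features(feature={
    'locale': tf.train.Feature(
        bytes_list=tf.train.BytesList(value=[b'pt_BR'])),
    'age': tf.train.Feature(
        float_list=tf.train.FloatList(value=[19.0])),
})
movie_ratings = tf.train.FeatureList(feature=[
    tf.train.Feature(float_list=tf.train.FloatList(value=[4.5])),
    tf.train.Feature(float_list=tf.train.FloatList(value=[5.0])),
])
seq_example = tf.train.SequenceExample(
    context=context,
    feature_lists=tf.train.FeatureLists(
        feature_list={'movie_ratings': movie_ratings}))

# Parse the serialized proto back into tensors.
parsed_context, parsed_sequence = tf.io.parse_single_sequence_example(
    seq_example.SerializeToString(),
    context_features={
        'locale': tf.io.FixedLenFeature([], dtype=tf.string),
        'age': tf.io.FixedLenFeature([], dtype=tf.float32),
    },
    sequence_features={
        'movie_ratings': tf.io.FixedLenSequenceFeature([], dtype=tf.float32),
    })
```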

A conformant `SequenceExample` data set obeys the following conventions:

`context`:

 - All conformant context features `K` must obey the same conventions as
   a conformant Example's features (see above).

`feature_lists`:

 - A `FeatureList L` may be missing in an example; it is up to the
   parser configuration to determine if this is allowed or considered
   an empty list (zero length).
 - If a `FeatureList L` exists, it may be empty (zero length).
 - If a `FeatureList L` is non-empty, all features within the `FeatureList`
   must have the same data type `T`. Even across `SequenceExample`s, the type `T`
   of the `FeatureList` identified by the same key must be the same. An entry
   without any values may serve as an empty feature.
 - If a `FeatureList L` is non-empty, it is up to the parser configuration
   to determine if all features within the `FeatureList` must
   have the same size. The same holds for this `FeatureList` across multiple
   examples.
 - For sequence modeling ([example](https://github.com/tensorflow/nmt)), the
   feature lists represent a sequence of frames. In this scenario, all
   `FeatureList`s in a `SequenceExample` have the same number of `Feature`
   messages, so that the i-th element in each `FeatureList` is part of the
   i-th frame (or time step).

**Examples of conformant and non-conformant examples' `FeatureLists`:**

Conformant `FeatureLists`:

```
   feature_lists: { feature_list: {
     key: "movie_ratings"
     value: { feature: { float_list: { value: [ 4.5 ] } }
              feature: { float_list: { value: [ 5.0 ] } } }
   } }
```

Non-conformant `FeatureLists` (mismatched types):

```
   feature_lists: { feature_list: {
     key: "movie_ratings"
     value: { feature: { float_list: { value: [ 4.5 ] } }
              feature: { int64_list: { value: [ 5 ] } } }
   } }
```

Conditionally conformant `FeatureLists`; the parser configuration determines
if the feature sizes must match:

```
   feature_lists: { feature_list: {
     key: "movie_ratings"
     value: { feature: { float_list: { value: [ 4.5 ] } }
              feature: { float_list: { value: [ 5.0, 6.0 ] } } }
   } }
```

**Examples of conformant and non-conformant `SequenceExample`s:**

Conformant pair of `SequenceExample`s:

```
   feature_lists: { feature_list: {
     key: "movie_ratings"
     value: { feature: { float_list: { value: [ 4.5 ] } }
              feature: { float_list: { value: [ 5.0 ] } } }
   } }

   feature_lists: { feature_list: {
     key: "movie_ratings"
     value: { feature: { float_list: { value: [ 4.5 ] } }
              feature: { float_list: { value: [ 5.0 ] } }
              feature: { float_list: { value: [ 2.0 ] } } }
   } }
```

Conformant pair of `SequenceExample`s:

```
   feature_lists: { feature_list: {
     key: "movie_ratings"
     value: { feature: { float_list: { value: [ 4.5 ] } }
              feature: { float_list: { value: [ 5.0 ] } } }
   } }

   feature_lists: { feature_list: {
     key: "movie_ratings"
     value: { }
   } }
```

Conditionally conformant pair of `SequenceExample`s; the parser configuration
determines if the second `feature_lists` is consistent (zero-length) or
invalid (missing "movie_ratings"):

```
   feature_lists: { feature_list: {
     key: "movie_ratings"
     value: { feature: { float_list: { value: [ 4.5 ] } }
              feature: { float_list: { value: [ 5.0 ] } } }
   } }

   feature_lists: { }
```

Non-conformant pair of `SequenceExample`s (mismatched types):

```
   feature_lists: { feature_list: {
     key: "movie_ratings"
     value: { feature: { float_list: { value: [ 4.5 ] } }
              feature: { float_list: { value: [ 5.0 ] } } }
   } }

   feature_lists: { feature_list: {
     key: "movie_ratings"
     value: { feature: { int64_list: { value: [ 4 ] } }
              feature: { int64_list: { value: [ 5 ] } }
              feature: { int64_list: { value: [ 2 ] } } }
   } }
```

Conditionally conformant pair of `SequenceExample`s; the parser configuration
determines if the feature sizes must match:

```
   feature_lists: { feature_list: {
     key: "movie_ratings"
     value: { feature: { float_list: { value: [ 4.5 ] } }
              feature: { float_list: { value: [ 5.0 ] } } }
   } }

   feature_lists: { feature_list: {
     key: "movie_ratings"
     value: { feature: { float_list: { value: [ 4.0 ] } }
              feature: { float_list: { value: [ 5.0, 3.0 ] } } }
   } }
```
"""