Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/tensorflow/python/ops/gen_encode_proto_ops.py: 25%

83 statements  

« prev     ^ index     » next       coverage.py v7.4.0, created at 2024-01-03 07:57 +0000

1"""Python wrappers around TensorFlow ops. 

2 

3This file is MACHINE GENERATED! Do not edit. 

4""" 

5 

6import collections 

7 

8from tensorflow.python import pywrap_tfe as pywrap_tfe 

9from tensorflow.python.eager import context as _context 

10from tensorflow.python.eager import core as _core 

11from tensorflow.python.eager import execute as _execute 

12from tensorflow.python.framework import dtypes as _dtypes 

13from tensorflow.security.fuzzing.py import annotation_types as _atypes 

14 

15from tensorflow.python.framework import op_def_registry as _op_def_registry 

16from tensorflow.python.framework import ops as _ops 

17from tensorflow.python.framework import op_def_library as _op_def_library 

18from tensorflow.python.util.deprecation import deprecated_endpoints 

19from tensorflow.python.util import dispatch as _dispatch 

20from tensorflow.python.util.tf_export import tf_export 

21 

22from typing import TypeVar 

23 

@_dispatch.add_fallback_dispatch_list
@_dispatch.add_type_based_api_dispatcher
@tf_export('io.encode_proto')
def encode_proto(sizes, values, field_names, message_type, descriptor_source="local://", name=None):
  r"""The op serializes protobuf messages provided in the input tensors.

  The types of the tensors in `values` must match the schema for the fields
  specified in `field_names`. All the tensors in `values` must have a common
  shape prefix, *batch_shape*.

  The `sizes` tensor specifies repeat counts for each field. The repeat count
  (last dimension) of a each tensor in `values` must be greater than or equal
  to corresponding repeat count in `sizes`.

  A `message_type` name must be provided to give context for the field names.
  The actual message descriptor can be looked up either in the linked-in
  descriptor pool or a filename provided by the caller using the
  `descriptor_source` attribute.

  For the most part, the mapping between Proto field types and TensorFlow dtypes
  is straightforward. However, there are a few special cases:

  - A proto field that contains a submessage or group can only be converted
  to `DT_STRING` (the serialized submessage). This is to reduce the complexity
  of the API. The resulting string can be used as input to another instance of
  the decode_proto op.

  - TensorFlow lacks support for unsigned integers. The ops represent uint64
  types as a `DT_INT64` with the same twos-complement bit pattern (the obvious
  way). Unsigned int32 values can be represented exactly by specifying type
  `DT_INT64`, or using twos-complement if the caller specifies `DT_INT32` in
  the `output_types` attribute.

  The `descriptor_source` attribute selects the source of protocol
  descriptors to consult when looking up `message_type`. This may be:

  - An empty string or "local://", in which case protocol descriptors are
  created for C++ (not Python) proto definitions linked to the binary.

  - A file, in which case protocol descriptors are created from the file,
  which is expected to contain a `FileDescriptorSet` serialized as a string.
  NOTE: You can build a `descriptor_source` file using the `--descriptor_set_out`
  and `--include_imports` options to the protocol compiler `protoc`.

  - A "bytes://<bytes>", in which protocol descriptors are created from `<bytes>`,
  which is expected to be a `FileDescriptorSet` serialized as a string.

  Args:
    sizes: A `Tensor` of type `int32`.
      Tensor of int32 with shape `[batch_shape, len(field_names)]`.
    values: A list of `Tensor` objects.
      List of tensors containing values for the corresponding field.
    field_names: A list of `strings`.
      List of strings containing proto field names.
    message_type: A `string`. Name of the proto message type to decode.
    descriptor_source: An optional `string`. Defaults to `"local://"`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `string`.
  """
  # Grab the active eager/graph context; _context._context may be unset
  # before first use, so fall back to context() which creates it.
  _ctx = _context._context or _context.context()
  tld = _ctx._thread_local_data
  if tld.is_eager:
    # Eager mode: try the C fast path first. It bypasses Python-level
    # argument processing entirely.
    try:
      _result = pywrap_tfe.TFE_Py_FastPathExecute(
        _ctx, "EncodeProto", name, sizes, values, "field_names", field_names,
        "message_type", message_type, "descriptor_source", descriptor_source)
      return _result
    except _core._NotOkStatusException as e:
      # Op executed but failed; surface as the appropriate TF error type.
      _ops.raise_from_not_ok_status(e, name)
    except _core._FallbackException:
      # Fast path declined (e.g. unsupported input kinds); fall through to
      # the slower Python path below.
      pass
    try:
      # Give registered type-based dispatchers (e.g. for dispatchable
      # extension types) a chance to handle the call first.
      # NOTE: _dispatcher_for_encode_proto is bound at module level after
      # this def (forward reference resolved at call time).
      _result = _dispatcher_for_encode_proto(
          (sizes, values, field_names, message_type, descriptor_source,
          name,), None)
      if _result is not NotImplemented:
        return _result
      return encode_proto_eager_fallback(
          sizes, values, field_names=field_names, message_type=message_type,
          descriptor_source=descriptor_source, name=name, ctx=_ctx)
    except _core._SymbolicException:
      pass  # Add nodes to the TensorFlow graph.
    except (TypeError, ValueError):
      # Legacy fallback dispatch (e.g. for ops overloaded on non-Tensor
      # types). Re-raise the original error if no dispatcher claims it.
      _result = _dispatch.dispatch(
            encode_proto, (), dict(sizes=sizes, values=values,
                                   field_names=field_names,
                                   message_type=message_type,
                                   descriptor_source=descriptor_source,
                                   name=name)
          )
      if _result is not _dispatch.OpDispatcher.NOT_SUPPORTED:
        return _result
      raise
  else:
    # Graph mode: still consult the type-based dispatcher before building
    # a graph node.
    _result = _dispatcher_for_encode_proto(
        (sizes, values, field_names, message_type, descriptor_source, name,),
        None)
    if _result is not NotImplemented:
      return _result
  # Add nodes to the TensorFlow graph.
  # Normalize/validate attrs before handing them to the op-def helper.
  if not isinstance(field_names, (list, tuple)):
    raise TypeError(
        "Expected list for 'field_names' argument to "
        "'encode_proto' Op, not %r." % field_names)
  field_names = [_execute.make_str(_s, "field_names") for _s in field_names]
  message_type = _execute.make_str(message_type, "message_type")
  if descriptor_source is None:
    descriptor_source = "local://"
  descriptor_source = _execute.make_str(descriptor_source, "descriptor_source")
  try:
    _, _, _op, _outputs = _op_def_library._apply_op_helper(
        "EncodeProto", sizes=sizes, values=values, field_names=field_names,
                       message_type=message_type,
                       descriptor_source=descriptor_source, name=name)
  except (TypeError, ValueError):
    # Same legacy fallback-dispatch protocol as the eager branch above.
    _result = _dispatch.dispatch(
          encode_proto, (), dict(sizes=sizes, values=values,
                                 field_names=field_names,
                                 message_type=message_type,
                                 descriptor_source=descriptor_source,
                                 name=name)
        )
    if _result is not _dispatch.OpDispatcher.NOT_SUPPORTED:
      return _result
    raise
  _result = _outputs[:]
  if _execute.must_record_gradient():
    # Record attrs/inputs so the gradient function can replay the op.
    _attrs = ("field_names", _op.get_attr("field_names"), "message_type",
              _op.get_attr("message_type"), "descriptor_source",
              _op.get_attr("descriptor_source"), "Tinput_types",
              _op.get_attr("Tinput_types"))
    _inputs_flat = _op.inputs
    _execute.record_gradient(
        "EncodeProto", _inputs_flat, _attrs, _result)
  # Single-output op: unwrap the one-element result list.
  _result, = _result
  return _result

162 

# Expose the op under tf.raw_ops.EncodeProto (positional-arg raw-op wrapper).
EncodeProto = tf_export("raw_ops.EncodeProto")(_ops.to_raw_op(encode_proto))
# Bind the dispatch entry point installed on encode_proto by the
# @add_type_based_api_dispatcher decorator; encode_proto's body calls this
# name, so it must be assigned before the first call (not before the def).
_dispatcher_for_encode_proto = encode_proto._tf_type_based_dispatcher.Dispatch

165 

166 

def encode_proto_eager_fallback(sizes, values, field_names, message_type, descriptor_source, name, ctx):
  """Slow-path eager execution of EncodeProto.

  Used when the C fast path declines the call. Normalizes the attrs,
  converts the inputs to eager tensors, executes the op, and records a
  gradient when taping is active. Returns the single `string` output tensor.
  """
  # Validate and canonicalize the string-valued attrs.
  if not isinstance(field_names, (list, tuple)):
    raise TypeError(
        "Expected list for 'field_names' argument to "
        "'encode_proto' Op, not %r." % field_names)
  field_names = [_execute.make_str(fname, "field_names")
                 for fname in field_names]
  message_type = _execute.make_str(message_type, "message_type")
  descriptor_source = ("local://" if descriptor_source is None
                       else descriptor_source)
  descriptor_source = _execute.make_str(descriptor_source, "descriptor_source")
  # `values` may mix dtypes; the inferred dtype list becomes Tinput_types.
  _attr_Tinput_types, values = _execute.convert_to_mixed_eager_tensors(values, ctx)
  sizes = _ops.convert_to_tensor(sizes, _dtypes.int32)
  flat_inputs = [sizes] + list(values)
  op_attrs = (
      "field_names", field_names,
      "message_type", message_type,
      "descriptor_source", descriptor_source,
      "Tinput_types", _attr_Tinput_types,
  )
  outputs = _execute.execute(b"EncodeProto", 1, inputs=flat_inputs,
                             attrs=op_attrs, ctx=ctx, name=name)
  if _execute.must_record_gradient():
    _execute.record_gradient(
        "EncodeProto", flat_inputs, op_attrs, outputs)
  # Single-output op: unwrap the one-element result list.
  result, = outputs
  return result

189