Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/google/cloud/bigquery_storage_v1/types/arrow.py: 100%
16 statements
coverage.py v7.2.2, created at 2023-03-26 06:10 +0000
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import annotations

from typing import MutableMapping, MutableSequence

import proto  # type: ignore


__protobuf__ = proto.module(
    package="google.cloud.bigquery.storage.v1",
    manifest={
        "ArrowSchema",
        "ArrowRecordBatch",
        "ArrowSerializationOptions",
    },
)


class ArrowSchema(proto.Message):
    r"""Arrow schema as specified in
    https://arrow.apache.org/docs/python/api/datatypes.html and
    serialized to bytes using IPC:
    https://arrow.apache.org/docs/format/Columnar.html#serialization-and-interprocess-communication-ipc
    See code samples for how this message can be deserialized.

    Attributes:
        serialized_schema (bytes):
            IPC-serialized Arrow schema.
    """

    serialized_schema: bytes = proto.Field(
        proto.BYTES,
        number=1,
    )
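

# Illustrative sketch only, not part of the generated module: assuming the
# optional ``pyarrow`` package is installed, the IPC-serialized schema bytes
# can be turned back into a ``pyarrow.Schema`` with a hypothetical helper
# such as the following.
def _example_read_arrow_schema(msg: "ArrowSchema"):
    import pyarrow as pa  # assumption: pyarrow is available at call time

    # ``pyarrow.ipc.read_schema`` reads a Schema from an IPC message or buffer.
    return pa.ipc.read_schema(pa.py_buffer(msg.serialized_schema))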


class ArrowRecordBatch(proto.Message):
    r"""Arrow RecordBatch.

    Attributes:
        serialized_record_batch (bytes):
            IPC-serialized Arrow RecordBatch.
        row_count (int):
            [Deprecated] The count of rows in
            ``serialized_record_batch``. Please use the
            format-independent ReadRowsResponse.row_count instead.
    """

    serialized_record_batch: bytes = proto.Field(
        proto.BYTES,
        number=1,
    )
    row_count: int = proto.Field(
        proto.INT64,
        number=2,
    )
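

# Illustrative sketch only, not part of the generated module: an IPC-serialized
# RecordBatch does not embed its schema, which the Read API delivers separately
# (e.g. as an ``ArrowSchema`` on the read session), so decoding needs both.
# Assuming ``pyarrow`` is installed, a hypothetical helper could look like this.
def _example_read_arrow_record_batch(msg: "ArrowRecordBatch", schema_bytes: bytes):
    import pyarrow as pa  # assumption: pyarrow is available at call time

    # Rebuild the pyarrow.Schema first, then decode the batch against it.
    schema = pa.ipc.read_schema(pa.py_buffer(schema_bytes))
    return pa.ipc.read_record_batch(
        pa.py_buffer(msg.serialized_record_batch), schema
    )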


class ArrowSerializationOptions(proto.Message):
    r"""Contains options specific to Arrow serialization.

    Attributes:
        buffer_compression (google.cloud.bigquery_storage_v1.types.ArrowSerializationOptions.CompressionCodec):
            The compression codec to use for Arrow
            buffers in serialized record batches.
    """

    class CompressionCodec(proto.Enum):
        r"""Compression codecs supported by Arrow.

        Values:
            COMPRESSION_UNSPECIFIED (0):
                If unspecified, no compression will be used.
            LZ4_FRAME (1):
                LZ4 Frame
                (https://github.com/lz4/lz4/blob/dev/doc/lz4_Frame_format.md)
            ZSTD (2):
                Zstandard compression.
        """
        COMPRESSION_UNSPECIFIED = 0
        LZ4_FRAME = 1
        ZSTD = 2

    buffer_compression: CompressionCodec = proto.Field(
        proto.ENUM,
        number=2,
        enum=CompressionCodec,
    )
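

# Illustrative sketch only, not part of the generated module: requesting
# LZ4-compressed Arrow buffers is a matter of constructing the proto-plus
# message with the desired codec, e.g. via a hypothetical helper like this.
def _example_lz4_serialization_options() -> "ArrowSerializationOptions":
    # Ask the server to compress Arrow buffers with the LZ4 frame format.
    return ArrowSerializationOptions(
        buffer_compression=ArrowSerializationOptions.CompressionCodec.LZ4_FRAME,
    )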


__all__ = tuple(sorted(__protobuf__.manifest))