Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/google/cloud/bigquery/enums.py: 100%
115 statements
« prev ^ index » next coverage.py v7.2.2, created at 2023-03-26 06:07 +0000
« prev ^ index » next coverage.py v7.2.2, created at 2023-03-26 06:07 +0000
1# Copyright 2019 Google LLC
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
15import enum
18class AutoRowIDs(enum.Enum):
19 """How to handle automatic insert IDs when inserting rows as a stream."""
21 DISABLED = enum.auto()
22 GENERATE_UUID = enum.auto()
25class Compression(object):
26 """The compression type to use for exported files. The default value is
27 :attr:`NONE`.
29 :attr:`DEFLATE` and :attr:`SNAPPY` are
30 only supported for Avro.
31 """
33 GZIP = "GZIP"
34 """Specifies GZIP format."""
36 DEFLATE = "DEFLATE"
37 """Specifies DEFLATE format."""
39 SNAPPY = "SNAPPY"
40 """Specifies SNAPPY format."""
42 NONE = "NONE"
43 """Specifies no compression."""
46class DecimalTargetType:
47 """The data types that could be used as a target type when converting decimal values.
49 https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#DecimalTargetType
51 .. versionadded:: 2.21.0
52 """
54 NUMERIC = "NUMERIC"
55 """Decimal values could be converted to NUMERIC type."""
57 BIGNUMERIC = "BIGNUMERIC"
58 """Decimal values could be converted to BIGNUMERIC type."""
60 STRING = "STRING"
61 """Decimal values could be converted to STRING type."""
64class CreateDisposition(object):
65 """Specifies whether the job is allowed to create new tables. The default
66 value is :attr:`CREATE_IF_NEEDED`.
68 Creation, truncation and append actions occur as one atomic update
69 upon job completion.
70 """
72 CREATE_IF_NEEDED = "CREATE_IF_NEEDED"
73 """If the table does not exist, BigQuery creates the table."""
75 CREATE_NEVER = "CREATE_NEVER"
76 """The table must already exist. If it does not, a 'notFound' error is
77 returned in the job result."""
80class DefaultPandasDTypes(enum.Enum):
81 """Default Pandas DataFrem DTypes to convert BigQuery data. These
82 Sentinel values are used instead of None to maintain backward compatibility,
83 and allow Pandas package is not available. For more information:
84 https://stackoverflow.com/a/60605919/101923
85 """
87 BOOL_DTYPE = object()
88 """Specifies default bool dtype"""
90 INT_DTYPE = object()
91 """Specifies default integer dtype"""
94class DestinationFormat(object):
95 """The exported file format. The default value is :attr:`CSV`.
97 Tables with nested or repeated fields cannot be exported as CSV.
98 """
100 CSV = "CSV"
101 """Specifies CSV format."""
103 NEWLINE_DELIMITED_JSON = "NEWLINE_DELIMITED_JSON"
104 """Specifies newline delimited JSON format."""
106 AVRO = "AVRO"
107 """Specifies Avro format."""
109 PARQUET = "PARQUET"
110 """Specifies Parquet format."""
113class Encoding(object):
114 """The character encoding of the data. The default is :attr:`UTF_8`.
116 BigQuery decodes the data after the raw, binary data has been
117 split using the values of the quote and fieldDelimiter properties.
118 """
120 UTF_8 = "UTF-8"
121 """Specifies UTF-8 encoding."""
123 ISO_8859_1 = "ISO-8859-1"
124 """Specifies ISO-8859-1 encoding."""
127class QueryPriority(object):
128 """Specifies a priority for the query. The default value is
129 :attr:`INTERACTIVE`.
130 """
132 INTERACTIVE = "INTERACTIVE"
133 """Specifies interactive priority."""
135 BATCH = "BATCH"
136 """Specifies batch priority."""
139class QueryApiMethod(str, enum.Enum):
140 """API method used to start the query. The default value is
141 :attr:`INSERT`.
142 """
144 INSERT = "INSERT"
145 """Submit a query job by using the `jobs.insert REST API method
146 <https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/insert>`_.
148 This supports all job configuration options.
149 """
151 QUERY = "QUERY"
152 """Submit a query job by using the `jobs.query REST API method
153 <https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query>`_.
155 Differences from ``INSERT``:
157 * Many parameters and job configuration options, including job ID and
158 destination table, cannot be used
159 with this API method. See the `jobs.query REST API documentation
160 <https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query>`_ for
161 the complete list of supported configuration options.
163 * API blocks up to a specified timeout, waiting for the query to
164 finish.
166 * The full job resource (including job statistics) may not be available.
167 Call :meth:`~google.cloud.bigquery.job.QueryJob.reload` or
168 :meth:`~google.cloud.bigquery.client.Client.get_job` to get full job
169 statistics and configuration.
171 * :meth:`~google.cloud.bigquery.Client.query` can raise API exceptions if
172 the query fails, whereas the same errors don't appear until calling
173 :meth:`~google.cloud.bigquery.job.QueryJob.result` when the ``INSERT``
174 API method is used.
175 """
178class SchemaUpdateOption(object):
179 """Specifies an update to the destination table schema as a side effect of
180 a load job.
181 """
183 ALLOW_FIELD_ADDITION = "ALLOW_FIELD_ADDITION"
184 """Allow adding a nullable field to the schema."""
186 ALLOW_FIELD_RELAXATION = "ALLOW_FIELD_RELAXATION"
187 """Allow relaxing a required field in the original schema to nullable."""
190class SourceFormat(object):
191 """The format of the data files. The default value is :attr:`CSV`.
193 Note that the set of allowed values for loading data is different
194 than the set used for external data sources (see
195 :class:`~google.cloud.bigquery.external_config.ExternalSourceFormat`).
196 """
198 CSV = "CSV"
199 """Specifies CSV format."""
201 DATASTORE_BACKUP = "DATASTORE_BACKUP"
202 """Specifies datastore backup format"""
204 NEWLINE_DELIMITED_JSON = "NEWLINE_DELIMITED_JSON"
205 """Specifies newline delimited JSON format."""
207 AVRO = "AVRO"
208 """Specifies Avro format."""
210 PARQUET = "PARQUET"
211 """Specifies Parquet format."""
213 ORC = "ORC"
214 """Specifies Orc format."""
217class KeyResultStatementKind:
218 """Determines which statement in the script represents the "key result".
220 The "key result" is used to populate the schema and query results of the script job.
222 https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#keyresultstatementkind
223 """
225 KEY_RESULT_STATEMENT_KIND_UNSPECIFIED = "KEY_RESULT_STATEMENT_KIND_UNSPECIFIED"
226 LAST = "LAST"
227 FIRST_SELECT = "FIRST_SELECT"
230class StandardSqlTypeNames(str, enum.Enum):
231 def _generate_next_value_(name, start, count, last_values):
232 return name
234 TYPE_KIND_UNSPECIFIED = enum.auto()
235 INT64 = enum.auto()
236 BOOL = enum.auto()
237 FLOAT64 = enum.auto()
238 STRING = enum.auto()
239 BYTES = enum.auto()
240 TIMESTAMP = enum.auto()
241 DATE = enum.auto()
242 TIME = enum.auto()
243 DATETIME = enum.auto()
244 INTERVAL = enum.auto()
245 GEOGRAPHY = enum.auto()
246 NUMERIC = enum.auto()
247 BIGNUMERIC = enum.auto()
248 JSON = enum.auto()
249 ARRAY = enum.auto()
250 STRUCT = enum.auto()
253class EntityTypes(str, enum.Enum):
254 """Enum of allowed entity type names in AccessEntry"""
256 USER_BY_EMAIL = "userByEmail"
257 GROUP_BY_EMAIL = "groupByEmail"
258 DOMAIN = "domain"
259 DATASET = "dataset"
260 SPECIAL_GROUP = "specialGroup"
261 VIEW = "view"
262 IAM_MEMBER = "iamMember"
263 ROUTINE = "routine"
266# See also: https://cloud.google.com/bigquery/data-types#legacy_sql_data_types
267# and https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types
268class SqlTypeNames(str, enum.Enum):
269 """Enum of allowed SQL type names in schema.SchemaField."""
271 STRING = "STRING"
272 BYTES = "BYTES"
273 INTEGER = "INTEGER"
274 INT64 = "INTEGER"
275 FLOAT = "FLOAT"
276 FLOAT64 = "FLOAT"
277 DECIMAL = NUMERIC = "NUMERIC"
278 BIGDECIMAL = BIGNUMERIC = "BIGNUMERIC"
279 BOOLEAN = "BOOLEAN"
280 BOOL = "BOOLEAN"
281 GEOGRAPHY = "GEOGRAPHY" # NOTE: not available in legacy types
282 RECORD = "RECORD"
283 STRUCT = "RECORD"
284 TIMESTAMP = "TIMESTAMP"
285 DATE = "DATE"
286 TIME = "TIME"
287 DATETIME = "DATETIME"
288 INTERVAL = "INTERVAL" # NOTE: not available in legacy types
291class WriteDisposition(object):
292 """Specifies the action that occurs if destination table already exists.
294 The default value is :attr:`WRITE_APPEND`.
296 Each action is atomic and only occurs if BigQuery is able to complete
297 the job successfully. Creation, truncation and append actions occur as one
298 atomic update upon job completion.
299 """
301 WRITE_APPEND = "WRITE_APPEND"
302 """If the table already exists, BigQuery appends the data to the table."""
304 WRITE_TRUNCATE = "WRITE_TRUNCATE"
305 """If the table already exists, BigQuery overwrites the table data."""
307 WRITE_EMPTY = "WRITE_EMPTY"
308 """If the table already exists and contains data, a 'duplicate' error is
309 returned in the job result."""
312class DeterminismLevel:
313 """Specifies determinism level for JavaScript user-defined functions (UDFs).
315 https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#DeterminismLevel
316 """
318 DETERMINISM_LEVEL_UNSPECIFIED = "DETERMINISM_LEVEL_UNSPECIFIED"
319 """The determinism of the UDF is unspecified."""
321 DETERMINISTIC = "DETERMINISTIC"
322 """The UDF is deterministic, meaning that 2 function calls with the same inputs
323 always produce the same result, even across 2 query runs."""
325 NOT_DETERMINISTIC = "NOT_DETERMINISTIC"
326 """The UDF is not deterministic."""