# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import enum


class AutoRowIDs(enum.Enum):
    """How to handle automatic insert IDs when inserting rows as a stream."""

    DISABLED = enum.auto()
    GENERATE_UUID = enum.auto()
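
# Example (illustrative; the table ID and rows are hypothetical). When
# streaming rows with ``Client.insert_rows_json``, the ``row_ids`` argument
# controls insert-ID generation:
#
#     from google.cloud import bigquery
#
#     client = bigquery.Client()
#     errors = client.insert_rows_json(
#         "my-project.my_dataset.my_table",
#         [{"name": "Ada"}],
#         row_ids=AutoRowIDs.GENERATE_UUID,  # or AutoRowIDs.DISABLED
#     )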


class Compression(str, enum.Enum):
    """The compression type to use for exported files. The default value is
    :attr:`NONE`.

    :attr:`DEFLATE` and :attr:`SNAPPY` are only supported for Avro.
    """

    GZIP = "GZIP"
    """Specifies GZIP format."""

    DEFLATE = "DEFLATE"
    """Specifies DEFLATE format."""

    SNAPPY = "SNAPPY"
    """Specifies SNAPPY format."""

    ZSTD = "ZSTD"
    """Specifies ZSTD format."""

    NONE = "NONE"
    """Specifies no compression."""
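
# Example (illustrative): compression is typically set on an extract job
# configuration, e.g. SNAPPY for Avro exports or GZIP for CSV exports:
#
#     from google.cloud import bigquery
#
#     job_config = bigquery.ExtractJobConfig(
#         destination_format="AVRO",
#         compression=Compression.SNAPPY,  # Avro-only; use GZIP for CSV
#     )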


class DecimalTargetType:
    """The data types that could be used as a target type when converting decimal values.

    https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#DecimalTargetType

    .. versionadded:: 2.21.0
    """

    NUMERIC = "NUMERIC"
    """Decimal values could be converted to NUMERIC type."""

    BIGNUMERIC = "BIGNUMERIC"
    """Decimal values could be converted to BIGNUMERIC type."""

    STRING = "STRING"
    """Decimal values could be converted to STRING type."""
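
# Example (illustrative): a load job can list target types in order of
# preference; the first type that fits each decimal value is used:
#
#     from google.cloud import bigquery
#
#     job_config = bigquery.LoadJobConfig(
#         decimal_target_types=[
#             DecimalTargetType.NUMERIC,
#             DecimalTargetType.BIGNUMERIC,
#             DecimalTargetType.STRING,
#         ],
#     )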


class CreateDisposition(object):
    """Specifies whether the job is allowed to create new tables. The default
    value is :attr:`CREATE_IF_NEEDED`.

    Creation, truncation and append actions occur as one atomic update
    upon job completion.
    """

    CREATE_IF_NEEDED = "CREATE_IF_NEEDED"
    """If the table does not exist, BigQuery creates the table."""

    CREATE_NEVER = "CREATE_NEVER"
    """The table must already exist. If it does not, a 'notFound' error is
    returned in the job result."""
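
# Example (illustrative; the destination table ID is hypothetical):
#
#     from google.cloud import bigquery
#
#     job_config = bigquery.QueryJobConfig(
#         destination="my-project.my_dataset.results",
#         create_disposition=CreateDisposition.CREATE_NEVER,
#     )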


class DatasetView(enum.Enum):
    """DatasetView specifies which dataset information is returned."""

    DATASET_VIEW_UNSPECIFIED = "DATASET_VIEW_UNSPECIFIED"
    """The default value. Currently maps to the FULL view."""

    METADATA = "METADATA"
    """View metadata information for the dataset, such as friendlyName,
    description, labels, etc."""

    ACL = "ACL"
    """View ACL information for the dataset, which defines dataset access
    for one or more entities."""

    FULL = "FULL"
    """View both dataset metadata and ACL information."""
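
# Example (a sketch; assumes a client version whose ``get_dataset`` accepts
# a ``dataset_view`` argument, and a hypothetical dataset ID):
#
#     from google.cloud import bigquery
#
#     client = bigquery.Client()
#     dataset = client.get_dataset(
#         "my-project.my_dataset", dataset_view=DatasetView.METADATA
#     )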


class DefaultPandasDTypes(enum.Enum):
    """Default Pandas DataFrame dtypes used when converting BigQuery data.

    These sentinel values are used instead of None to maintain backward
    compatibility and to allow the pandas package to remain an optional
    dependency. For more information:
    https://stackoverflow.com/a/60605919/101923
    """

    BOOL_DTYPE = object()
    """Specifies default bool dtype"""

    INT_DTYPE = object()
    """Specifies default integer dtype"""

    DATE_DTYPE = object()
    """Specifies default date dtype"""

    TIME_DTYPE = object()
    """Specifies default time dtype"""

    RANGE_DATE_DTYPE = object()
    """Specifies default range date dtype"""

    RANGE_DATETIME_DTYPE = object()
    """Specifies default range datetime dtype"""

    RANGE_TIMESTAMP_DTYPE = object()
    """Specifies default range timestamp dtype"""
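
# A minimal sketch of the sentinel-default pattern these values enable
# (``to_dataframe`` here is a stand-in, not the real signature): the
# sentinel lets the function defer importing pandas until call time, so
# pandas stays optional and ``None`` remains a distinct user-provided value.
#
#     def to_dataframe(int_dtype=DefaultPandasDTypes.INT_DTYPE):
#         if int_dtype is DefaultPandasDTypes.INT_DTYPE:
#             import pandas  # imported lazily; pandas is optional
#
#             int_dtype = pandas.Int64Dtype()
#         ...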


class DestinationFormat(object):
    """The exported file format. The default value is :attr:`CSV`.

    Tables with nested or repeated fields cannot be exported as CSV.
    """

    CSV = "CSV"
    """Specifies CSV format."""

    NEWLINE_DELIMITED_JSON = "NEWLINE_DELIMITED_JSON"
    """Specifies newline delimited JSON format."""

    AVRO = "AVRO"
    """Specifies Avro format."""

    PARQUET = "PARQUET"
    """Specifies Parquet format."""
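
# Example (illustrative; the table ID and bucket name are hypothetical):
#
#     from google.cloud import bigquery
#
#     client = bigquery.Client()
#     extract_job = client.extract_table(
#         "my-project.my_dataset.my_table",
#         "gs://my-bucket/export-*.json",
#         job_config=bigquery.ExtractJobConfig(
#             destination_format=DestinationFormat.NEWLINE_DELIMITED_JSON
#         ),
#     )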


class Encoding(object):
    """The character encoding of the data. The default is :attr:`UTF_8`.

    BigQuery decodes the data after the raw, binary data has been
    split using the values of the quote and fieldDelimiter properties.
    """

    UTF_8 = "UTF-8"
    """Specifies UTF-8 encoding."""

    ISO_8859_1 = "ISO-8859-1"
    """Specifies ISO-8859-1 encoding."""
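
# Example (illustrative): declaring the encoding of CSV source files on a
# load job configuration:
#
#     from google.cloud import bigquery
#
#     job_config = bigquery.LoadJobConfig(
#         source_format="CSV",
#         encoding=Encoding.ISO_8859_1,
#     )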


class QueryPriority(object):
    """Specifies a priority for the query. The default value is
    :attr:`INTERACTIVE`.
    """

    INTERACTIVE = "INTERACTIVE"
    """Specifies interactive priority."""

    BATCH = "BATCH"
    """Specifies batch priority."""
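
# Example (illustrative): batch queries are queued and started when idle
# resources are available:
#
#     from google.cloud import bigquery
#
#     client = bigquery.Client()
#     job = client.query(
#         "SELECT 1",
#         job_config=bigquery.QueryJobConfig(priority=QueryPriority.BATCH),
#     )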


class QueryApiMethod(str, enum.Enum):
    """API method used to start the query. The default value is
    :attr:`INSERT`.
    """

    INSERT = "INSERT"
    """Submit a query job by using the `jobs.insert REST API method
    <https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/insert>`_.

    This supports all job configuration options.
    """

    QUERY = "QUERY"
    """Submit a query job by using the `jobs.query REST API method
    <https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query>`_.

    Differences from ``INSERT``:

    * Many parameters and job configuration options, including job ID and
      destination table, cannot be used with this API method. See the
      `jobs.query REST API documentation
      <https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query>`_ for
      the complete list of supported configuration options.

    * The API blocks for up to a specified timeout, waiting for the query to
      finish.

    * The full job resource (including job statistics) may not be available.
      Call :meth:`~google.cloud.bigquery.job.QueryJob.reload` or
      :meth:`~google.cloud.bigquery.client.Client.get_job` to get full job
      statistics and configuration.

    * :meth:`~google.cloud.bigquery.Client.query` can raise API exceptions if
      the query fails, whereas the same errors don't appear until calling
      :meth:`~google.cloud.bigquery.job.QueryJob.result` when the ``INSERT``
      API method is used.
    """
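
# Example (illustrative): opting into the ``jobs.query`` code path for a
# short query that needs no special job configuration:
#
#     from google.cloud import bigquery
#
#     client = bigquery.Client()
#     job = client.query("SELECT 1", api_method=QueryApiMethod.QUERY)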


class SchemaUpdateOption(object):
    """Specifies an update to the destination table schema as a side effect of
    a load job.
    """

    ALLOW_FIELD_ADDITION = "ALLOW_FIELD_ADDITION"
    """Allow adding a nullable field to the schema."""

    ALLOW_FIELD_RELAXATION = "ALLOW_FIELD_RELAXATION"
    """Allow relaxing a required field in the original schema to nullable."""
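
# Example (illustrative): schema update options are typically combined with
# an appending write disposition:
#
#     from google.cloud import bigquery
#
#     job_config = bigquery.LoadJobConfig(
#         write_disposition="WRITE_APPEND",
#         schema_update_options=[SchemaUpdateOption.ALLOW_FIELD_ADDITION],
#     )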


class SourceFormat(object):
    """The format of the data files. The default value is :attr:`CSV`.

    Note that the set of allowed values for loading data is different
    from the set used for external data sources (see
    :class:`~google.cloud.bigquery.external_config.ExternalSourceFormat`).
    """

    CSV = "CSV"
    """Specifies CSV format."""

    DATASTORE_BACKUP = "DATASTORE_BACKUP"
    """Specifies Datastore backup format."""

    NEWLINE_DELIMITED_JSON = "NEWLINE_DELIMITED_JSON"
    """Specifies newline delimited JSON format."""

    AVRO = "AVRO"
    """Specifies Avro format."""

    PARQUET = "PARQUET"
    """Specifies Parquet format."""

    ORC = "ORC"
    """Specifies ORC format."""
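
# Example (illustrative; the URI and table ID are hypothetical):
#
#     from google.cloud import bigquery
#
#     client = bigquery.Client()
#     load_job = client.load_table_from_uri(
#         "gs://my-bucket/data.parquet",
#         "my-project.my_dataset.my_table",
#         job_config=bigquery.LoadJobConfig(source_format=SourceFormat.PARQUET),
#     )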


class KeyResultStatementKind:
    """Determines which statement in the script represents the "key result".

    The "key result" is used to populate the schema and query results of the script job.

    https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#keyresultstatementkind
    """

    KEY_RESULT_STATEMENT_KIND_UNSPECIFIED = "KEY_RESULT_STATEMENT_KIND_UNSPECIFIED"
    """The default, unspecified value."""

    LAST = "LAST"
    """The last result determines the key result."""

    FIRST_SELECT = "FIRST_SELECT"
    """The first SELECT statement determines the key result."""
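
# Example (a sketch; assumes the ``ScriptOptions`` class from
# ``google.cloud.bigquery.job`` and the ``script_options`` property of
# ``QueryJobConfig``):
#
#     from google.cloud import bigquery
#     from google.cloud.bigquery.job import ScriptOptions
#
#     job_config = bigquery.QueryJobConfig(
#         script_options=ScriptOptions(
#             key_result_statement=KeyResultStatementKind.LAST
#         ),
#     )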


class StandardSqlTypeNames(str, enum.Enum):
    """Enum of allowed SQL type names in schema.SchemaField.

    Data type names used in GoogleSQL.
    """

    def _generate_next_value_(name, start, count, last_values):
        return name

    TYPE_KIND_UNSPECIFIED = enum.auto()
    INT64 = enum.auto()
    BOOL = enum.auto()
    FLOAT64 = enum.auto()
    STRING = enum.auto()
    BYTES = enum.auto()
    TIMESTAMP = enum.auto()
    DATE = enum.auto()
    TIME = enum.auto()
    DATETIME = enum.auto()
    INTERVAL = enum.auto()
    GEOGRAPHY = enum.auto()
    NUMERIC = enum.auto()
    BIGNUMERIC = enum.auto()
    JSON = enum.auto()
    ARRAY = enum.auto()
    STRUCT = enum.auto()
    RANGE = enum.auto()
    # NOTE: FOREIGN acts as a wrapper for data types
    # not natively understood by BigQuery unless translated.
    FOREIGN = enum.auto()
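
# Example (illustrative): these names describe GoogleSQL types, e.g. when
# constructing a ``StandardSqlDataType``:
#
#     from google.cloud.bigquery.standard_sql import StandardSqlDataType
#
#     int64_type = StandardSqlDataType(type_kind=StandardSqlTypeNames.INT64)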


class EntityTypes(str, enum.Enum):
    """Enum of allowed entity type names in AccessEntry."""

    USER_BY_EMAIL = "userByEmail"
    GROUP_BY_EMAIL = "groupByEmail"
    DOMAIN = "domain"
    DATASET = "dataset"
    SPECIAL_GROUP = "specialGroup"
    VIEW = "view"
    IAM_MEMBER = "iamMember"
    ROUTINE = "routine"
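
# Example (illustrative; the group address is hypothetical):
#
#     from google.cloud import bigquery
#
#     entry = bigquery.AccessEntry(
#         role="READER",
#         entity_type=EntityTypes.GROUP_BY_EMAIL.value,
#         entity_id="analysts@example.com",
#     )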


# See also: https://cloud.google.com/bigquery/data-types#legacy_sql_data_types
# and https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types
class SqlTypeNames(str, enum.Enum):
    """Enum of allowed SQL type names in schema.SchemaField.

    Data type names used in Legacy SQL.
    """

    STRING = "STRING"
    BYTES = "BYTES"
    INTEGER = "INTEGER"
    INT64 = "INTEGER"
    FLOAT = "FLOAT"
    FLOAT64 = "FLOAT"
    DECIMAL = NUMERIC = "NUMERIC"
    BIGDECIMAL = BIGNUMERIC = "BIGNUMERIC"
    BOOLEAN = "BOOLEAN"
    BOOL = "BOOLEAN"
    GEOGRAPHY = "GEOGRAPHY"  # NOTE: not available in legacy types
    RECORD = "RECORD"
    STRUCT = "RECORD"
    TIMESTAMP = "TIMESTAMP"
    DATE = "DATE"
    TIME = "TIME"
    DATETIME = "DATETIME"
    INTERVAL = "INTERVAL"  # NOTE: not available in legacy types
    RANGE = "RANGE"  # NOTE: not available in legacy types
    # NOTE: FOREIGN acts as a wrapper for data types
    # not natively understood by BigQuery unless translated.
    FOREIGN = "FOREIGN"
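
# Example (illustrative): since this is a ``str`` enum, members can be
# passed anywhere a type name string is expected:
#
#     from google.cloud import bigquery
#
#     field = bigquery.SchemaField(
#         "full_name", SqlTypeNames.STRING, mode="REQUIRED"
#     )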


class WriteDisposition(object):
    """Specifies the action that occurs if the destination table already exists.

    The default value is :attr:`WRITE_APPEND`.

    Each action is atomic and only occurs if BigQuery is able to complete
    the job successfully. Creation, truncation and append actions occur as one
    atomic update upon job completion.
    """

    WRITE_APPEND = "WRITE_APPEND"
    """If the table already exists, BigQuery appends the data to the table."""

    WRITE_TRUNCATE = "WRITE_TRUNCATE"
    """If the table already exists, BigQuery overwrites the table data."""

    WRITE_TRUNCATE_DATA = "WRITE_TRUNCATE_DATA"
    """For existing tables, truncate data but preserve existing schema
    and constraints."""

    WRITE_EMPTY = "WRITE_EMPTY"
    """If the table already exists and contains data, a 'duplicate' error is
    returned in the job result."""
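
# Example (illustrative; the destination table ID is hypothetical):
#
#     from google.cloud import bigquery
#
#     job_config = bigquery.QueryJobConfig(
#         destination="my-project.my_dataset.results",
#         write_disposition=WriteDisposition.WRITE_TRUNCATE,
#     )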


class DeterminismLevel:
    """Specifies determinism level for JavaScript user-defined functions (UDFs).

    https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#DeterminismLevel
    """

    DETERMINISM_LEVEL_UNSPECIFIED = "DETERMINISM_LEVEL_UNSPECIFIED"
    """The determinism of the UDF is unspecified."""

    DETERMINISTIC = "DETERMINISTIC"
    """The UDF is deterministic, meaning that two function calls with the same
    inputs always produce the same result, even across two query runs."""

    NOT_DETERMINISTIC = "NOT_DETERMINISTIC"
    """The UDF is not deterministic."""
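
# Example (a sketch; the routine ID is hypothetical and only the field
# relevant to determinism is shown):
#
#     from google.cloud import bigquery
#
#     routine = bigquery.Routine("my-project.my_dataset.my_udf")
#     routine.determinism_level = DeterminismLevel.DETERMINISTIC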


class RoundingMode(str, enum.Enum):
    """Rounding mode options that can be used when storing NUMERIC or
    BIGNUMERIC values.

    ROUNDING_MODE_UNSPECIFIED: defaults to ROUND_HALF_AWAY_FROM_ZERO.

    ROUND_HALF_AWAY_FROM_ZERO: rounds half values away from zero when applying
    precision and scale upon writing of NUMERIC and BIGNUMERIC values.
    With scale 0:

    * 1.1, 1.2, 1.3, 1.4 => 1
    * 1.5, 1.6, 1.7, 1.8, 1.9 => 2

    ROUND_HALF_EVEN: rounds half values to the nearest even value when applying
    precision and scale upon writing of NUMERIC and BIGNUMERIC values.
    With scale 0:

    * 1.1, 1.2, 1.3, 1.4 => 1
    * 1.5 => 2
    * 1.6, 1.7, 1.8, 1.9 => 2
    * 2.5 => 2
    """

    def _generate_next_value_(name, start, count, last_values):
        return name

    ROUNDING_MODE_UNSPECIFIED = enum.auto()
    ROUND_HALF_AWAY_FROM_ZERO = enum.auto()
    ROUND_HALF_EVEN = enum.auto()
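
# Example (a sketch; assumes a ``SchemaField`` version that accepts a
# ``rounding_mode`` argument, as in recent releases):
#
#     from google.cloud import bigquery
#
#     field = bigquery.SchemaField(
#         "price", "NUMERIC", rounding_mode=RoundingMode.ROUND_HALF_EVEN
#     )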


class BigLakeFileFormat(object):
    """The file format used to store data for a BigLake managed table."""

    FILE_FORMAT_UNSPECIFIED = "FILE_FORMAT_UNSPECIFIED"
    """The default unspecified value."""

    PARQUET = "PARQUET"
    """Apache Parquet format."""


class BigLakeTableFormat(object):
    """The table format of a BigLake managed table."""

    TABLE_FORMAT_UNSPECIFIED = "TABLE_FORMAT_UNSPECIFIED"
    """The default unspecified value."""

    ICEBERG = "ICEBERG"
    """Apache Iceberg format."""
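
# Example (a sketch; assumes the ``BigLakeConfiguration`` class from
# ``google.cloud.bigquery.table``; the connection and bucket are hypothetical):
#
#     from google.cloud import bigquery
#     from google.cloud.bigquery.table import BigLakeConfiguration
#
#     table = bigquery.Table("my-project.my_dataset.my_table")
#     table.biglake_configuration = BigLakeConfiguration(
#         connection_id="my-connection",
#         storage_uri="gs://my-bucket/my_table",
#         file_format=BigLakeFileFormat.PARQUET,
#         table_format=BigLakeTableFormat.ICEBERG,
#     )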


class UpdateMode(enum.Enum):
    """Specifies the kind of information to update in a dataset."""

    UPDATE_MODE_UNSPECIFIED = "UPDATE_MODE_UNSPECIFIED"
    """The default value. Behavior defaults to UPDATE_FULL."""

    UPDATE_METADATA = "UPDATE_METADATA"
    """Includes metadata information for the dataset, such as friendlyName,
    description, labels, etc."""

    UPDATE_ACL = "UPDATE_ACL"
    """Includes ACL information for the dataset, which defines dataset access
    for one or more entities."""

    UPDATE_FULL = "UPDATE_FULL"
    """Includes both dataset metadata and ACL information."""
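
# Example (a sketch; assumes a client version whose ``update_dataset``
# accepts an ``update_mode`` argument, and a hypothetical dataset ID):
#
#     from google.cloud import bigquery
#
#     client = bigquery.Client()
#     dataset = client.get_dataset("my-project.my_dataset")
#     dataset.description = "Updated description"
#     dataset = client.update_dataset(
#         dataset, ["description"], update_mode=UpdateMode.UPDATE_METADATA
#     )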


class JobCreationMode(object):
    """Documented values for Job Creation Mode."""

    JOB_CREATION_MODE_UNSPECIFIED = "JOB_CREATION_MODE_UNSPECIFIED"
    """Job creation mode is unspecified."""

    JOB_CREATION_REQUIRED = "JOB_CREATION_REQUIRED"
    """Job creation is always required."""

    JOB_CREATION_OPTIONAL = "JOB_CREATION_OPTIONAL"
    """Job creation is optional.

    Returning immediate results is prioritized.
    BigQuery will automatically determine if a Job needs to be created.
    The conditions under which BigQuery can decide to not create a Job are
    subject to change.
    """
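
# Example (a sketch; assumes a client version that exposes a
# ``default_job_creation_mode`` setting):
#
#     from google.cloud import bigquery
#
#     client = bigquery.Client()
#     client.default_job_creation_mode = JobCreationMode.JOB_CREATION_OPTIONAL
#     rows = client.query_and_wait("SELECT 1")  # may skip creating a job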


class SourceColumnMatch(str, enum.Enum):
    """How to match source columns to the destination schema.

    When unspecified, sensible defaults are chosen based on how the schema is
    provided: if autodetect is used, columns are matched by name; otherwise,
    columns are matched by position. This keeps the behavior
    backward-compatible.
    """

    SOURCE_COLUMN_MATCH_UNSPECIFIED = "SOURCE_COLUMN_MATCH_UNSPECIFIED"
    """Unspecified column name match option."""

    POSITION = "POSITION"
    """Matches by position. This assumes that the columns are ordered the same
    way as the schema."""

    NAME = "NAME"
    """Matches by name. This reads the header row as column names and reorders
    columns to match the field names in the schema."""
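
# Example (a sketch; assumes a ``LoadJobConfig`` version that accepts a
# ``source_column_match`` argument):
#
#     from google.cloud import bigquery
#
#     job_config = bigquery.LoadJobConfig(
#         source_format="CSV",
#         source_column_match=SourceColumnMatch.NAME,
#     )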