Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/google/cloud/bigquery/enums.py: 100%

115 statements  


# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import enum

class AutoRowIDs(enum.Enum):
    """How to handle automatic insert IDs when inserting rows as a stream."""

    DISABLED = enum.auto()
    GENERATE_UUID = enum.auto()
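# Usage sketch (illustrative): these values are accepted by the ``row_ids``
# argument of ``Client.insert_rows_json``; the table ID and row payload below
# are placeholders.
#
#     from google.cloud import bigquery
#
#     client = bigquery.Client()
#     errors = client.insert_rows_json(
#         "my-project.my_dataset.my_table",
#         [{"name": "Ada"}],
#         row_ids=AutoRowIDs.GENERATE_UUID,  # let the client create UUIDs
#     )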

class Compression(object):
    """The compression type to use for exported files. The default value is
    :attr:`NONE`.

    :attr:`DEFLATE` and :attr:`SNAPPY` are only supported for Avro.
    """

    GZIP = "GZIP"
    """Specifies GZIP format."""

    DEFLATE = "DEFLATE"
    """Specifies DEFLATE format."""

    SNAPPY = "SNAPPY"
    """Specifies SNAPPY format."""

    NONE = "NONE"
    """Specifies no compression."""
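# Usage sketch (illustrative): compression is typically applied through an
# extract job configuration; the bucket and table names are placeholders.
#
#     from google.cloud import bigquery
#
#     client = bigquery.Client()
#     job_config = bigquery.ExtractJobConfig(compression=Compression.GZIP)
#     client.extract_table(
#         "my-project.my_dataset.my_table",
#         "gs://my-bucket/export-*.csv.gz",
#         job_config=job_config,
#     )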

class DecimalTargetType:
    """The data types that could be used as a target type when converting decimal values.

    https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#DecimalTargetType

    .. versionadded:: 2.21.0
    """

    NUMERIC = "NUMERIC"
    """Decimal values could be converted to NUMERIC type."""

    BIGNUMERIC = "BIGNUMERIC"
    """Decimal values could be converted to BIGNUMERIC type."""

    STRING = "STRING"
    """Decimal values could be converted to STRING type."""
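# Usage sketch (illustrative): per the REST reference linked above, the first
# listed type that fits the data's precision and scale is chosen. Set via the
# ``decimal_target_types`` property of ``LoadJobConfig``:
#
#     from google.cloud import bigquery
#
#     job_config = bigquery.LoadJobConfig(
#         source_format=bigquery.SourceFormat.PARQUET,
#         decimal_target_types=[
#             DecimalTargetType.NUMERIC,
#             DecimalTargetType.BIGNUMERIC,
#             DecimalTargetType.STRING,
#         ],
#     )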

class CreateDisposition(object):
    """Specifies whether the job is allowed to create new tables. The default
    value is :attr:`CREATE_IF_NEEDED`.

    Creation, truncation and append actions occur as one atomic update
    upon job completion.
    """

    CREATE_IF_NEEDED = "CREATE_IF_NEEDED"
    """If the table does not exist, BigQuery creates the table."""

    CREATE_NEVER = "CREATE_NEVER"
    """The table must already exist. If it does not, a 'notFound' error is
    returned in the job result."""
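# Usage sketch (illustrative): job configurations expose a
# ``create_disposition`` property; the destination table is a placeholder.
#
#     from google.cloud import bigquery
#
#     job_config = bigquery.QueryJobConfig(
#         destination="my-project.my_dataset.my_table",
#         create_disposition=CreateDisposition.CREATE_NEVER,
#     )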

class DefaultPandasDTypes(enum.Enum):
    """Default pandas DataFrame dtypes to convert BigQuery data. These
    sentinel values are used instead of None to maintain backward
    compatibility, and to work even when the pandas package is not
    available. For more information:
    https://stackoverflow.com/a/60605919/101923
    """

    BOOL_DTYPE = object()
    """Specifies default bool dtype"""

    INT_DTYPE = object()
    """Specifies default integer dtype"""
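# Sketch of the sentinel pattern above (illustrative, hypothetical function):
# a default of ``DefaultPandasDTypes.INT_DTYPE`` lets a function distinguish
# "argument omitted" from an explicit ``None``, and defer importing pandas
# until a real dtype is actually needed.
#
#     def to_dataframe(int_dtype=DefaultPandasDTypes.INT_DTYPE):
#         if int_dtype is DefaultPandasDTypes.INT_DTYPE:
#             import pandas  # only reached when a real dtype is required
#             int_dtype = pandas.Int64Dtype()
#         ...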

class DestinationFormat(object):
    """The exported file format. The default value is :attr:`CSV`.

    Tables with nested or repeated fields cannot be exported as CSV.
    """

    CSV = "CSV"
    """Specifies CSV format."""

    NEWLINE_DELIMITED_JSON = "NEWLINE_DELIMITED_JSON"
    """Specifies newline delimited JSON format."""

    AVRO = "AVRO"
    """Specifies Avro format."""

    PARQUET = "PARQUET"
    """Specifies Parquet format."""
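# Usage sketch (illustrative): set on an extract job configuration, e.g. to
# export a table with nested fields, which CSV cannot represent:
#
#     from google.cloud import bigquery
#
#     job_config = bigquery.ExtractJobConfig(
#         destination_format=DestinationFormat.NEWLINE_DELIMITED_JSON
#     )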

class Encoding(object):
    """The character encoding of the data. The default is :attr:`UTF_8`.

    BigQuery decodes the data after the raw, binary data has been
    split using the values of the quote and fieldDelimiter properties.
    """

    UTF_8 = "UTF-8"
    """Specifies UTF-8 encoding."""

    ISO_8859_1 = "ISO-8859-1"
    """Specifies ISO-8859-1 encoding."""
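# Usage sketch (illustrative): relevant when loading CSV data that is not
# UTF-8 encoded:
#
#     from google.cloud import bigquery
#
#     job_config = bigquery.LoadJobConfig(
#         source_format=bigquery.SourceFormat.CSV,
#         encoding=Encoding.ISO_8859_1,
#     )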

class QueryPriority(object):
    """Specifies a priority for the query. The default value is
    :attr:`INTERACTIVE`.
    """

    INTERACTIVE = "INTERACTIVE"
    """Specifies interactive priority."""

    BATCH = "BATCH"
    """Specifies batch priority."""
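# Usage sketch (illustrative): batch queries are queued and started when idle
# resources are available, rather than running interactively.
#
#     from google.cloud import bigquery
#
#     client = bigquery.Client()
#     job_config = bigquery.QueryJobConfig(priority=QueryPriority.BATCH)
#     job = client.query("SELECT 1", job_config=job_config)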

class QueryApiMethod(str, enum.Enum):
    """API method used to start the query. The default value is
    :attr:`INSERT`.
    """

    INSERT = "INSERT"
    """Submit a query job by using the `jobs.insert REST API method
    <https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/insert>`_.

    This supports all job configuration options.
    """

    QUERY = "QUERY"
    """Submit a query job by using the `jobs.query REST API method
    <https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query>`_.

    Differences from ``INSERT``:

    * Many parameters and job configuration options, including job ID and
      destination table, cannot be used with this API method. See the
      `jobs.query REST API documentation
      <https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query>`_
      for the complete list of supported configuration options.

    * The API blocks up to a specified timeout, waiting for the query to
      finish.

    * The full job resource (including job statistics) may not be available.
      Call :meth:`~google.cloud.bigquery.job.QueryJob.reload` or
      :meth:`~google.cloud.bigquery.client.Client.get_job` to get full job
      statistics and configuration.

    * :meth:`~google.cloud.bigquery.Client.query` can raise API exceptions if
      the query fails, whereas the same errors don't appear until calling
      :meth:`~google.cloud.bigquery.job.QueryJob.result` when the ``INSERT``
      API method is used.
    """
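# Usage sketch (illustrative): the ``api_method`` argument of
# ``Client.query`` selects between the two methods; the query is a
# placeholder.
#
#     from google.cloud import bigquery
#
#     client = bigquery.Client()
#     job = client.query(
#         "SELECT COUNT(*) FROM `bigquery-public-data.samples.shakespeare`",
#         api_method=QueryApiMethod.QUERY,  # skip the jobs.insert round trip
#     )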

class SchemaUpdateOption(object):
    """Specifies an update to the destination table schema as a side effect of
    a load job.
    """

    ALLOW_FIELD_ADDITION = "ALLOW_FIELD_ADDITION"
    """Allow adding a nullable field to the schema."""

    ALLOW_FIELD_RELAXATION = "ALLOW_FIELD_RELAXATION"
    """Allow relaxing a required field in the original schema to nullable."""
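# Usage sketch (illustrative): schema updates as a load-job side effect
# generally apply when appending to an existing table, so they are combined
# with a write disposition here.
#
#     from google.cloud import bigquery
#
#     job_config = bigquery.LoadJobConfig(
#         schema_update_options=[SchemaUpdateOption.ALLOW_FIELD_ADDITION],
#         write_disposition=bigquery.WriteDisposition.WRITE_APPEND,
#     )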

class SourceFormat(object):
    """The format of the data files. The default value is :attr:`CSV`.

    Note that the set of allowed values for loading data is different
    than the set used for external data sources (see
    :class:`~google.cloud.bigquery.external_config.ExternalSourceFormat`).
    """

    CSV = "CSV"
    """Specifies CSV format."""

    DATASTORE_BACKUP = "DATASTORE_BACKUP"
    """Specifies Datastore backup format."""

    NEWLINE_DELIMITED_JSON = "NEWLINE_DELIMITED_JSON"
    """Specifies newline delimited JSON format."""

    AVRO = "AVRO"
    """Specifies Avro format."""

    PARQUET = "PARQUET"
    """Specifies Parquet format."""

    ORC = "ORC"
    """Specifies ORC format."""
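# Usage sketch (illustrative): set on a load job configuration; the URI and
# table ID are placeholders.
#
#     from google.cloud import bigquery
#
#     client = bigquery.Client()
#     job_config = bigquery.LoadJobConfig(source_format=SourceFormat.PARQUET)
#     load_job = client.load_table_from_uri(
#         "gs://my-bucket/data.parquet",
#         "my-project.my_dataset.my_table",
#         job_config=job_config,
#     )
#     load_job.result()  # wait for the load to complete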

class KeyResultStatementKind:
    """Determines which statement in the script represents the "key result".

    The "key result" is used to populate the schema and query results of the script job.

    https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#keyresultstatementkind
    """

    KEY_RESULT_STATEMENT_KIND_UNSPECIFIED = "KEY_RESULT_STATEMENT_KIND_UNSPECIFIED"
    LAST = "LAST"
    FIRST_SELECT = "FIRST_SELECT"
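# Usage sketch (hedged: assumes ``ScriptOptions`` and the
# ``QueryJobConfig.script_options`` property from this package's job module):
#
#     from google.cloud import bigquery
#
#     job_config = bigquery.QueryJobConfig(
#         script_options=bigquery.ScriptOptions(
#             key_result_statement=KeyResultStatementKind.LAST
#         )
#     )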

class StandardSqlTypeNames(str, enum.Enum):
    # Make ``enum.auto()`` return the member's own name, so each member's
    # value matches the type-name string used by the REST API.
    def _generate_next_value_(name, start, count, last_values):
        return name

    TYPE_KIND_UNSPECIFIED = enum.auto()
    INT64 = enum.auto()
    BOOL = enum.auto()
    FLOAT64 = enum.auto()
    STRING = enum.auto()
    BYTES = enum.auto()
    TIMESTAMP = enum.auto()
    DATE = enum.auto()
    TIME = enum.auto()
    DATETIME = enum.auto()
    INTERVAL = enum.auto()
    GEOGRAPHY = enum.auto()
    NUMERIC = enum.auto()
    BIGNUMERIC = enum.auto()
    JSON = enum.auto()
    ARRAY = enum.auto()
    STRUCT = enum.auto()
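# Example (follows directly from the definition above): member values mirror
# member names, so they round-trip cleanly through REST API string fields.
#
#     assert StandardSqlTypeNames.INT64.value == "INT64"
#     assert StandardSqlTypeNames("BOOL") is StandardSqlTypeNames.BOOL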

class EntityTypes(str, enum.Enum):
    """Enum of allowed entity type names in AccessEntry"""

    USER_BY_EMAIL = "userByEmail"
    GROUP_BY_EMAIL = "groupByEmail"
    DOMAIN = "domain"
    DATASET = "dataset"
    SPECIAL_GROUP = "specialGroup"
    VIEW = "view"
    IAM_MEMBER = "iamMember"
    ROUTINE = "routine"
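# Usage sketch (illustrative): an access entry granting a group read access;
# the role and email address are placeholders.
#
#     from google.cloud import bigquery
#
#     entry = bigquery.AccessEntry(
#         role="READER",
#         entity_type=EntityTypes.GROUP_BY_EMAIL.value,
#         entity_id="data-readers@example.com",
#     )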

# See also: https://cloud.google.com/bigquery/data-types#legacy_sql_data_types
# and https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types
class SqlTypeNames(str, enum.Enum):
    """Enum of allowed SQL type names in schema.SchemaField."""

    STRING = "STRING"
    BYTES = "BYTES"
    INTEGER = "INTEGER"
    INT64 = "INTEGER"
    FLOAT = "FLOAT"
    FLOAT64 = "FLOAT"
    DECIMAL = NUMERIC = "NUMERIC"
    BIGDECIMAL = BIGNUMERIC = "BIGNUMERIC"
    BOOLEAN = "BOOLEAN"
    BOOL = "BOOLEAN"
    GEOGRAPHY = "GEOGRAPHY"  # NOTE: not available in legacy types
    RECORD = "RECORD"
    STRUCT = "RECORD"
    TIMESTAMP = "TIMESTAMP"
    DATE = "DATE"
    TIME = "TIME"
    DATETIME = "DATETIME"
    INTERVAL = "INTERVAL"  # NOTE: not available in legacy types
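# Usage sketch (illustrative): the members supply the type-name strings for a
# schema definition; field names are placeholders.
#
#     from google.cloud import bigquery
#
#     schema = [
#         bigquery.SchemaField("name", SqlTypeNames.STRING.value),
#         bigquery.SchemaField("price", SqlTypeNames.NUMERIC.value),
#     ]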

class WriteDisposition(object):
    """Specifies the action that occurs if the destination table already
    exists.

    The default value is :attr:`WRITE_APPEND`.

    Each action is atomic and only occurs if BigQuery is able to complete
    the job successfully. Creation, truncation and append actions occur as one
    atomic update upon job completion.
    """

    WRITE_APPEND = "WRITE_APPEND"
    """If the table already exists, BigQuery appends the data to the table."""

    WRITE_TRUNCATE = "WRITE_TRUNCATE"
    """If the table already exists, BigQuery overwrites the table data."""

    WRITE_EMPTY = "WRITE_EMPTY"
    """If the table already exists and contains data, a 'duplicate' error is
    returned in the job result."""
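# Usage sketch (illustrative): replace the destination table's contents with
# a query result; the table ID is a placeholder.
#
#     from google.cloud import bigquery
#
#     job_config = bigquery.QueryJobConfig(
#         destination="my-project.my_dataset.my_table",
#         write_disposition=WriteDisposition.WRITE_TRUNCATE,
#     )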

class DeterminismLevel:
    """Specifies determinism level for JavaScript user-defined functions (UDFs).

    https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#DeterminismLevel
    """

    DETERMINISM_LEVEL_UNSPECIFIED = "DETERMINISM_LEVEL_UNSPECIFIED"
    """The determinism of the UDF is unspecified."""

    DETERMINISTIC = "DETERMINISTIC"
    """The UDF is deterministic, meaning that two function calls with the same
    inputs always produce the same result, even across two query runs."""

    NOT_DETERMINISTIC = "NOT_DETERMINISTIC"
    """The UDF is not deterministic."""