
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import os
import re
from typing import (
    Dict,
    Iterable,
    Iterator,
    Mapping,
    MutableMapping,
    MutableSequence,
    Optional,
    Sequence,
    Tuple,
    Type,
    Union,
    cast,
)
import warnings

from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.exceptions import MutualTLSChannelError  # type: ignore
from google.auth.transport import mtls  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.bigquery_storage_v1 import gapic_version as package_version

try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore

from google.protobuf import timestamp_pb2  # type: ignore
from google.rpc import status_pb2  # type: ignore

from google.cloud.bigquery_storage_v1.types import storage, stream, table

from .transports.base import DEFAULT_CLIENT_INFO, BigQueryWriteTransport
from .transports.grpc import BigQueryWriteGrpcTransport
from .transports.grpc_asyncio import BigQueryWriteGrpcAsyncIOTransport


class BigQueryWriteClientMeta(type):
    """Metaclass for the BigQueryWrite client.

    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client instance
    objects.
    """

    _transport_registry = OrderedDict()  # type: Dict[str, Type[BigQueryWriteTransport]]
    _transport_registry["grpc"] = BigQueryWriteGrpcTransport
    _transport_registry["grpc_asyncio"] = BigQueryWriteGrpcAsyncIOTransport

    def get_transport_class(
        cls,
        label: Optional[str] = None,
    ) -> Type[BigQueryWriteTransport]:
        """Returns an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.
        """
        # If a specific transport is requested, return that one.
        if label:
            return cls._transport_registry[label]

        # No transport is requested; return the default (that is, the first one
        # in the dictionary).
        return next(iter(cls._transport_registry.values()))

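# A minimal sketch of how the metaclass registry resolves transports, assuming
# only the "grpc" and "grpc_asyncio" registrations above; illustrative comments
# rather than executable module code:
#
#     BigQueryWriteClient.get_transport_class("grpc")
#     # -> BigQueryWriteGrpcTransport
#     BigQueryWriteClient.get_transport_class()
#     # -> BigQueryWriteGrpcTransport (the first entry in the OrderedDict)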

class BigQueryWriteClient(metaclass=BigQueryWriteClientMeta):
    """BigQuery Write API.

    The Write API can be used to write data to BigQuery.

    For supplementary information about the Write API, see:

    https://cloud.google.com/bigquery/docs/write-api
    """

    @staticmethod
    def _get_default_mtls_endpoint(api_endpoint):
        """Converts an api endpoint to its mTLS endpoint.

        Converts "*.sandbox.googleapis.com" and "*.googleapis.com" to
        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.

        Args:
            api_endpoint (Optional[str]): the api endpoint to convert.
        Returns:
            str: converted mTLS api endpoint.
        """
        if not api_endpoint:
            return api_endpoint

        mtls_endpoint_re = re.compile(
            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
        )

        m = mtls_endpoint_re.match(api_endpoint)
        name, mtls, sandbox, googledomain = m.groups()
        if mtls or not googledomain:
            return api_endpoint

        if sandbox:
            return api_endpoint.replace(
                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
            )

        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")

    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
    DEFAULT_ENDPOINT = "bigquerystorage.googleapis.com"
    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
        DEFAULT_ENDPOINT
    )

    _DEFAULT_ENDPOINT_TEMPLATE = "bigquerystorage.{UNIVERSE_DOMAIN}"
    _DEFAULT_UNIVERSE = "googleapis.com"

    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            BigQueryWriteClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_info(info)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            BigQueryWriteClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_file(filename)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    from_service_account_json = from_service_account_file
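    # A hedged usage sketch (not generated code); the key-file path below is
    # hypothetical:
    #
    #     client = BigQueryWriteClient.from_service_account_file(
    #         "/path/to/key.json"  # hypothetical path
    #     )
    #     # from_service_account_json is an alias of from_service_account_file,
    #     # so both accept the same arguments.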

    @property
    def transport(self) -> BigQueryWriteTransport:
        """Returns the transport used by the client instance.

        Returns:
            BigQueryWriteTransport: The transport used by the client
                instance.
        """
        return self._transport

    @staticmethod
    def table_path(
        project: str,
        dataset: str,
        table: str,
    ) -> str:
        """Returns a fully-qualified table string."""
        return "projects/{project}/datasets/{dataset}/tables/{table}".format(
            project=project,
            dataset=dataset,
            table=table,
        )

    @staticmethod
    def parse_table_path(path: str) -> Dict[str, str]:
        """Parses a table path into its component segments."""
        m = re.match(
            r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)/tables/(?P<table>.+?)$",
            path,
        )
        return m.groupdict() if m else {}

    @staticmethod
    def write_stream_path(
        project: str,
        dataset: str,
        table: str,
        stream: str,
    ) -> str:
        """Returns a fully-qualified write_stream string."""
        return "projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}".format(
            project=project,
            dataset=dataset,
            table=table,
            stream=stream,
        )

    @staticmethod
    def parse_write_stream_path(path: str) -> Dict[str, str]:
        """Parses a write_stream path into its component segments."""
        m = re.match(
            r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)/tables/(?P<table>.+?)/streams/(?P<stream>.+?)$",
            path,
        )
        return m.groupdict() if m else {}
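    # A quick round-trip sketch for the path helpers above; all identifiers are
    # hypothetical:
    #
    #     path = BigQueryWriteClient.write_stream_path(
    #         "my-project", "my_dataset", "my_table", "_default"
    #     )
    #     # path == "projects/my-project/datasets/my_dataset/tables/my_table/streams/_default"
    #     BigQueryWriteClient.parse_write_stream_path(path)
    #     # -> {"project": "my-project", "dataset": "my_dataset",
    #     #     "table": "my_table", "stream": "_default"}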

    @staticmethod
    def common_billing_account_path(
        billing_account: str,
    ) -> str:
        """Returns a fully-qualified billing_account string."""
        return "billingAccounts/{billing_account}".format(
            billing_account=billing_account,
        )

    @staticmethod
    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
        """Parses a billing_account path into its component segments."""
        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_folder_path(
        folder: str,
    ) -> str:
        """Returns a fully-qualified folder string."""
        return "folders/{folder}".format(
            folder=folder,
        )

    @staticmethod
    def parse_common_folder_path(path: str) -> Dict[str, str]:
        """Parses a folder path into its component segments."""
        m = re.match(r"^folders/(?P<folder>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_organization_path(
        organization: str,
    ) -> str:
        """Returns a fully-qualified organization string."""
        return "organizations/{organization}".format(
            organization=organization,
        )

    @staticmethod
    def parse_common_organization_path(path: str) -> Dict[str, str]:
        """Parses an organization path into its component segments."""
        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_project_path(
        project: str,
    ) -> str:
        """Returns a fully-qualified project string."""
        return "projects/{project}".format(
            project=project,
        )

    @staticmethod
    def parse_common_project_path(path: str) -> Dict[str, str]:
        """Parses a project path into its component segments."""
        m = re.match(r"^projects/(?P<project>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_location_path(
        project: str,
        location: str,
    ) -> str:
        """Returns a fully-qualified location string."""
        return "projects/{project}/locations/{location}".format(
            project=project,
            location=location,
        )

    @staticmethod
    def parse_common_location_path(path: str) -> Dict[str, str]:
        """Parses a location path into its component segments."""
        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
        return m.groupdict() if m else {}

    @classmethod
    def get_mtls_endpoint_and_cert_source(
        cls, client_options: Optional[client_options_lib.ClientOptions] = None
    ):
        """Deprecated. Return the API endpoint and client cert source for mutual TLS.

        The client cert source is determined in the following order:
        (1) if the `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true",
        the client cert source is None.
        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
        default client cert source exists, use the default one; otherwise the client cert
        source is None.

        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint,
        otherwise use the default API endpoint.

        More details can be found at https://google.aip.dev/auth/4114.

        Args:
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. Only the `api_endpoint` and `client_cert_source` properties may be used
                in this method.

        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
                client cert source to use.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """

        warnings.warn(
            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
            DeprecationWarning,
        )
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_client_cert not in ("true", "false"):
            raise ValueError(
                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
            )
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError(
                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
            )

        # Figure out the client cert source to use.
        client_cert_source = None
        if use_client_cert == "true":
            if client_options.client_cert_source:
                client_cert_source = client_options.client_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()

        # Figure out which api endpoint to use.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        elif use_mtls_endpoint == "always" or (
            use_mtls_endpoint == "auto" and client_cert_source
        ):
            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = cls.DEFAULT_ENDPOINT

        return api_endpoint, client_cert_source
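    # A hedged sketch of the (deprecated) endpoint resolution under common
    # environment settings, following the logic above:
    #
    #     import os
    #     os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "false"
    #     os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"
    #     endpoint, cert_source = BigQueryWriteClient.get_mtls_endpoint_and_cert_source()
    #     # endpoint == "bigquerystorage.googleapis.com"; cert_source is None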

    @staticmethod
    def _read_environment_variables():
        """Returns the environment variables used by the client.

        Returns:
            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
                GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.

        Raises:
            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
                any of ["true", "false"].
            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
                is not any of ["auto", "never", "always"].
        """
        use_client_cert = os.getenv(
            "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
        ).lower()
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
        if use_client_cert not in ("true", "false"):
            raise ValueError(
                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
            )
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError(
                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
            )
        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
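    # With none of the three variables set, the defaults fall out of the code
    # above; an illustrative sketch:
    #
    #     use_cert, use_mtls, universe = BigQueryWriteClient._read_environment_variables()
    #     # use_cert is False, use_mtls == "auto", universe is None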

    @staticmethod
    def _get_client_cert_source(provided_cert_source, use_cert_flag):
        """Return the client cert source to be used by the client.

        Args:
            provided_cert_source (bytes): The client certificate source provided.
            use_cert_flag (bool): A flag indicating whether to use the client certificate.

        Returns:
            bytes or None: The client cert source to be used by the client.
        """
        client_cert_source = None
        if use_cert_flag:
            if provided_cert_source:
                client_cert_source = provided_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()
        return client_cert_source

    @staticmethod
    def _get_api_endpoint(
        api_override, client_cert_source, universe_domain, use_mtls_endpoint
    ):
        """Return the API endpoint used by the client.

        Args:
            api_override (str): The API endpoint override. If specified, this is always
                the return value of this function and the other arguments are not used.
            client_cert_source (bytes): The client certificate source used by the client.
            universe_domain (str): The universe domain used by the client.
            use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters.
                Possible values are "always", "auto", or "never".

        Returns:
            str: The API endpoint to be used by the client.
        """
        if api_override is not None:
            api_endpoint = api_override
        elif use_mtls_endpoint == "always" or (
            use_mtls_endpoint == "auto" and client_cert_source
        ):
            _default_universe = BigQueryWriteClient._DEFAULT_UNIVERSE
            if universe_domain != _default_universe:
                raise MutualTLSChannelError(
                    f"mTLS is not supported in any universe other than {_default_universe}."
                )
            api_endpoint = BigQueryWriteClient.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = BigQueryWriteClient._DEFAULT_ENDPOINT_TEMPLATE.format(
                UNIVERSE_DOMAIN=universe_domain
            )
        return api_endpoint
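    # How the endpoint template resolves for a non-default universe domain; a
    # sketch with a hypothetical domain and mTLS disabled:
    #
    #     BigQueryWriteClient._get_api_endpoint(None, None, "example-universe.test", "never")
    #     # -> "bigquerystorage.example-universe.test"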

    @staticmethod
    def _get_universe_domain(
        client_universe_domain: Optional[str], universe_domain_env: Optional[str]
    ) -> str:
        """Return the universe domain used by the client.

        Args:
            client_universe_domain (Optional[str]): The universe domain configured via the client options.
            universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.

        Returns:
            str: The universe domain to be used by the client.

        Raises:
            ValueError: If the universe domain is an empty string.
        """
        universe_domain = BigQueryWriteClient._DEFAULT_UNIVERSE
        if client_universe_domain is not None:
            universe_domain = client_universe_domain
        elif universe_domain_env is not None:
            universe_domain = universe_domain_env
        if len(universe_domain.strip()) == 0:
            raise ValueError("Universe Domain cannot be an empty string.")
        return universe_domain

    @staticmethod
    def _compare_universes(
        client_universe: str, credentials: ga_credentials.Credentials
    ) -> bool:
        """Returns True iff the universe domains used by the client and credentials match.

        Args:
            client_universe (str): The universe domain configured via the client options.
            credentials (ga_credentials.Credentials): The credentials being used in the client.

        Returns:
            bool: True iff client_universe matches the universe in credentials.

        Raises:
            ValueError: when client_universe does not match the universe in credentials.
        """

        default_universe = BigQueryWriteClient._DEFAULT_UNIVERSE
        credentials_universe = getattr(credentials, "universe_domain", default_universe)

        if client_universe != credentials_universe:
            raise ValueError(
                "The configured universe domain "
                f"({client_universe}) does not match the universe domain "
                f"found in the credentials ({credentials_universe}). "
                "If you haven't configured the universe domain explicitly, "
                f"`{default_universe}` is the default."
            )
        return True

    def _validate_universe_domain(self):
        """Validates client's and credentials' universe domains are consistent.

        Returns:
            bool: True iff the configured universe domain is valid.

        Raises:
            ValueError: If the configured universe domain is not valid.
        """
        self._is_universe_domain_valid = (
            self._is_universe_domain_valid
            or BigQueryWriteClient._compare_universes(
                self.universe_domain, self.transport._credentials
            )
        )
        return self._is_universe_domain_valid
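    # Precedence sketch for the universe domain, per the resolution order above
    # (client option, then environment variable, then the default); domains are
    # hypothetical:
    #
    #     BigQueryWriteClient._get_universe_domain("example.test", "ignored.test")
    #     # -> "example.test" (the client option wins)
    #     BigQueryWriteClient._get_universe_domain(None, None)
    #     # -> "googleapis.com" (the default universe)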

    @property
    def api_endpoint(self):
        """Return the API endpoint used by the client instance.

        Returns:
            str: The API endpoint used by the client instance.
        """
        return self._api_endpoint

    @property
    def universe_domain(self) -> str:
        """Return the universe domain used by the client instance.

        Returns:
            str: The universe domain used by the client instance.
        """
        return self._universe_domain

    def __init__(
        self,
        *,
        credentials: Optional[ga_credentials.Credentials] = None,
        transport: Optional[Union[str, BigQueryWriteTransport]] = None,
        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiates the BigQuery Write client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, BigQueryWriteTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
                Custom options for the client.

                1. The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client when ``transport`` is
                not explicitly provided. Only if this property is not set and
                ``transport`` was not explicitly provided, the endpoint is
                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
                variable, which can have one of the following values:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto-switch to the
                default mTLS endpoint if a client certificate is present; this is
                the default value).

                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide a client certificate for mTLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.

                3. The ``universe_domain`` property can be used to override the
                default "googleapis.com" universe. Note that the ``api_endpoint``
                property still takes precedence; and ``universe_domain`` is
                currently not supported for mTLS.

            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        self._client_options = client_options
        if isinstance(self._client_options, dict):
            self._client_options = client_options_lib.from_dict(self._client_options)
        if self._client_options is None:
            self._client_options = client_options_lib.ClientOptions()
        self._client_options = cast(
            client_options_lib.ClientOptions, self._client_options
        )

        universe_domain_opt = getattr(self._client_options, "universe_domain", None)

        (
            self._use_client_cert,
            self._use_mtls_endpoint,
            self._universe_domain_env,
        ) = BigQueryWriteClient._read_environment_variables()
        self._client_cert_source = BigQueryWriteClient._get_client_cert_source(
            self._client_options.client_cert_source, self._use_client_cert
        )
        self._universe_domain = BigQueryWriteClient._get_universe_domain(
            universe_domain_opt, self._universe_domain_env
        )
        self._api_endpoint = None  # updated below, depending on `transport`

        # Initialize the universe domain validation.
        self._is_universe_domain_valid = False

        api_key_value = getattr(self._client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError(
                "client_options.api_key and credentials are mutually exclusive"
            )

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        transport_provided = isinstance(transport, BigQueryWriteTransport)
        if transport_provided:
            # transport is a BigQueryWriteTransport instance.
            if credentials or self._client_options.credentials_file or api_key_value:
                raise ValueError(
                    "When providing a transport instance, "
                    "provide its credentials directly."
                )
            if self._client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = cast(BigQueryWriteTransport, transport)
            self._api_endpoint = self._transport.host

        self._api_endpoint = (
            self._api_endpoint
            or BigQueryWriteClient._get_api_endpoint(
                self._client_options.api_endpoint,
                self._client_cert_source,
                self._universe_domain,
                self._use_mtls_endpoint,
            )
        )

        if not transport_provided:
            import google.auth._default  # type: ignore

            if api_key_value and hasattr(
                google.auth._default, "get_api_key_credentials"
            ):
                credentials = google.auth._default.get_api_key_credentials(
                    api_key_value
                )

            Transport = type(self).get_transport_class(cast(str, transport))
            self._transport = Transport(
                credentials=credentials,
                credentials_file=self._client_options.credentials_file,
                host=self._api_endpoint,
                scopes=self._client_options.scopes,
                client_cert_source_for_mtls=self._client_cert_source,
                quota_project_id=self._client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=self._client_options.api_audience,
            )
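    # A hedged construction sketch using client_options; the endpoint override
    # shown is hypothetical:
    #
    #     from google.api_core.client_options import ClientOptions
    #
    #     options = ClientOptions(api_endpoint="bigquerystorage.example.com")  # hypothetical
    #     client = BigQueryWriteClient(client_options=options)
    #     # client.api_endpoint == "bigquerystorage.example.com"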

    def create_write_stream(
        self,
        request: Optional[Union[storage.CreateWriteStreamRequest, dict]] = None,
        *,
        parent: Optional[str] = None,
        write_stream: Optional[stream.WriteStream] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> stream.WriteStream:
        r"""Creates a write stream to the given table. Additionally, every
        table has a special stream named '_default' to which data can be
        written. This stream doesn't need to be created using
        CreateWriteStream. It is a stream that can be used
        simultaneously by any number of clients. Data written to this
        stream is considered committed as soon as an acknowledgement is
        received.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import bigquery_storage_v1

            def sample_create_write_stream():
                # Create a client
                client = bigquery_storage_v1.BigQueryWriteClient()

                # Initialize request argument(s)
                request = bigquery_storage_v1.CreateWriteStreamRequest(
                    parent="parent_value",
                )

                # Make the request
                response = client.create_write_stream(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.bigquery_storage_v1.types.CreateWriteStreamRequest, dict]):
                The request object. Request message for ``CreateWriteStream``.
            parent (str):
                Required. Reference to the table to which the stream
                belongs, in the format of
                ``projects/{project}/datasets/{dataset}/tables/{table}``.

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            write_stream (google.cloud.bigquery_storage_v1.types.WriteStream):
                Required. Stream to be created.
                This corresponds to the ``write_stream`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.bigquery_storage_v1.types.WriteStream:
                Information about a single stream
                that gets data inside the storage
                system.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent, write_stream])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # Minor optimization to avoid making a copy if the user passes
        # in a storage.CreateWriteStreamRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, storage.CreateWriteStreamRequest):
            request = storage.CreateWriteStreamRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if parent is not None:
                request.parent = parent
            if write_stream is not None:
                request.write_stream = write_stream

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.create_write_stream]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def append_rows(
        self,
        requests: Optional[Iterator[storage.AppendRowsRequest]] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> Iterable[storage.AppendRowsResponse]:
        r"""Appends data to the given stream.

        If ``offset`` is specified, the ``offset`` is checked against
        the end of stream. The server returns ``OUT_OF_RANGE`` in
        ``AppendRowsResponse`` if an attempt is made to append to an
        offset beyond the current end of the stream, or
        ``ALREADY_EXISTS`` if the user provides an ``offset`` that has
        already been written to. The user can retry with an adjusted
        offset within the same RPC connection. If ``offset`` is not
        specified, the append happens at the end of the stream.

        The response contains an optional offset at which the append
        happened. No offset information will be returned for appends to
        a default stream.

        Responses are received in the same order in which requests are
        sent. There will be one response for each successfully inserted
        request. Responses may optionally embed error information if the
        originating AppendRequest was not successfully processed.

        The specifics of when successfully appended data is made visible
        to the table are governed by the type of stream:

        - For COMMITTED streams (which includes the default stream),
          data is visible immediately upon successful append.

        - For BUFFERED streams, data is made visible via a subsequent
          ``FlushRows`` rpc which advances a cursor to a newer offset
          in the stream.

        - For PENDING streams, data is not made visible until the
          stream itself is finalized (via the ``FinalizeWriteStream``
          rpc), and the stream is explicitly committed via the
          ``BatchCommitWriteStreams`` rpc.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import bigquery_storage_v1

            def sample_append_rows():
                # Create a client
                client = bigquery_storage_v1.BigQueryWriteClient()

                # Initialize request argument(s)
                request = bigquery_storage_v1.AppendRowsRequest(
                    write_stream="write_stream_value",
                )

                # This method expects an iterator which contains
                # 'bigquery_storage_v1.AppendRowsRequest' objects
                # Here we create a generator that yields a single `request` for
                # demonstrative purposes.
                requests = [request]

                def request_generator():
                    for request in requests:
                        yield request

                # Make the request
                stream = client.append_rows(requests=request_generator())

                # Handle the response
                for response in stream:
                    print(response)

        Args:
            requests (Iterator[google.cloud.bigquery_storage_v1.types.AppendRowsRequest]):
                The request object iterator. Request message for ``AppendRows``.

                Because AppendRows is a bidirectional streaming RPC,
                certain parts of the AppendRowsRequest need only be
                specified for the first request before switching table
                destinations. You can also switch table destinations
                within the same connection for the default stream.

                A single AppendRowsRequest must be less than 10 MB in
                size. Requests larger than this return an error,
                typically ``INVALID_ARGUMENT``.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            Iterable[google.cloud.bigquery_storage_v1.types.AppendRowsResponse]:
                Response message for AppendRows.
        """

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.append_rows]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(()),)

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            requests,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def get_write_stream(
        self,
        request: Optional[Union[storage.GetWriteStreamRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> stream.WriteStream:
        r"""Gets information about a write stream.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import bigquery_storage_v1

            def sample_get_write_stream():
                # Create a client
                client = bigquery_storage_v1.BigQueryWriteClient()

                # Initialize request argument(s)
                request = bigquery_storage_v1.GetWriteStreamRequest(
                    name="name_value",
                )

                # Make the request
                response = client.get_write_stream(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.bigquery_storage_v1.types.GetWriteStreamRequest, dict]):
                The request object. Request message for ``GetWriteStream``.
            name (str):
                Required. Name of the stream to get, in the form of
                ``projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}``.

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.bigquery_storage_v1.types.WriteStream:
                Information about a single stream
                that gets data inside the storage
                system.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # Minor optimization to avoid making a copy if the user passes
        # in a storage.GetWriteStreamRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, storage.GetWriteStreamRequest):
            request = storage.GetWriteStreamRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if name is not None:
                request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get_write_stream]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def finalize_write_stream(
        self,
        request: Optional[Union[storage.FinalizeWriteStreamRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> storage.FinalizeWriteStreamResponse:
        r"""Finalize a write stream so that no new data can be appended to
        the stream. Finalize is not supported on the '_default' stream.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import bigquery_storage_v1

            def sample_finalize_write_stream():
                # Create a client
                client = bigquery_storage_v1.BigQueryWriteClient()

                # Initialize request argument(s)
                request = bigquery_storage_v1.FinalizeWriteStreamRequest(
                    name="name_value",
                )

                # Make the request
                response = client.finalize_write_stream(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamRequest, dict]):
                The request object. Request message for invoking ``FinalizeWriteStream``.
            name (str):
                Required. Name of the stream to finalize, in the form of
                ``projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}``.

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamResponse:
                Response message for FinalizeWriteStream.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # Minor optimization to avoid making a copy if the user passes
        # in a storage.FinalizeWriteStreamRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, storage.FinalizeWriteStreamRequest):
            request = storage.FinalizeWriteStreamRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if name is not None:
                request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.finalize_write_stream]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def batch_commit_write_streams(
        self,
        request: Optional[Union[storage.BatchCommitWriteStreamsRequest, dict]] = None,
        *,
        parent: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> storage.BatchCommitWriteStreamsResponse:
        r"""Atomically commits a group of ``PENDING`` streams that belong to
        the same ``parent`` table.

        Streams must be finalized before commit and cannot be committed
        multiple times. Once a stream is committed, data in the stream
        becomes available for read operations.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import bigquery_storage_v1

            def sample_batch_commit_write_streams():
                # Create a client
                client = bigquery_storage_v1.BigQueryWriteClient()

                # Initialize request argument(s)
                request = bigquery_storage_v1.BatchCommitWriteStreamsRequest(
                    parent="parent_value",
                    write_streams=['write_streams_value1', 'write_streams_value2'],
                )

                # Make the request
                response = client.batch_commit_write_streams(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsRequest, dict]):
                The request object. Request message for ``BatchCommitWriteStreams``.
            parent (str):
                Required. Parent table that all the streams should
                belong to, in the form of
                ``projects/{project}/datasets/{dataset}/tables/{table}``.

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsResponse:
                Response message for BatchCommitWriteStreams.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # Minor optimization to avoid making a copy if the user passes
        # in a storage.BatchCommitWriteStreamsRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, storage.BatchCommitWriteStreamsRequest):
            request = storage.BatchCommitWriteStreamsRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if parent is not None:
                request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[
            self._transport.batch_commit_write_streams
        ]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def flush_rows(
        self,
        request: Optional[Union[storage.FlushRowsRequest, dict]] = None,
        *,
        write_stream: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> storage.FlushRowsResponse:
        r"""Flushes rows to a BUFFERED stream.

        If users are appending rows to a BUFFERED stream, a flush
        operation is required in order for the rows to become available
        for reading. A flush operation moves the flush cursor of a
        BUFFERED stream from its previously flushed offset to the
        offset specified in the request.

        Flush is not supported on the \_default stream, since it is not
        BUFFERED.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import bigquery_storage_v1

            def sample_flush_rows():
                # Create a client
                client = bigquery_storage_v1.BigQueryWriteClient()

                # Initialize request argument(s)
                request = bigquery_storage_v1.FlushRowsRequest(
                    write_stream="write_stream_value",
                )

                # Make the request
                response = client.flush_rows(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.bigquery_storage_v1.types.FlushRowsRequest, dict]):
                The request object. Request message for ``FlushRows``.
            write_stream (str):
                Required. The stream that is the
                target of the flush operation.

                This corresponds to the ``write_stream`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.bigquery_storage_v1.types.FlushRowsResponse:
                Response message for FlushRows.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([write_stream])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # Minor optimization to avoid making a copy if the user passes
        # in a storage.FlushRowsRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, storage.FlushRowsRequest):
            request = storage.FlushRowsRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if write_stream is not None:
                request.write_stream = write_stream

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.flush_rows]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("write_stream", request.write_stream),)
            ),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def __enter__(self) -> "BigQueryWriteClient":
        return self

    def __exit__(self, type, value, traceback):
        """Releases underlying transport's resources.

        .. warning::
            ONLY use as a context manager if the transport is NOT shared
            with other clients! Exiting the with block will CLOSE the transport
            and may cause errors in other clients!
        """
        self.transport.close()
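
# A hedged end-to-end sketch of the PENDING-stream workflow described in the
# method docstrings above (create -> append -> finalize -> batch commit); the
# project, dataset, and table names are hypothetical:
#
#     client = BigQueryWriteClient()
#     parent = client.table_path("my-project", "my_dataset", "my_table")
#     ws = client.create_write_stream(
#         parent=parent,
#         write_stream=stream.WriteStream(type_=stream.WriteStream.Type.PENDING),
#     )
#     # ... append rows over the stream via client.append_rows(...) ...
#     client.finalize_write_stream(name=ws.name)
#     client.batch_commit_write_streams(
#         request=storage.BatchCommitWriteStreamsRequest(
#             parent=parent,
#             write_streams=[ws.name],
#         )
#     )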


DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__
)


__all__ = ("BigQueryWriteClient",)