Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/google/cloud/bigquery_storage_v1/client.py: 73%

15 statements  

coverage.py v7.2.2, created at 2023-03-26 06:10 +0000

# -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Parent client for calling the Cloud BigQuery Storage API.

This is the base from which all interactions with the API occur.
"""

from __future__ import absolute_import

import google.api_core.gapic_v1.method

from google.cloud.bigquery_storage_v1 import reader
from google.cloud.bigquery_storage_v1.services import big_query_read
from google.cloud.bigquery_storage_v1.services import big_query_write


_SCOPES = (
    "https://www.googleapis.com/auth/bigquery",
    "https://www.googleapis.com/auth/cloud-platform",
)


class BigQueryReadClient(big_query_read.BigQueryReadClient):
    """Client for interacting with BigQuery Storage API.

    The BigQuery storage API can be used to read data stored in BigQuery.
    """

    def read_rows(
        self,
        name,
        offset=0,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=(),
        retry_delay_callback=None,
    ):
52 """ 

53 Reads rows from the table in the format prescribed by the read 

54 session. Each response contains one or more table rows, up to a 

55 maximum of 10 MiB per response; read requests which attempt to read 

56 individual rows larger than this will fail. 

57 

58 Each request also returns a set of stream statistics reflecting the 

59 estimated total number of rows in the read stream. This number is 

60 computed based on the total table size and the number of active 

61 streams in the read session, and may change as other streams continue 

62 to read data. 

63 

64 Example: 

65 >>> from google.cloud import bigquery_storage 

66 >>> 

67 >>> client = bigquery_storage.BigQueryReadClient() 

68 >>> 

69 >>> # TODO: Initialize ``table``: 

            >>> table = "projects/{}/datasets/{}/tables/{}".format(
            ...     'your-data-project-id',
            ...     'your_dataset_id',
            ...     'your_table_id',
            ... )
            >>>
            >>> # TODO: Initialize ``parent``:
            >>> parent = 'projects/your-billing-project-id'
            >>>
            >>> requested_session = bigquery_storage.types.ReadSession(
            ...     table=table,
            ...     data_format=bigquery_storage.types.DataFormat.AVRO,
            ... )
            >>> session = client.create_read_session(
            ...     parent=parent, read_session=requested_session
            ... )
            >>>
            >>> stream = session.streams[0]  # TODO: Also read any other streams.
            >>> read_rows_stream = client.read_rows(stream.name)
            >>>
            >>> for element in read_rows_stream.rows(session):
            ...     # process element
            ...     pass

        Args:
            name (str):
                Required. Name of the stream to start
                reading from, of the form
                ``projects/{project_id}/locations/{location}/sessions/{session_id}/streams/{stream_id}``.
            offset (Optional[int]):
                The starting offset from which to begin reading rows in
                the stream. The offset requested must be less than the last
                row read from ReadRows. Requesting a larger offset is
                undefined.
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will not
                be retried.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.
            retry_delay_callback (Optional[Callable[[float], None]]):
                If the client receives a retryable error that asks the client
                to delay its next attempt and ``retry_delay_callback`` is not
                ``None``, BigQueryReadClient will call ``retry_delay_callback``
                with the delay duration (in seconds) before it starts sleeping
                until the next attempt.

        Returns:
            ~google.cloud.bigquery_storage_v1.reader.ReadRowsStream:
                An iterable of
                :class:`~google.cloud.bigquery_storage_v1.types.ReadRowsResponse`.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
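        # Delegate the ReadRows RPC to the generated GAPIC client; the
        # ReadRowsStream wrapper adds offset tracking and reconnection
        # on retryable errors.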
        gapic_client = super(BigQueryReadClient, self)
        stream = reader.ReadRowsStream(
            gapic_client,
            name,
            offset,
            {"retry": retry, "timeout": timeout, "metadata": metadata},
            retry_delay_callback=retry_delay_callback,
        )
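        # Open the stream before returning it, so the connection is
        # established eagerly rather than on the first iteration.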
        stream._reconnect()
        return stream


class BigQueryWriteClient(big_query_write.BigQueryWriteClient):
    __doc__ = big_query_write.BigQueryWriteClient.__doc__
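For orientation, here is a minimal end-to-end sketch of the read path covered by this module, including the retry_delay_callback hook documented above. It is not part of client.py, and the project, dataset, and table identifiers are placeholders.

from google.cloud import bigquery_storage
from google.cloud.bigquery_storage import types


def log_retry_delay(delay_seconds):
    # Invoked by read_rows with the delay (in seconds) before it sleeps
    # and attempts to reconnect after a retryable error.
    print("retrying in {:.1f}s".format(delay_seconds))


client = bigquery_storage.BigQueryReadClient()
requested_session = types.ReadSession(
    table="projects/your-project/datasets/your_dataset/tables/your_table",
    data_format=types.DataFormat.AVRO,
)
session = client.create_read_session(
    parent="projects/your-project", read_session=requested_session
)

# Read the first stream only; a production reader would fan out over all
# of session.streams, typically one per worker.
read_rows_stream = client.read_rows(
    session.streams[0].name, retry_delay_callback=log_retry_delay
)
for row in read_rows_stream.rows(session):
    print(row)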