# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import inspect
import json
import pickle
import logging as std_logging
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union

from google.api_core import gapic_v1
from google.api_core import grpc_helpers_async
from google.api_core import exceptions as core_exceptions
from google.api_core import retry_async as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.protobuf.json_format import MessageToJson
import google.protobuf.message

import grpc  # type: ignore
import proto  # type: ignore
from grpc.experimental import aio  # type: ignore

from google.cloud.firestore_v1.types import document
from google.cloud.firestore_v1.types import document as gf_document
from google.cloud.firestore_v1.types import firestore
from google.cloud.location import locations_pb2  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
from google.protobuf import empty_pb2  # type: ignore
from .base import FirestoreTransport, DEFAULT_CLIENT_INFO
from .grpc import FirestoreGrpcTransport

try:
    from google.api_core import client_logging  # type: ignore

    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
except ImportError:  # pragma: NO COVER
    CLIENT_LOGGING_SUPPORTED = False

_LOGGER = std_logging.getLogger(__name__)


class _LoggingClientAIOInterceptor(
    grpc.aio.UnaryUnaryClientInterceptor
):  # pragma: NO COVER
    async def intercept_unary_unary(self, continuation, client_call_details, request):
        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
            std_logging.DEBUG
        )
        if logging_enabled:  # pragma: NO COVER
            request_metadata = client_call_details.metadata
            if isinstance(request, proto.Message):
                request_payload = type(request).to_json(request)
            elif isinstance(request, google.protobuf.message.Message):
                request_payload = MessageToJson(request)
            else:
                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"

            request_metadata = {
                key: value.decode("utf-8") if isinstance(value, bytes) else value
                for key, value in request_metadata
            }
            grpc_request = {
                "payload": request_payload,
                "requestMethod": "grpc",
                "metadata": dict(request_metadata),
            }
            _LOGGER.debug(
                f"Sending request for {client_call_details.method}",
                extra={
                    "serviceName": "google.firestore.v1.Firestore",
                    "rpcName": str(client_call_details.method),
                    "request": grpc_request,
                    "metadata": grpc_request["metadata"],
                },
            )
        response = await continuation(client_call_details, request)
        if logging_enabled:  # pragma: NO COVER
            response_metadata = await response.trailing_metadata()
            # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to a dict of strings.
            metadata = (
                dict([(k, str(v)) for k, v in response_metadata])
                if response_metadata
                else None
            )
            result = await response
            if isinstance(result, proto.Message):
                response_payload = type(result).to_json(result)
            elif isinstance(result, google.protobuf.message.Message):
                response_payload = MessageToJson(result)
            else:
                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
            grpc_response = {
                "payload": response_payload,
                "metadata": metadata,
                "status": "OK",
            }
            _LOGGER.debug(
                f"Received response to rpc {client_call_details.method}.",
                extra={
                    "serviceName": "google.firestore.v1.Firestore",
                    "rpcName": str(client_call_details.method),
                    "response": grpc_response,
                    "metadata": grpc_response["metadata"],
                },
            )
        return response


class FirestoreGrpcAsyncIOTransport(FirestoreTransport):
    """gRPC AsyncIO backend transport for Firestore.

    The Cloud Firestore service.

    Cloud Firestore is a fast, fully managed, serverless,
    cloud-native NoSQL document database that simplifies storing,
    syncing, and querying data for your mobile, web, and IoT apps at
    global scale. Its client libraries provide live synchronization
    and offline support, while its security features and
    integrations with Firebase and Google Cloud Platform accelerate
    building truly serverless apps.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """

    _grpc_channel: aio.Channel
    _stubs: Dict[str, Callable] = {}
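
    # Illustrative usage sketch, not part of the generated surface: the transport
    # is normally built for you by the async client, but it can be constructed
    # directly and handed over via ``transport=``. The client import path below
    # is assumed from the parent package layout.
    #
    #     from google.cloud.firestore_v1.services.firestore import (
    #         FirestoreAsyncClient,
    #     )
    #
    #     transport = FirestoreGrpcAsyncIOTransport(
    #         host="firestore.googleapis.com",
    #     )
    #     client = FirestoreAsyncClient(transport=transport)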

    @classmethod
    def create_channel(
        cls,
        host: str = "firestore.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        **kwargs,
    ) -> aio.Channel:
        """Create and return a gRPC AsyncIO channel object.

        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): Deprecated. A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be
                removed in the next major version of this library.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.

        Returns:
            aio.Channel: A gRPC AsyncIO channel object.
        """

        return grpc_helpers_async.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs,
        )

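    # Illustrative sketch (assumptions noted inline): ``create_channel`` can be
    # called ahead of time, for example to share one channel between transports;
    # the host shown is the class default and the project ID is hypothetical.
    #
    #     channel = FirestoreGrpcAsyncIOTransport.create_channel(
    #         "firestore.googleapis.com",
    #         quota_project_id="my-billing-project",  # hypothetical project ID
    #     )
    #     transport = FirestoreGrpcAsyncIOTransport(channel=channel)
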
    def __init__(
        self,
        *,
        host: str = "firestore.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
        api_mtls_endpoint: Optional[str] = None,
        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to (default: 'firestore.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if a ``channel`` instance is provided.
            credentials_file (Optional[str]): Deprecated. A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if a ``channel`` instance is provided.
                This argument will be removed in the next major version of this library.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
                A ``Channel`` instance through which to make calls, or a Callable
                that constructs and returns one. If set to None, ``self.create_channel``
                is used to create the channel. If a Callable is given, it will be called
                with the same arguments as used in ``self.create_channel``.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if a ``channel`` instance is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
                be used for service account credentials.

        Raises:
            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
                creation failed for any reason.
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if isinstance(channel, aio.Channel):
            # Ignore credentials if a channel was passed.
            credentials = None
            self._ignore_credentials = True
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None
        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes.
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience,
        )

        if not self._grpc_channel:
            # Initialize with the provided callable or the default channel.
            channel_init = channel or type(self).create_channel
            self._grpc_channel = channel_init(
                self._host,
                # Use the credentials which are saved.
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        self._interceptor = _LoggingClientAIOInterceptor()
        self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
        self._logged_channel = self._grpc_channel
        self._wrap_with_kind = (
            "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
        )
        # Wrap messages. This must be done after self._logged_channel exists.
        self._prep_wrapped_messages(client_info)

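    # Illustrative sketch (assumptions noted inline): instead of a ``Channel``
    # instance, a callable can be supplied; it is invoked with the same arguments
    # as ``create_channel``, which makes it a convenient hook for customization.
    #
    #     def _make_channel(host, **kwargs):
    #         # e.g. inspect or tweak ``kwargs`` here before delegating
    #         return FirestoreGrpcAsyncIOTransport.create_channel(host, **kwargs)
    #
    #     transport = FirestoreGrpcAsyncIOTransport(channel=_make_channel)
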
    @property
    def grpc_channel(self) -> aio.Channel:
        """Create the channel designed to connect to this service.

        This property caches on the instance; repeated calls return
        the same channel.
        """
        # Return the channel from cache.
        return self._grpc_channel

    @property
    def get_document(
        self,
    ) -> Callable[[firestore.GetDocumentRequest], Awaitable[document.Document]]:
        r"""Return a callable for the get document method over gRPC.

        Gets a single document.

        Returns:
            Callable[[~.GetDocumentRequest],
                    Awaitable[~.Document]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_document" not in self._stubs:
            self._stubs["get_document"] = self._logged_channel.unary_unary(
                "/google.firestore.v1.Firestore/GetDocument",
                request_serializer=firestore.GetDocumentRequest.serialize,
                response_deserializer=document.Document.deserialize,
            )
        return self._stubs["get_document"]

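    # Illustrative sketch (assumptions noted inline): each property returns the
    # raw awaitable gRPC stub, without the retry/timeout wrapping applied by
    # ``_prep_wrapped_messages``; the document name below is a made-up path.
    #
    #     request = firestore.GetDocumentRequest(
    #         name="projects/my-project/databases/(default)/documents/users/alice",
    #     )
    #     doc = await transport.get_document(request)
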
    @property
    def list_documents(
        self,
    ) -> Callable[
        [firestore.ListDocumentsRequest], Awaitable[firestore.ListDocumentsResponse]
    ]:
        r"""Return a callable for the list documents method over gRPC.

        Lists documents.

        Returns:
            Callable[[~.ListDocumentsRequest],
                    Awaitable[~.ListDocumentsResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_documents" not in self._stubs:
            self._stubs["list_documents"] = self._logged_channel.unary_unary(
                "/google.firestore.v1.Firestore/ListDocuments",
                request_serializer=firestore.ListDocumentsRequest.serialize,
                response_deserializer=firestore.ListDocumentsResponse.deserialize,
            )
        return self._stubs["list_documents"]

    @property
    def update_document(
        self,
    ) -> Callable[[firestore.UpdateDocumentRequest], Awaitable[gf_document.Document]]:
        r"""Return a callable for the update document method over gRPC.

        Updates or inserts a document.

        Returns:
            Callable[[~.UpdateDocumentRequest],
                    Awaitable[~.Document]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "update_document" not in self._stubs:
            self._stubs["update_document"] = self._logged_channel.unary_unary(
                "/google.firestore.v1.Firestore/UpdateDocument",
                request_serializer=firestore.UpdateDocumentRequest.serialize,
                response_deserializer=gf_document.Document.deserialize,
            )
        return self._stubs["update_document"]

    @property
    def delete_document(
        self,
    ) -> Callable[[firestore.DeleteDocumentRequest], Awaitable[empty_pb2.Empty]]:
        r"""Return a callable for the delete document method over gRPC.

        Deletes a document.

        Returns:
            Callable[[~.DeleteDocumentRequest],
                    Awaitable[~.Empty]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "delete_document" not in self._stubs:
            self._stubs["delete_document"] = self._logged_channel.unary_unary(
                "/google.firestore.v1.Firestore/DeleteDocument",
                request_serializer=firestore.DeleteDocumentRequest.serialize,
                response_deserializer=empty_pb2.Empty.FromString,
            )
        return self._stubs["delete_document"]

    @property
    def batch_get_documents(
        self,
    ) -> Callable[
        [firestore.BatchGetDocumentsRequest],
        Awaitable[firestore.BatchGetDocumentsResponse],
    ]:
        r"""Return a callable for the batch get documents method over gRPC.

        Gets multiple documents.

        Documents returned by this method are not guaranteed to
        be returned in the same order that they were requested.

        Returns:
            Callable[[~.BatchGetDocumentsRequest],
                    Awaitable[~.BatchGetDocumentsResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "batch_get_documents" not in self._stubs:
            self._stubs["batch_get_documents"] = self._logged_channel.unary_stream(
                "/google.firestore.v1.Firestore/BatchGetDocuments",
                request_serializer=firestore.BatchGetDocumentsRequest.serialize,
                response_deserializer=firestore.BatchGetDocumentsResponse.deserialize,
            )
        return self._stubs["batch_get_documents"]

    @property
    def begin_transaction(
        self,
    ) -> Callable[
        [firestore.BeginTransactionRequest],
        Awaitable[firestore.BeginTransactionResponse],
    ]:
        r"""Return a callable for the begin transaction method over gRPC.

        Starts a new transaction.

        Returns:
            Callable[[~.BeginTransactionRequest],
                    Awaitable[~.BeginTransactionResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "begin_transaction" not in self._stubs:
            self._stubs["begin_transaction"] = self._logged_channel.unary_unary(
                "/google.firestore.v1.Firestore/BeginTransaction",
                request_serializer=firestore.BeginTransactionRequest.serialize,
                response_deserializer=firestore.BeginTransactionResponse.deserialize,
            )
        return self._stubs["begin_transaction"]

    @property
    def commit(
        self,
    ) -> Callable[[firestore.CommitRequest], Awaitable[firestore.CommitResponse]]:
        r"""Return a callable for the commit method over gRPC.

        Commits a transaction, while optionally updating
        documents.

        Returns:
            Callable[[~.CommitRequest],
                    Awaitable[~.CommitResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "commit" not in self._stubs:
            self._stubs["commit"] = self._logged_channel.unary_unary(
                "/google.firestore.v1.Firestore/Commit",
                request_serializer=firestore.CommitRequest.serialize,
                response_deserializer=firestore.CommitResponse.deserialize,
            )
        return self._stubs["commit"]

    @property
    def rollback(
        self,
    ) -> Callable[[firestore.RollbackRequest], Awaitable[empty_pb2.Empty]]:
        r"""Return a callable for the rollback method over gRPC.

        Rolls back a transaction.

        Returns:
            Callable[[~.RollbackRequest],
                    Awaitable[~.Empty]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "rollback" not in self._stubs:
            self._stubs["rollback"] = self._logged_channel.unary_unary(
                "/google.firestore.v1.Firestore/Rollback",
                request_serializer=firestore.RollbackRequest.serialize,
                response_deserializer=empty_pb2.Empty.FromString,
            )
        return self._stubs["rollback"]

    @property
    def run_query(
        self,
    ) -> Callable[[firestore.RunQueryRequest], Awaitable[firestore.RunQueryResponse]]:
        r"""Return a callable for the run query method over gRPC.

        Runs a query.

        Returns:
            Callable[[~.RunQueryRequest],
                    Awaitable[~.RunQueryResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "run_query" not in self._stubs:
            self._stubs["run_query"] = self._logged_channel.unary_stream(
                "/google.firestore.v1.Firestore/RunQuery",
                request_serializer=firestore.RunQueryRequest.serialize,
                response_deserializer=firestore.RunQueryResponse.deserialize,
            )
        return self._stubs["run_query"]

    @property
    def run_aggregation_query(
        self,
    ) -> Callable[
        [firestore.RunAggregationQueryRequest],
        Awaitable[firestore.RunAggregationQueryResponse],
    ]:
        r"""Return a callable for the run aggregation query method over gRPC.

        Runs an aggregation query.

        Rather than producing [Document][google.firestore.v1.Document]
        results like
        [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery],
        this API allows running an aggregation to produce a series of
        [AggregationResult][google.firestore.v1.AggregationResult]
        server-side.

        High-Level Example:

        ::

            -- Return the number of documents in a table given a filter.
            SELECT COUNT(*) FROM ( SELECT * FROM k where a = true );

        Returns:
            Callable[[~.RunAggregationQueryRequest],
                    Awaitable[~.RunAggregationQueryResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "run_aggregation_query" not in self._stubs:
            self._stubs["run_aggregation_query"] = self._logged_channel.unary_stream(
                "/google.firestore.v1.Firestore/RunAggregationQuery",
                request_serializer=firestore.RunAggregationQueryRequest.serialize,
                response_deserializer=firestore.RunAggregationQueryResponse.deserialize,
            )
        return self._stubs["run_aggregation_query"]

    @property
    def partition_query(
        self,
    ) -> Callable[
        [firestore.PartitionQueryRequest], Awaitable[firestore.PartitionQueryResponse]
    ]:
        r"""Return a callable for the partition query method over gRPC.

        Partitions a query by returning partition cursors
        that can be used to run the query in parallel. The
        returned partition cursors are split points that can be
        used by RunQuery as starting/end points for the query
        results.

        Returns:
            Callable[[~.PartitionQueryRequest],
                    Awaitable[~.PartitionQueryResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "partition_query" not in self._stubs:
            self._stubs["partition_query"] = self._logged_channel.unary_unary(
                "/google.firestore.v1.Firestore/PartitionQuery",
                request_serializer=firestore.PartitionQueryRequest.serialize,
                response_deserializer=firestore.PartitionQueryResponse.deserialize,
            )
        return self._stubs["partition_query"]

    @property
    def write(
        self,
    ) -> Callable[[firestore.WriteRequest], Awaitable[firestore.WriteResponse]]:
        r"""Return a callable for the write method over gRPC.

        Streams batches of document updates and deletes, in
        order. This method is only available via gRPC or
        WebChannel (not REST).

        Returns:
            Callable[[~.WriteRequest],
                    Awaitable[~.WriteResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "write" not in self._stubs:
            self._stubs["write"] = self._logged_channel.stream_stream(
                "/google.firestore.v1.Firestore/Write",
                request_serializer=firestore.WriteRequest.serialize,
                response_deserializer=firestore.WriteResponse.deserialize,
            )
        return self._stubs["write"]

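    # Illustrative sketch (assumptions noted inline): ``write`` is a bidirectional
    # streaming stub, so it is driven with an async request iterator; the stream
    # ID handshake and the actual write payloads are omitted here.
    #
    #     async def _requests():
    #         yield firestore.WriteRequest(
    #             database="projects/my-project/databases/(default)",
    #         )
    #
    #     async for response in transport.write(_requests()):
    #         ...
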
    @property
    def listen(
        self,
    ) -> Callable[[firestore.ListenRequest], Awaitable[firestore.ListenResponse]]:
        r"""Return a callable for the listen method over gRPC.

        Listens to changes. This method is only available via
        gRPC or WebChannel (not REST).

        Returns:
            Callable[[~.ListenRequest],
                    Awaitable[~.ListenResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "listen" not in self._stubs:
            self._stubs["listen"] = self._logged_channel.stream_stream(
                "/google.firestore.v1.Firestore/Listen",
                request_serializer=firestore.ListenRequest.serialize,
                response_deserializer=firestore.ListenResponse.deserialize,
            )
        return self._stubs["listen"]

    @property
    def list_collection_ids(
        self,
    ) -> Callable[
        [firestore.ListCollectionIdsRequest],
        Awaitable[firestore.ListCollectionIdsResponse],
    ]:
        r"""Return a callable for the list collection ids method over gRPC.

        Lists all the collection IDs underneath a document.

        Returns:
            Callable[[~.ListCollectionIdsRequest],
                    Awaitable[~.ListCollectionIdsResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_collection_ids" not in self._stubs:
            self._stubs["list_collection_ids"] = self._logged_channel.unary_unary(
                "/google.firestore.v1.Firestore/ListCollectionIds",
                request_serializer=firestore.ListCollectionIdsRequest.serialize,
                response_deserializer=firestore.ListCollectionIdsResponse.deserialize,
            )
        return self._stubs["list_collection_ids"]

    @property
    def batch_write(
        self,
    ) -> Callable[
        [firestore.BatchWriteRequest], Awaitable[firestore.BatchWriteResponse]
    ]:
        r"""Return a callable for the batch write method over gRPC.

        Applies a batch of write operations.

        The BatchWrite method does not apply the write operations
        atomically and can apply them out of order. The method does not
        allow more than one write per document. Each write succeeds or
        fails independently. See the
        [BatchWriteResponse][google.firestore.v1.BatchWriteResponse] for
        the success status of each write.

        If you require an atomically applied set of writes, use
        [Commit][google.firestore.v1.Firestore.Commit] instead.

        Returns:
            Callable[[~.BatchWriteRequest],
                    Awaitable[~.BatchWriteResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "batch_write" not in self._stubs:
            self._stubs["batch_write"] = self._logged_channel.unary_unary(
                "/google.firestore.v1.Firestore/BatchWrite",
                request_serializer=firestore.BatchWriteRequest.serialize,
                response_deserializer=firestore.BatchWriteResponse.deserialize,
            )
        return self._stubs["batch_write"]

    @property
    def create_document(
        self,
    ) -> Callable[[firestore.CreateDocumentRequest], Awaitable[document.Document]]:
        r"""Return a callable for the create document method over gRPC.

        Creates a new document.

        Returns:
            Callable[[~.CreateDocumentRequest],
                    Awaitable[~.Document]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "create_document" not in self._stubs:
            self._stubs["create_document"] = self._logged_channel.unary_unary(
                "/google.firestore.v1.Firestore/CreateDocument",
                request_serializer=firestore.CreateDocumentRequest.serialize,
                response_deserializer=document.Document.deserialize,
            )
        return self._stubs["create_document"]

    def _prep_wrapped_messages(self, client_info):
        """Precompute the wrapped methods, overriding the base class method to use async wrappers."""
        self._wrapped_methods = {
            self.get_document: self._wrap_method(
                self.get_document,
                default_retry=retries.AsyncRetry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ResourceExhausted,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.list_documents: self._wrap_method(
                self.list_documents,
                default_retry=retries.AsyncRetry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ResourceExhausted,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.update_document: self._wrap_method(
                self.update_document,
                default_retry=retries.AsyncRetry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ResourceExhausted,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.delete_document: self._wrap_method(
                self.delete_document,
                default_retry=retries.AsyncRetry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ResourceExhausted,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.batch_get_documents: self._wrap_method(
                self.batch_get_documents,
                default_retry=retries.AsyncRetry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ResourceExhausted,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=300.0,
                ),
                default_timeout=300.0,
                client_info=client_info,
            ),
            self.begin_transaction: self._wrap_method(
                self.begin_transaction,
                default_retry=retries.AsyncRetry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ResourceExhausted,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.commit: self._wrap_method(
                self.commit,
                default_retry=retries.AsyncRetry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ResourceExhausted,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.rollback: self._wrap_method(
                self.rollback,
                default_retry=retries.AsyncRetry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ResourceExhausted,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.run_query: self._wrap_method(
                self.run_query,
                default_retry=retries.AsyncRetry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ResourceExhausted,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=300.0,
                ),
                default_timeout=300.0,
                client_info=client_info,
            ),
            self.run_aggregation_query: self._wrap_method(
                self.run_aggregation_query,
                default_retry=retries.AsyncRetry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ResourceExhausted,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=300.0,
                ),
                default_timeout=300.0,
                client_info=client_info,
            ),
            self.partition_query: self._wrap_method(
                self.partition_query,
                default_retry=retries.AsyncRetry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ResourceExhausted,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=300.0,
                ),
                default_timeout=300.0,
                client_info=client_info,
            ),
            self.write: self._wrap_method(
                self.write,
                default_timeout=86400.0,
                client_info=client_info,
            ),
            self.listen: self._wrap_method(
                self.listen,
                default_retry=retries.AsyncRetry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ResourceExhausted,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=86400.0,
                ),
                default_timeout=86400.0,
                client_info=client_info,
            ),
            self.list_collection_ids: self._wrap_method(
                self.list_collection_ids,
                default_retry=retries.AsyncRetry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ResourceExhausted,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.batch_write: self._wrap_method(
                self.batch_write,
                default_retry=retries.AsyncRetry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.Aborted,
                        core_exceptions.ResourceExhausted,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.create_document: self._wrap_method(
                self.create_document,
                default_retry=retries.AsyncRetry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ResourceExhausted,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.cancel_operation: self._wrap_method(
                self.cancel_operation,
                default_timeout=None,
                client_info=client_info,
            ),
            self.delete_operation: self._wrap_method(
                self.delete_operation,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get_operation: self._wrap_method(
                self.get_operation,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list_operations: self._wrap_method(
                self.list_operations,
                default_timeout=None,
                client_info=client_info,
            ),
        }

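    # Illustrative sketch (assumptions noted inline): the retry objects above use
    # the standard ``google.api_core`` exponential backoff; building one by hand
    # and applying it to a raw stub looks roughly like this.
    #
    #     retry = retries.AsyncRetry(
    #         initial=0.1,      # first delay, in seconds
    #         maximum=60.0,     # cap on any single delay
    #         multiplier=1.3,   # growth factor between delays
    #         predicate=retries.if_exception_type(
    #             core_exceptions.ServiceUnavailable,
    #         ),
    #         deadline=60.0,    # overall time budget, in seconds
    #     )
    #     doc = await retry(transport.get_document)(request)
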
    def _wrap_method(self, func, *args, **kwargs):
        if self._wrap_with_kind:  # pragma: NO COVER
            kwargs["kind"] = self.kind
        return gapic_v1.method_async.wrap_method(func, *args, **kwargs)

    def close(self):
        return self._logged_channel.close()

    @property
    def kind(self) -> str:
        return "grpc_asyncio"

    @property
    def delete_operation(
        self,
    ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:
        r"""Return a callable for the delete_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "delete_operation" not in self._stubs:
            self._stubs["delete_operation"] = self._logged_channel.unary_unary(
                "/google.longrunning.Operations/DeleteOperation",
                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
                response_deserializer=None,
            )
        return self._stubs["delete_operation"]

    @property
    def cancel_operation(
        self,
    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
        r"""Return a callable for the cancel_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "cancel_operation" not in self._stubs:
            self._stubs["cancel_operation"] = self._logged_channel.unary_unary(
                "/google.longrunning.Operations/CancelOperation",
                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
                response_deserializer=None,
            )
        return self._stubs["cancel_operation"]

    @property
    def get_operation(
        self,
    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
        r"""Return a callable for the get_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_operation" not in self._stubs:
            self._stubs["get_operation"] = self._logged_channel.unary_unary(
                "/google.longrunning.Operations/GetOperation",
                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["get_operation"]

    @property
    def list_operations(
        self,
    ) -> Callable[
        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
    ]:
        r"""Return a callable for the list_operations method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_operations" not in self._stubs:
            self._stubs["list_operations"] = self._logged_channel.unary_unary(
                "/google.longrunning.Operations/ListOperations",
                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
            )
        return self._stubs["list_operations"]


__all__ = ("FirestoreGrpcAsyncIOTransport",)