Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/google/cloud/logging_v2/_gapic.py: 30%
151 statements
coverage.py v7.3.2, created at 2023-12-08 06:45 +0000

# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Wrapper for adapting the autogenerated gapic client to the hand-written
client."""

from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client
from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client
from google.cloud.logging_v2.types import CreateSinkRequest
from google.cloud.logging_v2.types import UpdateSinkRequest
from google.cloud.logging_v2.types import ListSinksRequest
from google.cloud.logging_v2.types import ListLogMetricsRequest
from google.cloud.logging_v2.types import ListLogEntriesRequest
from google.cloud.logging_v2.types import WriteLogEntriesRequest
from google.cloud.logging_v2.types import LogSink
from google.cloud.logging_v2.types import LogMetric
from google.cloud.logging_v2.types import LogEntry as LogEntryPB

from google.protobuf.json_format import MessageToDict
from google.protobuf.json_format import ParseDict

from google.cloud.logging_v2._helpers import entry_from_resource
from google.cloud.logging_v2.sink import Sink
from google.cloud.logging_v2.metric import Metric

from google.api_core import client_info
from google.api_core import gapic_v1


class _LoggingAPI(object):
    """Helper mapping logging-related APIs."""

    def __init__(self, gapic_api, client):
        self._gapic_api = gapic_api
        self._client = client

    def list_entries(
        self,
        resource_names,
        *,
        filter_=None,
        order_by=None,
        max_results=None,
        page_size=None,
        page_token=None,
    ):
        """Return a generator of log entry resources.

        Args:
            resource_names (Sequence[str]): Names of one or more parent resources
                from which to retrieve log entries:

                ::

                    "projects/[PROJECT_ID]"
                    "organizations/[ORGANIZATION_ID]"
                    "billingAccounts/[BILLING_ACCOUNT_ID]"
                    "folders/[FOLDER_ID]"

            filter_ (str): a filter expression. See
                https://cloud.google.com/logging/docs/view/advanced_filters
            order_by (str): One of :data:`~logging_v2.ASCENDING`
                or :data:`~logging_v2.DESCENDING`.
            max_results (Optional[int]):
                Optional. The maximum number of entries to return.
                Non-positive values are treated as 0. If None, uses API defaults.
            page_size (int): number of entries to fetch in each API call. Although
                requests are paged internally, logs are returned by the generator
                one at a time. If not passed, defaults to a value set by the API.
            page_token (str): opaque marker for the starting "page" of entries. If not
                passed, the API will return the first page of entries.

        Returns:
            Generator[~logging_v2.LogEntry]
        """
        # full resource names are expected by the API
        request = ListLogEntriesRequest(
            resource_names=resource_names,
            filter=filter_,
            order_by=order_by,
            page_size=page_size,
            page_token=page_token,
        )

        response = self._gapic_api.list_log_entries(request=request)
        log_iter = iter(response)

        # We attach a mutable loggers dictionary so that as Logger
        # objects are created by entry_from_resource, they can be
        # re-used by other log entries from the same logger.
        loggers = {}

        if max_results is not None and max_results < 0:
            raise ValueError("max_results must be positive")

        # create generator
        def log_entries_pager(log_iter):
            i = 0
            for entry in log_iter:
                if max_results is not None and i >= max_results:
                    break
                log_entry_dict = _parse_log_entry(LogEntryPB.pb(entry))
                yield entry_from_resource(
                    log_entry_dict, self._client, loggers=loggers
                )
                i += 1

        return log_entries_pager(log_iter)
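
    # Illustrative usage sketch (not part of this module's API surface): the
    # adapter is normally obtained via ``make_logging_api`` below, given a
    # configured hand-written ``client``; the project id, filter, and
    # max_results values here are assumptions.
    #
    #   logging_api = make_logging_api(client)
    #   for entry in logging_api.list_entries(
    #       ["projects/my-project"], filter_="severity>=ERROR", max_results=5
    #   ):
    #       print(entry.payload)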

    def write_entries(
        self,
        entries,
        *,
        logger_name=None,
        resource=None,
        labels=None,
        partial_success=True,
        dry_run=False,
    ):
        """Log entry resources via a POST request.

        Args:
            entries (Sequence[Mapping[str, ...]]): sequence of mappings representing
                the log entry resources to log.
            logger_name (Optional[str]): name of default logger to which to log the entries;
                individual entries may override.
            resource (Optional[Mapping[str, ...]]): default resource to associate with entries;
                individual entries may override.
            labels (Optional[Mapping[str, ...]]): default labels to associate with entries;
                individual entries may override.
            partial_success (Optional[bool]): Whether valid entries should be written even if
                some other entries fail due to INVALID_ARGUMENT or
                PERMISSION_DENIED errors. If any entry is not written, then
                the response status is the error associated with one of the
                failed entries and the response includes error details keyed
                by the entries' zero-based index in the ``entries.write``
                method.
            dry_run (Optional[bool]):
                If true, the request should expect a normal response,
                but the entries won't be persisted or exported.
                Useful for checking whether the logging API endpoints are working
                properly before sending valuable data.
        """
        log_entry_pbs = [_log_entry_mapping_to_pb(entry) for entry in entries]

        request = WriteLogEntriesRequest(
            log_name=logger_name,
            resource=resource,
            labels=labels,
            entries=log_entry_pbs,
            partial_success=partial_success,
            dry_run=dry_run,
        )
        self._gapic_api.write_log_entries(request=request)
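
    # Illustrative usage sketch: entries are JSON-style mappings (camelCase keys,
    # as produced by ``Logger._make_entry_resource``); the log name and payload
    # below are assumptions for the example.
    #
    #   logging_api.write_entries(
    #       [{"textPayload": "hello world", "severity": "INFO"}],
    #       logger_name="projects/my-project/logs/my-log",
    #       resource={"type": "global", "labels": {}},
    #   )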

    def logger_delete(self, logger_name):
        """Delete all entries in a logger.

        Args:
            logger_name (str): The resource name of the log to delete:

                ::

                    "projects/[PROJECT_ID]/logs/[LOG_ID]"
                    "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]"
                    "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]"
                    "folders/[FOLDER_ID]/logs/[LOG_ID]"

                ``[LOG_ID]`` must be URL-encoded. For example,
                ``"projects/my-project-id/logs/syslog"``,
                ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``.
        """
        self._gapic_api.delete_log(log_name=logger_name)


class _SinksAPI(object):
    """Helper mapping sink-related APIs."""

    def __init__(self, gapic_api, client):
        self._gapic_api = gapic_api
        self._client = client

    def list_sinks(self, parent, *, max_results=None, page_size=None, page_token=None):
        """List sinks for the parent resource.

        Args:
            parent (str): The parent resource whose sinks are to be listed:

                ::

                    "projects/[PROJECT_ID]"
                    "organizations/[ORGANIZATION_ID]"
                    "billingAccounts/[BILLING_ACCOUNT_ID]"
                    "folders/[FOLDER_ID]"

            max_results (Optional[int]):
                Optional. The maximum number of entries to return.
                Non-positive values are treated as 0. If None, uses API defaults.
            page_size (int): number of entries to fetch in each API call. Although
                requests are paged internally, logs are returned by the generator
                one at a time. If not passed, defaults to a value set by the API.
            page_token (str): opaque marker for the starting "page" of entries. If not
                passed, the API will return the first page of entries.

        Returns:
            Generator[~logging_v2.Sink]
        """
        request = ListSinksRequest(
            parent=parent, page_size=page_size, page_token=page_token
        )
        response = self._gapic_api.list_sinks(request)
        sink_iter = iter(response)

        if max_results is not None and max_results < 0:
            raise ValueError("max_results must be positive")

        def sinks_pager(sink_iter):
            i = 0
            for entry in sink_iter:
                if max_results is not None and i >= max_results:
                    break
                # Convert the GAPIC sink type into the handwritten `Sink` type
                yield Sink.from_api_repr(LogSink.to_dict(entry), client=self._client)
                i += 1

        return sinks_pager(sink_iter)
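
    # Illustrative usage sketch (parent id is an assumption): the pager yields
    # handwritten ``Sink`` objects rather than GAPIC ``LogSink`` messages.
    #
    #   sinks_api = make_sinks_api(client)
    #   for sink in sinks_api.list_sinks("projects/my-project", max_results=10):
    #       print(sink.name, sink.destination)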

    def sink_create(
        self, parent, sink_name, filter_, destination, *, unique_writer_identity=False
    ):
        """Create a sink resource.

        See
        https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create

        Args:
            parent (str): The resource in which to create the sink:

                ::

                    "projects/[PROJECT_ID]"
                    "organizations/[ORGANIZATION_ID]"
                    "billingAccounts/[BILLING_ACCOUNT_ID]"
                    "folders/[FOLDER_ID]"

            sink_name (str): The name of the sink.
            filter_ (str): The advanced logs filter expression defining the
                entries exported by the sink.
            destination (str): Destination URI for the entries exported by
                the sink.
            unique_writer_identity (Optional[bool]): determines the kind of
                IAM identity returned as writer_identity in the new sink.

        Returns:
            dict: The sink resource returned from the API (converted from a
                protobuf to a dictionary).
        """
        sink_pb = LogSink(name=sink_name, filter=filter_, destination=destination)
        request = CreateSinkRequest(
            parent=parent, sink=sink_pb, unique_writer_identity=unique_writer_identity
        )
        created_pb = self._gapic_api.create_sink(request=request)
        return MessageToDict(
            LogSink.pb(created_pb),
            preserving_proto_field_name=False,
            including_default_value_fields=False,
        )
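
    # Illustrative usage sketch (sink name, filter, and destination are
    # assumptions); the return value is the created sink as a JSON-style dict.
    #
    #   created = sinks_api.sink_create(
    #       "projects/my-project",
    #       "error-sink",
    #       "severity>=ERROR",
    #       "storage.googleapis.com/my-bucket",
    #   )
    #   print(created["destination"])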

    def sink_get(self, sink_name):
        """Retrieve a sink resource.

        Args:
            sink_name (str): The resource name of the sink,
                including the parent resource and the sink identifier:

                ::

                    "projects/[PROJECT_ID]/sinks/[SINK_ID]"
                    "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
                    "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
                    "folders/[FOLDER_ID]/sinks/[SINK_ID]"

        Returns:
            dict: The sink object returned from the API (converted from a
                protobuf to a dictionary).
        """
        sink_pb = self._gapic_api.get_sink(sink_name=sink_name)
        # NOTE: LogSink message type does not have an ``Any`` field
        # so ``MessageToDict`` can safely be used.
        return MessageToDict(
            LogSink.pb(sink_pb),
            preserving_proto_field_name=False,
            including_default_value_fields=False,
        )

    def sink_update(
        self,
        sink_name,
        filter_,
        destination,
        *,
        unique_writer_identity=False,
    ):
        """Update a sink resource.

        Args:
            sink_name (str): Required. The resource name of the sink,
                including the parent resource and the sink identifier:

                ::

                    "projects/[PROJECT_ID]/sinks/[SINK_ID]"
                    "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
                    "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
                    "folders/[FOLDER_ID]/sinks/[SINK_ID]"

            filter_ (str): The advanced logs filter expression defining the
                entries exported by the sink.
            destination (str): Destination URI for the entries exported by
                the sink.
            unique_writer_identity (Optional[bool]): determines the kind of
                IAM identity returned as writer_identity in the new sink.

        Returns:
            dict: The sink resource returned from the API (converted from a
                protobuf to a dictionary).
        """
        name = sink_name.split("/")[-1]  # parse name out of full resource name
        sink_pb = LogSink(
            name=name,
            filter=filter_,
            destination=destination,
        )

        request = UpdateSinkRequest(
            sink_name=sink_name,
            sink=sink_pb,
            unique_writer_identity=unique_writer_identity,
        )
        sink_pb = self._gapic_api.update_sink(request=request)
        # NOTE: LogSink message type does not have an ``Any`` field
        # so ``MessageToDict`` can safely be used.
        return MessageToDict(
            LogSink.pb(sink_pb),
            preserving_proto_field_name=False,
            including_default_value_fields=False,
        )
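
    # Illustrative usage sketch (names and filter are assumptions). Note that
    # only the trailing sink id goes into the ``LogSink`` payload, while the
    # full resource name is sent on the ``UpdateSinkRequest`` itself.
    #
    #   updated = sinks_api.sink_update(
    #       "projects/my-project/sinks/error-sink",
    #       "severity>=WARNING",
    #       "storage.googleapis.com/my-bucket",
    #   )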

    def sink_delete(self, sink_name):
        """Delete a sink resource.

        Args:
            sink_name (str): Required. The full resource name of the sink to delete,
                including the parent resource and the sink identifier:

                ::

                    "projects/[PROJECT_ID]/sinks/[SINK_ID]"
                    "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
                    "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
                    "folders/[FOLDER_ID]/sinks/[SINK_ID]"

                Example: ``"projects/my-project-id/sinks/my-sink-id"``.
        """
        self._gapic_api.delete_sink(sink_name=sink_name)


class _MetricsAPI(object):
    """Helper mapping metric-related APIs."""

    def __init__(self, gapic_api, client):
        self._gapic_api = gapic_api
        self._client = client

    def list_metrics(
        self, project, *, max_results=None, page_size=None, page_token=None
    ):
        """List metrics for the project associated with this client.

        Args:
            project (str): ID of the project whose metrics are to be listed.
            max_results (Optional[int]):
                Optional. The maximum number of entries to return.
                Non-positive values are treated as 0. If None, uses API defaults.
            page_size (int): number of entries to fetch in each API call. Although
                requests are paged internally, logs are returned by the generator
                one at a time. If not passed, defaults to a value set by the API.
            page_token (str): opaque marker for the starting "page" of entries. If not
                passed, the API will return the first page of entries.

        Returns:
            Generator[logging_v2.Metric]
        """
        path = f"projects/{project}"
        request = ListLogMetricsRequest(
            parent=path,
            page_size=page_size,
            page_token=page_token,
        )
        response = self._gapic_api.list_log_metrics(request=request)
        metric_iter = iter(response)

        if max_results is not None and max_results < 0:
            raise ValueError("max_results must be positive")

        def metrics_pager(metric_iter):
            i = 0
            for entry in metric_iter:
                if max_results is not None and i >= max_results:
                    break
                # Convert GAPIC metrics type into handwritten `Metric` type
                yield Metric.from_api_repr(
                    LogMetric.to_dict(entry), client=self._client
                )
                i += 1

        return metrics_pager(metric_iter)
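
    # Illustrative usage sketch (project id is an assumption): the pager yields
    # handwritten ``Metric`` objects built from the GAPIC ``LogMetric`` dicts.
    #
    #   metrics_api = make_metrics_api(client)
    #   for metric in metrics_api.list_metrics("my-project", max_results=10):
    #       print(metric.name)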

    def metric_create(self, project, metric_name, filter_, description):
        """Create a metric resource.

        See
        https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create

        Args:
            project (str): ID of the project in which to create the metric.
            metric_name (str): The name of the metric.
            filter_ (str): The advanced logs filter expression defining the
                entries tracked by the metric.
            description (str): description of the metric.
        """
        parent = f"projects/{project}"
        metric_pb = LogMetric(name=metric_name, filter=filter_, description=description)
        self._gapic_api.create_log_metric(parent=parent, metric=metric_pb)

    def metric_get(self, project, metric_name):
        """Retrieve a metric resource.

        Args:
            project (str): ID of the project containing the metric.
            metric_name (str): The name of the metric.

        Returns:
            dict: The metric object returned from the API (converted from a
                protobuf to a dictionary).
        """
        path = f"projects/{project}/metrics/{metric_name}"
        metric_pb = self._gapic_api.get_log_metric(metric_name=path)
        # NOTE: LogMetric message type does not have an ``Any`` field
        # so ``MessageToDict`` can safely be used.
        return MessageToDict(
            LogMetric.pb(metric_pb),
            preserving_proto_field_name=False,
            including_default_value_fields=False,
        )

    def metric_update(
        self,
        project,
        metric_name,
        filter_,
        description,
    ):
        """Update a metric resource.

        Args:
            project (str): ID of the project containing the metric.
            metric_name (str): the name of the metric.
            filter_ (str): the advanced logs filter expression defining the
                entries tracked by the metric.
            description (str): description of the metric.

        Returns:
            dict: The metric object returned from the API (converted from a
                protobuf to a dictionary).
        """
        path = f"projects/{project}/metrics/{metric_name}"
        metric_pb = LogMetric(
            name=path,
            filter=filter_,
            description=description,
        )
        metric_pb = self._gapic_api.update_log_metric(
            metric_name=path, metric=metric_pb
        )
        # NOTE: LogMetric message type does not have an ``Any`` field
        # so ``MessageToDict`` can safely be used.
        return MessageToDict(
            LogMetric.pb(metric_pb),
            preserving_proto_field_name=False,
            including_default_value_fields=False,
        )
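
    # Illustrative usage sketch (metric name, filter, and description are
    # assumptions); the return value is the updated metric as a JSON-style dict.
    #
    #   updated = metrics_api.metric_update(
    #       "my-project", "error-count", "severity>=ERROR", "Count of error entries"
    #   )
    #   print(updated["filter"])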

    def metric_delete(self, project, metric_name):
        """Delete a metric resource.

        Args:
            project (str): ID of the project containing the metric.
            metric_name (str): The name of the metric.
        """
        path = f"projects/{project}/metrics/{metric_name}"
        self._gapic_api.delete_log_metric(metric_name=path)


def _parse_log_entry(entry_pb):
    """Special helper to parse ``LogEntry`` protobuf into a dictionary.

    The ``proto_payload`` field in ``LogEntry`` is of type ``Any``. This
    can be problematic if the type URL in the payload isn't in the
    ``google.protobuf`` registry. To help with parsing unregistered types,
    this function will remove ``proto_payload`` before parsing.

    Args:
        entry_pb (LogEntry): Log entry protobuf.

    Returns:
        dict: The parsed log entry. The ``protoPayload`` key may contain
            the raw ``Any`` protobuf from ``entry_pb.proto_payload`` if
            it could not be parsed.
    """
    try:
        return MessageToDict(
            entry_pb,
            preserving_proto_field_name=False,
            including_default_value_fields=False,
        )
    except TypeError:
        if entry_pb.HasField("proto_payload"):
            proto_payload = entry_pb.proto_payload
            entry_pb.ClearField("proto_payload")
            entry_mapping = MessageToDict(
                entry_pb,
                preserving_proto_field_name=False,
                including_default_value_fields=False,
            )
            entry_mapping["protoPayload"] = proto_payload
            return entry_mapping
        else:
            raise
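
# Behavior sketch for the fallback above (the payload type is an assumed,
# unregistered example): every field except ``proto_payload`` is converted to
# its camelCase JSON form, and the raw ``Any`` message is re-attached afterwards.
#
#   parsed = _parse_log_entry(entry_pb)
#   parsed["logName"]       # e.g. "projects/my-project/logs/my-log"
#   parsed["protoPayload"]  # the original ``Any`` protobuf, left unparsed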


def _log_entry_mapping_to_pb(mapping):
    """Helper for :meth:`write_entries`, et aliae

    Performs "impedance matching" between the protobuf attrs and
    the keys expected in the JSON API.
    """
    entry_pb = LogEntryPB.pb(LogEntryPB())
    # NOTE: We assume ``mapping`` was created in ``Batch.commit``
    # or ``Logger._make_entry_resource``. In either case, if
    # the ``protoPayload`` key is present, we assume that the
    # type URL is registered with ``google.protobuf`` and will
    # not cause any issues in the JSON->protobuf conversion
    # of the corresponding ``proto_payload`` in the log entry
    # (it is an ``Any`` field).
    ParseDict(mapping, entry_pb)
    return LogEntryPB(entry_pb)
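
# Illustrative round trip (values are assumptions): JSON-API keys are parsed
# into the corresponding protobuf attributes, e.g. ``textPayload`` becomes
# ``text_payload`` on the returned proto-plus ``LogEntry``.
#
#   pb = _log_entry_mapping_to_pb({"textPayload": "hi", "severity": "INFO"})
#   pb.text_payload  # "hi"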


def _client_info_to_gapic(input_info):
    """Convert a ``client_info.ClientInfo`` into the gapic-compatible
    ``gapic_v1.client_info.ClientInfo`` subclass.
    """
    return gapic_v1.client_info.ClientInfo(
        python_version=input_info.python_version,
        grpc_version=input_info.grpc_version,
        api_core_version=input_info.api_core_version,
        gapic_version=input_info.gapic_version,
        client_library_version=input_info.client_library_version,
        user_agent=input_info.user_agent,
        rest_version=input_info.rest_version,
    )


def make_logging_api(client):
    """Create an instance of the Logging API adapter.

    Args:
        client (~logging_v2.client.Client): The client
            that holds configuration details.

    Returns:
        _LoggingAPI: A logging API instance with the proper credentials.
    """
    info = client._client_info
    if isinstance(info, client_info.ClientInfo):
        # convert into gapic-compatible subclass
        info = _client_info_to_gapic(info)

    generated = LoggingServiceV2Client(
        credentials=client._credentials,
        client_info=info,
        client_options=client._client_options,
    )
    return _LoggingAPI(generated, client)
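
# Typical wiring sketch (the ``Client`` construction below is an assumption;
# any object exposing ``_client_info``, ``_credentials``, and
# ``_client_options`` would do):
#
#   from google.cloud.logging_v2.client import Client
#
#   client = Client(project="my-project")
#   logging_api = make_logging_api(client)
#   sinks_api = make_sinks_api(client)
#   metrics_api = make_metrics_api(client)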


def make_metrics_api(client):
    """Create an instance of the Metrics API adapter.

    Args:
        client (~logging_v2.client.Client): The client
            that holds configuration details.

    Returns:
        _MetricsAPI: A metrics API instance with the proper credentials.
    """
    info = client._client_info
    if isinstance(info, client_info.ClientInfo):
        # convert into gapic-compatible subclass
        info = _client_info_to_gapic(info)

    generated = MetricsServiceV2Client(
        credentials=client._credentials,
        client_info=info,
        client_options=client._client_options,
    )
    return _MetricsAPI(generated, client)


def make_sinks_api(client):
    """Create an instance of the Sinks API adapter.

    Args:
        client (~logging_v2.client.Client): The client
            that holds configuration details.

    Returns:
        _SinksAPI: A sinks API instance with the proper credentials.
    """
    info = client._client_info
    if isinstance(info, client_info.ClientInfo):
        # convert into gapic-compatible subclass
        info = _client_info_to_gapic(info)

    generated = ConfigServiceV2Client(
        credentials=client._credentials,
        client_info=info,
        client_options=client._client_options,
    )
    return _SinksAPI(generated, client)