Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/google/cloud/logging_v2/handlers/handlers.py: 24%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

117 statements  

1# Copyright 2016 Google LLC 

2# 

3# Licensed under the Apache License, Version 2.0 (the "License"); 

4# you may not use this file except in compliance with the License. 

5# You may obtain a copy of the License at 

6# 

7# http://www.apache.org/licenses/LICENSE-2.0 

8# 

9# Unless required by applicable law or agreed to in writing, software 

10# distributed under the License is distributed on an "AS IS" BASIS, 

11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 

12# See the License for the specific language governing permissions and 

13# limitations under the License. 

14 

15"""Python :mod:`logging` handlers for Cloud Logging.""" 

16 

17import collections 

18import json 

19import logging 

20 

21from typing import Optional, IO, Type 

22 

23from google.cloud.logging_v2.handlers.transports import ( 

24 BackgroundThreadTransport, 

25 Transport, 

26) 

27from google.cloud.logging_v2.handlers._monitored_resources import ( 

28 detect_resource, 

29 add_resource_labels, 

30) 

31from google.cloud.logging_v2.handlers._helpers import get_request_data 

32from google.cloud.logging_v2.resource import Resource 

33 

34 

# Log name used in Cloud Logging when the caller does not supply one.
DEFAULT_LOGGER_NAME = "python"

"""Defaults for filtering out noisy loggers"""
# Loggers excluded from setup_logging() by default (chatty request/transport logs).
EXCLUDED_LOGGER_DEFAULTS = (
    "google.api_core.bidi",
    "werkzeug",
)

"""Exclude internal logs from propagating through handlers"""
# This library's own logger namespaces; always excluded to avoid infinite
# recursion (a handler emitting logs that re-enter the handler).
_INTERNAL_LOGGERS = (
    "google.cloud",
    "google.auth",
    "google_auth_httplib2",
)

"""These environments require us to remove extra handlers on setup"""
# Monitored-resource types whose runtimes pre-install their own handlers,
# which setup_logging() clears to prevent duplicate log entries.
_CLEAR_HANDLER_RESOURCE_TYPES = ("gae_app", "cloud_function")

52 

53 

class CloudLoggingFilter(logging.Filter):
    """Python standard ``logging`` Filter class to add Cloud Logging
    information to each LogRecord.

    When attached to a LogHandler, each incoming log will be modified
    to include new Cloud Logging relevant data. This data can be manually
    overwritten using the `extras` argument when writing logs.
    """

    def __init__(self, project=None, default_labels=None):
        """
        Args:
            project (Optional[str]): project id used to expand trace ids
                into full ``projects/<id>/traces/<trace>`` paths.
            default_labels (Optional[dict]): labels attached to every record.
        """
        self.project = project
        self.default_labels = default_labels or {}

    @staticmethod
    def _infer_source_location(record):
        """Helper function to infer source location data from a LogRecord.
        Will default to record.source_location if already set
        """
        if hasattr(record, "source_location"):
            return record.source_location
        # map Cloud Logging field names onto the standard LogRecord attributes
        field_map = (
            ("line", "lineno"),
            ("file", "pathname"),
            ("function", "funcName"),
        )
        location = {
            gcp_field: getattr(record, std_field)
            for gcp_field, std_field in field_map
            if getattr(record, std_field, None) is not None
        }
        return location or None

    def filter(self, record):
        """
        Add new Cloud Logging data to each LogRecord as it comes in
        """
        user_labels = getattr(record, "labels", {})
        # infer request data from the environment
        http, trace, span, sampled = get_request_data()
        if trace is not None and self.project is not None:
            # expand the detected trace id into a full resource path
            trace = f"projects/{self.project}/traces/{trace}"
        # set new record values; explicit per-record values win over inferred ones
        record._resource = getattr(record, "resource", None)
        record._trace = getattr(record, "trace", trace) or None
        record._span_id = getattr(record, "span_id", span) or None
        record._trace_sampled = bool(getattr(record, "trace_sampled", sampled))
        record._http_request = getattr(record, "http_request", http)
        record._source_location = CloudLoggingFilter._infer_source_location(record)
        # add logger name as a label if possible
        logger_label = {"python_logger": record.name} if record.name else {}
        record._labels = {**logger_label, **self.default_labels, **user_labels} or None
        # create string representations for structured logging
        record._trace_str = record._trace or ""
        record._span_id_str = record._span_id or ""
        record._trace_sampled_str = "true" if record._trace_sampled else "false"
        record._http_request_str = json.dumps(
            record._http_request or {}, ensure_ascii=False
        )
        record._source_location_str = json.dumps(
            record._source_location or {}, ensure_ascii=False
        )
        record._labels_str = json.dumps(record._labels or {}, ensure_ascii=False)
        return True

124 

125 

class CloudLoggingHandler(logging.StreamHandler):
    """Handler that directly makes Cloud Logging API calls.

    This is a Python standard ``logging`` handler that can be used to
    route Python standard logging messages directly to the Stackdriver
    Logging API.

    This handler is used when not in GAE or GKE environment.

    This handler supports both an asynchronous and synchronous transport.

    Example:

    .. code-block:: python

        import logging
        import google.cloud.logging
        from google.cloud.logging_v2.handlers import CloudLoggingHandler

        client = google.cloud.logging.Client()
        handler = CloudLoggingHandler(client)

        cloud_logger = logging.getLogger('cloudLogger')
        cloud_logger.setLevel(logging.INFO)
        cloud_logger.addHandler(handler)

        cloud_logger.error('bad news')  # API call
    """

    def __init__(
        self,
        client,
        *,
        name: str = DEFAULT_LOGGER_NAME,
        transport: Type[Transport] = BackgroundThreadTransport,
        resource: Resource = None,
        labels: Optional[dict] = None,
        stream: Optional[IO] = None,
        **kwargs,
    ):
        """
        Args:
            client (~logging_v2.client.Client):
                The authenticated Google Cloud Logging client for this
                handler to use.
            name (str): the name of the custom log in Cloud Logging.
                Defaults to 'python'. The name of the Python logger will be represented
                in the ``python_logger`` field.
            transport (~logging_v2.transports.Transport):
                Class for creating new transport objects. It should
                extend from the base :class:`.Transport` type and
                implement :meth`.Transport.send`. Defaults to
                :class:`.BackgroundThreadTransport`. The other
                option is :class:`.SyncTransport`.
            resource (~logging_v2.resource.Resource):
                Resource for this Handler. If not given, will be inferred from the environment.
            labels (Optional[dict]): Additional labels to attach to logs.
            stream (Optional[IO]): Stream to be used by the handler.
        """
        super(CloudLoggingHandler, self).__init__(stream)
        if not resource:
            # infer the correct monitored resource from the local environment
            resource = detect_resource(client.project)
        self.name = name
        self.client = client
        # register this handler in the client's handler set
        # NOTE(review): presumably so the client can manage attached handlers — confirm against Client
        client._handlers.add(self)
        self.transport = transport(client, name, resource=resource)
        # tracks whether self.transport is live; close() flips this off and
        # emit() lazily rebuilds the transport when it is False
        self._transport_open = True
        # keep the transport class so emit() can re-create a transport after close()
        self._transport_cls = transport
        self.project_id = client.project
        self.resource = resource
        self.labels = labels
        # add extra keys to log record (trace/span/http/labels/source location)
        log_filter = CloudLoggingFilter(project=self.project_id, default_labels=labels)
        self.addFilter(log_filter)

    def emit(self, record):
        """Actually log the specified logging record.

        Overrides the default emit behavior of ``StreamHandler``.

        See https://docs.python.org/2/library/logging.html#handler-objects

        Args:
            record (logging.LogRecord): The record to be logged.
        """
        # record._resource/._labels are set by the CloudLoggingFilter attached
        # in __init__; per-record values take precedence over handler defaults
        resource = record._resource or self.resource
        labels = record._labels
        message = _format_and_parse_message(record, self)

        # merge resource-derived labels; record/handler labels win on conflict
        labels = {**add_resource_labels(resource, record), **(labels or {})} or None

        # send off request
        if not self._transport_open:
            # transport was closed earlier; rebuild it before sending
            self.transport = self._transport_cls(
                self.client, self.name, resource=self.resource
            )
            self._transport_open = True

        self.transport.send(
            record,
            message,
            resource=resource,
            labels=labels,
            trace=record._trace,
            span_id=record._span_id,
            trace_sampled=record._trace_sampled,
            http_request=record._http_request,
            source_location=record._source_location,
        )

    def flush(self):
        """Forces the Transport object to submit any pending log records.

        For SyncTransport, this is a no-op.
        """
        super(CloudLoggingHandler, self).flush()
        # only flush a live transport; after close() there is nothing to flush
        if self._transport_open:
            self.transport.flush()

    def close(self):
        """Closes the log handler and cleans up all Transport objects used."""
        if self._transport_open:
            self.transport.close()
            self.transport = None
            # emit() checks this flag and re-creates a transport if called again
            self._transport_open = False

252 

253 

def _format_and_parse_message(record, formatter_handler):
    """
    Helper function to apply formatting to a LogRecord message,
    and attempt to parse encoded JSON into a dictionary object.

    Resulting output will be of type (str | dict | None)

    Args:
        record (logging.LogRecord): The record object representing the log
        formatter_handler (logging.Handler): The handler used to format the log
    """
    json_fields = getattr(record, "json_fields", {})
    has_json_fields = bool(json_fields) and isinstance(
        json_fields, collections.abc.Mapping
    )
    # dictionary messages become structured payloads directly
    if isinstance(record.msg, collections.abc.Mapping):
        if has_json_fields:
            # overlay any extra json fields onto the message dict
            return {**record.msg, **json_fields}
        return record.msg
    # format message string based on superclass
    message = formatter_handler.format(record)
    if message.startswith("{"):
        # attempt to parse encoded json into dictionary
        try:
            decoded = json.loads(message)
        except json.decoder.JSONDecodeError:
            # log string is not valid json; keep it as-is
            pass
        else:
            if isinstance(decoded, collections.abc.Mapping):
                message = decoded
    # if json_fields was set, create a dictionary using that
    if has_json_fields:
        payload = json_fields.copy()
        if message != "None":
            payload["message"] = message
        return payload
    # if formatted message contains no content, return None
    return message if message != "None" else None

294 

295 

def setup_logging(
    handler, *, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, log_level=logging.INFO
):
    """Attach a logging handler to the Python root logger

    Excludes loggers that this library itself uses to avoid
    infinite recursion.

    Example:

    .. code-block:: python

        import logging
        import google.cloud.logging
        from google.cloud.logging_v2.handlers import CloudLoggingHandler

        client = google.cloud.logging.Client()
        handler = CloudLoggingHandler(client)
        google.cloud.logging.handlers.setup_logging(handler)
        logging.getLogger().setLevel(logging.DEBUG)

        logging.error('bad news')  # API call

    Args:
        handler (logging.handler): the handler to attach to the global handler
        excluded_loggers (Optional[Tuple[str]]): The loggers to not attach the handler
            to. This will always include the loggers in the
            path of the logging client itself.
        log_level (Optional[int]): The logging level threshold of the attached logger,
            as set by the :meth:`logging.Logger.setLevel` method. Defaults to
            :const:`logging.INFO`.
    """
    # always exclude this library's own logger namespaces as well
    excluded = set(excluded_loggers + _INTERNAL_LOGGERS)
    root_logger = logging.getLogger()

    # remove built-in handlers on App Engine or Cloud Functions environments
    if detect_resource().type in _CLEAR_HANDLER_RESOURCE_TYPES:
        root_logger.handlers.clear()

    root_logger.setLevel(log_level)
    root_logger.addHandler(handler)
    for excluded_name in excluded:
        # prevent excluded loggers from propagating logs to handler
        logging.getLogger(excluded_name).propagate = False