Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/google/cloud/logging_v2/handlers/handlers.py: 22%

98 statements  

« prev     ^ index     » next       coverage.py v7.2.2, created at 2023-03-26 07:30 +0000

1# Copyright 2016 Google LLC 

2# 

3# Licensed under the Apache License, Version 2.0 (the "License"); 

4# you may not use this file except in compliance with the License. 

5# You may obtain a copy of the License at 

6# 

7# http://www.apache.org/licenses/LICENSE-2.0 

8# 

9# Unless required by applicable law or agreed to in writing, software 

10# distributed under the License is distributed on an "AS IS" BASIS, 

11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 

12# See the License for the specific language governing permissions and 

13# limitations under the License. 

14 

15"""Python :mod:`logging` handlers for Cloud Logging.""" 

16 

17import collections 

18import json 

19import logging 

20 

21from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport 

22from google.cloud.logging_v2.handlers._monitored_resources import detect_resource 

23from google.cloud.logging_v2.handlers._helpers import get_request_data 

24 

# Fallback Cloud Logging log name used by CloudLoggingHandler when the
# caller does not pass an explicit ``name``.
DEFAULT_LOGGER_NAME = "python"

"""Exclude internal logs from propagating through handlers"""
# setup_logging() disables propagation for these logger names so the
# library's own (and its transport stack's) logs never re-enter the handler.
EXCLUDED_LOGGER_DEFAULTS = (
    "google.cloud",
    "google.auth",
    "google_auth_httplib2",
    "google.api_core.bidi",
    "werkzeug",
)

"""These environments require us to remove extra handlers on setup"""
# Monitored-resource types whose runtimes pre-install root handlers;
# setup_logging() clears the root logger's handlers when one is detected.
_CLEAR_HANDLER_RESOURCE_TYPES = ("gae_app", "cloud_function")

"""Extra trace label to be added on App Engine environments"""
# CloudLoggingHandler.emit() duplicates the trace id into this label on GAE.
_GAE_TRACE_ID_LABEL = "appengine.googleapis.com/trace_id"

"""Resource name for App Engine environments"""
_GAE_RESOURCE_TYPE = "gae_app"

44 

45 

class CloudLoggingFilter(logging.Filter):
    """Python standard ``logging`` Filter class to add Cloud Logging
    information to each LogRecord.

    When attached to a LogHandler, each incoming log will be modified
    to include new Cloud Logging relevant data. This data can be manually
    overwritten using the `extras` argument when writing logs.
    """

    def __init__(self, project=None, default_labels=None):
        """
        Args:
            project (Optional[str]): project id used to build the fully
                qualified trace path (``projects/<project>/traces/<id>``).
            default_labels (Optional[dict]): labels attached to every record;
                per-record ``labels`` extras override these on key conflicts.
        """
        # Fix: initialize the logging.Filter base so its attributes
        # (``name``/``nlen``) exist; the original omitted this call.
        super().__init__()
        self.project = project
        self.default_labels = default_labels if default_labels else {}

    @staticmethod
    def _infer_source_location(record):
        """Helper function to infer source location data from a LogRecord.

        Will default to ``record.source_location`` if already set; otherwise
        builds a dict from the record's ``lineno``/``pathname``/``funcName``
        attributes, renamed to the Cloud Logging field names.

        Returns:
            Optional[dict]: source location data, or None if nothing is known.
        """
        if hasattr(record, "source_location"):
            return record.source_location
        else:
            name_map = [
                ("line", "lineno"),
                ("file", "pathname"),
                ("function", "funcName"),
            ]
            output = {}
            for (gcp_name, std_lib_name) in name_map:
                value = getattr(record, std_lib_name, None)
                if value is not None:
                    output[gcp_name] = value
            return output if output else None

    def filter(self, record):
        """
        Add new Cloud Logging data to each LogRecord as it comes in.

        Always returns True (never drops records); it only annotates the
        record with ``_trace``, ``_span_id``, ``_http_request``, ``_labels``,
        ``_source_location`` and their string representations.
        """
        user_labels = getattr(record, "labels", {})
        # infer request data from the environment
        (
            inferred_http,
            inferred_trace,
            inferred_span,
            inferred_sampled,
        ) = get_request_data()
        if inferred_trace is not None and self.project is not None:
            # add full path for detected trace
            inferred_trace = f"projects/{self.project}/traces/{inferred_trace}"
        # set new record values; explicit record attributes win over inferred
        record._resource = getattr(record, "resource", None)
        record._trace = getattr(record, "trace", inferred_trace) or None
        record._span_id = getattr(record, "span_id", inferred_span) or None
        record._trace_sampled = bool(getattr(record, "trace_sampled", inferred_sampled))
        record._http_request = getattr(record, "http_request", inferred_http)
        record._source_location = CloudLoggingFilter._infer_source_location(record)
        # add logger name as a label if possible
        logger_label = {"python_logger": record.name} if record.name else {}
        record._labels = {**logger_label, **self.default_labels, **user_labels} or None
        # create string representations for structured logging
        record._trace_str = record._trace or ""
        record._span_id_str = record._span_id or ""
        record._trace_sampled_str = "true" if record._trace_sampled else "false"
        record._http_request_str = json.dumps(
            record._http_request or {}, ensure_ascii=False
        )
        record._source_location_str = json.dumps(
            record._source_location or {}, ensure_ascii=False
        )
        record._labels_str = json.dumps(record._labels or {}, ensure_ascii=False)
        return True

116 

117 

class CloudLoggingHandler(logging.StreamHandler):
    """A ``logging`` handler that forwards records to the Cloud Logging API.

    Standard Python log records routed through this handler are shipped to
    Cloud Logging via the configured transport — asynchronous background
    thread by default, or synchronous. Intended for environments other than
    GAE/GKE.

    Example:

    .. code-block:: python

        import logging
        import google.cloud.logging
        from google.cloud.logging_v2.handlers import CloudLoggingHandler

        client = google.cloud.logging.Client()
        handler = CloudLoggingHandler(client)

        cloud_logger = logging.getLogger('cloudLogger')
        cloud_logger.setLevel(logging.INFO)
        cloud_logger.addHandler(handler)

        cloud_logger.error('bad news')  # API call
    """

    def __init__(
        self,
        client,
        *,
        name=DEFAULT_LOGGER_NAME,
        transport=BackgroundThreadTransport,
        resource=None,
        labels=None,
        stream=None,
    ):
        """
        Args:
            client (~logging_v2.client.Client): authenticated Cloud Logging
                client used by this handler.
            name (str): name of the custom log in Cloud Logging; defaults to
                'python'. The Python logger's own name is carried separately
                in the ``python_logger`` label.
            transport (~logging_v2.transports.Transport): transport class to
                instantiate; must extend :class:`.Transport` and implement
                :meth:`.Transport.send`. Defaults to
                :class:`.BackgroundThreadTransport`; the alternative is
                :class:`.SyncTransport`.
            resource (~logging_v2.resource.Resource): monitored resource for
                this handler; inferred from the environment when omitted.
            labels (Optional[dict]): labels attached to every log entry.
            stream (Optional[IO]): stream handed to the StreamHandler base.
        """
        super().__init__(stream)
        # Fall back to a monitored resource detected from the environment.
        resource = resource or detect_resource(client.project)
        self.name = name
        self.client = client
        self.transport = transport(client, name, resource=resource)
        self.project_id = client.project
        self.resource = resource
        self.labels = labels
        # The filter annotates each record with the trace/span/label
        # metadata that emit() reads back off the record.
        self.addFilter(CloudLoggingFilter(project=self.project_id, default_labels=labels))

    def emit(self, record):
        """Send the given record to Cloud Logging through the transport.

        Overrides the default ``StreamHandler`` emit behavior; relies on the
        ``_trace``/``_labels``/... attributes that ``CloudLoggingFilter``
        attached to the record.

        Args:
            record (logging.LogRecord): the record to be logged.
        """
        entry_resource = record._resource or self.resource
        entry_labels = record._labels
        payload = _format_and_parse_message(record, self)

        if entry_resource.type == _GAE_RESOURCE_TYPE and record._trace is not None:
            # App Engine expects the trace id duplicated into a label.
            entry_labels = {_GAE_TRACE_ID_LABEL: record._trace, **(entry_labels or {})}
        # hand the annotated record off to the configured transport
        self.transport.send(
            record,
            payload,
            resource=entry_resource,
            labels=entry_labels,
            trace=record._trace,
            span_id=record._span_id,
            trace_sampled=record._trace_sampled,
            http_request=record._http_request,
            source_location=record._source_location,
        )

219 

220 

221def _format_and_parse_message(record, formatter_handler): 

222 """ 

223 Helper function to apply formatting to a LogRecord message, 

224 and attempt to parse encoded JSON into a dictionary object. 

225 

226 Resulting output will be of type (str | dict | None) 

227 

228 Args: 

229 record (logging.LogRecord): The record object representing the log 

230 formatter_handler (logging.Handler): The handler used to format the log 

231 """ 

232 passed_json_fields = getattr(record, "json_fields", {}) 

233 # if message is a dictionary, use dictionary directly 

234 if isinstance(record.msg, collections.abc.Mapping): 

235 payload = record.msg 

236 # attach any extra json fields if present 

237 if passed_json_fields and isinstance( 

238 passed_json_fields, collections.abc.Mapping 

239 ): 

240 payload = {**payload, **passed_json_fields} 

241 return payload 

242 # format message string based on superclass 

243 message = formatter_handler.format(record) 

244 try: 

245 # attempt to parse encoded json into dictionary 

246 if message[0] == "{": 

247 json_message = json.loads(message) 

248 if isinstance(json_message, collections.abc.Mapping): 

249 message = json_message 

250 except (json.decoder.JSONDecodeError, IndexError): 

251 # log string is not valid json 

252 pass 

253 # if json_fields was set, create a dictionary using that 

254 if passed_json_fields and isinstance(passed_json_fields, collections.abc.Mapping): 

255 passed_json_fields = passed_json_fields.copy() 

256 if message != "None": 

257 passed_json_fields["message"] = message 

258 return passed_json_fields 

259 # if formatted message contains no content, return None 

260 return message if message != "None" else None 

261 

262 

def setup_logging(
    handler, *, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, log_level=logging.INFO
):
    """Attach a logging handler to the Python root logger

    Excludes loggers that this library itself uses to avoid
    infinite recursion.

    Example:

    .. code-block:: python

        import logging
        import google.cloud.logging
        from google.cloud.logging_v2.handlers import CloudLoggingHandler

        client = google.cloud.logging.Client()
        handler = CloudLoggingHandler(client)
        google.cloud.logging.handlers.setup_logging(handler)
        logging.getLogger().setLevel(logging.DEBUG)

        logging.error('bad news')  # API call

    Args:
        handler (logging.Handler): the handler to attach to the root logger
        excluded_loggers (Optional[Iterable[str]]): the loggers to not attach
            the handler to. This will always include the loggers in the
            path of the logging client itself (EXCLUDED_LOGGER_DEFAULTS).
        log_level (Optional[int]): Python logging log level. Defaults to
            :const:`logging.INFO`.
    """
    # Fix: accept any iterable of logger names; the original tuple
    # concatenation (`excluded_loggers + EXCLUDED_LOGGER_DEFAULTS`) raised
    # TypeError when callers passed a list or set.
    all_excluded_loggers = set(excluded_loggers) | set(EXCLUDED_LOGGER_DEFAULTS)
    root_logger = logging.getLogger()

    # remove built-in handlers on App Engine or Cloud Functions environments,
    # which pre-install handlers that would duplicate output
    if detect_resource().type in _CLEAR_HANDLER_RESOURCE_TYPES:
        root_logger.handlers.clear()

    root_logger.setLevel(log_level)
    root_logger.addHandler(handler)
    for logger_name in all_excluded_loggers:
        # prevent excluded loggers from propagating logs to handler
        logging.getLogger(logger_name).propagate = False