Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.10/site-packages/jupyter_server/log.py: 14% (56 statements)


1"""Log utilities.""" 

2 

3# ----------------------------------------------------------------------------- 

4# Copyright (c) Jupyter Development Team 

5# 

6# Distributed under the terms of the BSD License. The full license is in 

7# the file LICENSE, distributed as part of this software. 

8# ----------------------------------------------------------------------------- 

9import json 

10from urllib.parse import urlparse, urlunparse 

11 

12from tornado.log import access_log 

13 

14from .auth import User 

15from .prometheus.log_functions import prometheus_log_method 

16 

17# url params to be scrubbed if seen 

18# any url param that *contains* one of these 

19# will be scrubbed from logs 

20_SCRUB_PARAM_KEYS = {"token", "auth", "key", "code", "state", "xsrf"} 


def _scrub_uri(uri: str) -> str:
    """scrub auth info from uri"""
    parsed = urlparse(uri)
    if parsed.query:
        # check for potentially sensitive url params
        # use manual list + split rather than parsing
        # to minimally perturb original
        parts = parsed.query.split("&")
        changed = False
        for i, s in enumerate(parts):
            key, sep, value = s.partition("=")
            for substring in _SCRUB_PARAM_KEYS:
                if substring in key:
                    parts[i] = f"{key}{sep}[secret]"
                    changed = True
        if changed:
            parsed = parsed._replace(query="&".join(parts))
            return urlunparse(parsed)
    return uri
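

# Illustrative behaviour of _scrub_uri (the example URI is invented for this
# note): any query key that *contains* one of _SCRUB_PARAM_KEYS has its value
# replaced, so a key like "xsrf_token" would be scrubbed too, while a key
# like "view" passes through untouched:
#
#     >>> _scrub_uri("/api/contents?token=abc123&view=tree")
#     '/api/contents?token=[secret]&view=tree'
#
# Only the value is replaced; the key and the rest of the query stay intact
# so the logged URI remains recognizable.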


def log_request(handler, record_prometheus_metrics=True):
    """log a bit more information about each request than tornado's default

    - move static file get success to debug-level (reduces noise)
    - get proxied IP instead of proxy IP
    - log referer for redirect and failed requests
    - log user-agent for failed requests

    if record_prometheus_metrics is true, will record a histogram prometheus
    metric (http_request_duration_seconds) for each request handler
    """
    status = handler.get_status()
    request = handler.request
    try:
        logger = handler.log
    except AttributeError:
        logger = access_log

    if status < 300 or status == 304:
        # Successes (or 304 NOT MODIFIED) are debug-level
        log_method = logger.debug
    elif status < 400:
        log_method = logger.info
    elif status < 500:
        log_method = logger.warning
    else:
        log_method = logger.error

    request_time = 1000.0 * handler.request.request_time()
    ns = {
        "status": status,
        "method": request.method,
        "ip": request.remote_ip,
        "uri": _scrub_uri(request.uri),
        "request_time": request_time,
    }
    # log username
    # make sure we don't break anything
    # in case mixins cause current_user to not be a User somehow
    try:
        user = handler.current_user
    except Exception:
        user = None
    username = (user.username if isinstance(user, User) else "unknown") if user else ""
    ns["username"] = username

    msg = "{status} {method} {uri} ({username}@{ip}) {request_time:.2f}ms"
    if status >= 400:
        # log bad referrers
        ns["referer"] = _scrub_uri(request.headers.get("Referer", "None"))
        msg = msg + " referer={referer}"
    if status >= 500 and status != 502:
        # Log a subset of the headers if it caused an error.
        headers = {}
        for header in ["Host", "Accept", "Referer", "User-Agent"]:
            if header in request.headers:
                headers[header] = request.headers[header]
        log_method(json.dumps(headers, indent=2))
    log_method(msg.format(**ns))
    if record_prometheus_metrics:
        prometheus_log_method(handler)
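

# For context: log_request is meant to be installed as tornado's per-request
# logging hook, via the documented ``log_function`` application setting
# (jupyter_server does this wiring in its own ServerApp). A minimal,
# self-contained sketch under that assumption; PingHandler, the route, and
# the port are invented for illustration and are not part of this module.
if __name__ == "__main__":
    from functools import partial

    import tornado.ioloop
    import tornado.web

    class PingHandler(tornado.web.RequestHandler):
        def get(self):
            self.write("pong")

    app = tornado.web.Application(
        [(r"/ping", PingHandler)],
        # tornado calls log_function(handler) once per completed request;
        # partial() demonstrates the knob for skipping prometheus metrics.
        log_function=partial(log_request, record_prometheus_metrics=False),
    )
    app.listen(8888)
    tornado.ioloop.IOLoop.current().start()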