Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/jupyter_server/log.py: 15%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

55 statements  

1"""Log utilities.""" 

2 

3# ----------------------------------------------------------------------------- 

4# Copyright (c) Jupyter Development Team 

5# 

6# Distributed under the terms of the BSD License. The full license is in 

7# the file LICENSE, distributed as part of this software. 

8# ----------------------------------------------------------------------------- 

9import json 

10from urllib.parse import urlparse, urlunparse 

11 

12from tornado.log import access_log 

13 

14from .auth import User 

15from .prometheus.log_functions import prometheus_log_method 

16 

# url params to be scrubbed if seen
# any url param whose name *contains* one of these
# substrings will be scrubbed from logs
_SCRUB_PARAM_KEYS = {"token", "auth", "key", "code", "state", "xsrf"}


def _scrub_uri(uri: str) -> str:
    """Return *uri* with potentially sensitive query-param values masked.

    A parameter is considered sensitive when its name contains any of the
    substrings in ``_SCRUB_PARAM_KEYS``; its value is replaced with the
    literal ``[secret]``.  The query string is split/re-joined manually
    rather than fully parsed so the original URI is minimally perturbed.
    """
    parsed = urlparse(uri)
    if not parsed.query:
        return uri
    pieces = parsed.query.split("&")
    dirty = False
    for index, piece in enumerate(pieces):
        name, eq, _value = piece.partition("=")
        if any(fragment in name for fragment in _SCRUB_PARAM_KEYS):
            pieces[index] = f"{name}{eq}[secret]"
            dirty = True
    if dirty:
        return urlunparse(parsed._replace(query="&".join(pieces)))
    return uri

42 

43 

def log_request(handler):
    """log a bit more information about each request than tornado's default

    - move static file get success to debug-level (reduces noise)
    - get proxied IP instead of proxy IP
    - log referer for redirect and failed requests
    - log user-agent for failed requests
    """
    status = handler.get_status()
    request = handler.request
    # prefer the handler's own logger; fall back to tornado's access log
    logger = getattr(handler, "log", access_log)

    # pick a log level from the status class
    if status >= 500:
        log_method = logger.error
    elif status >= 400:
        log_method = logger.warning
    elif status >= 300 and status != 304:
        # redirects are info-level
        log_method = logger.info
    else:
        # successes (and 304 Not Modified) are debug-level
        log_method = logger.debug

    elapsed_ms = 1000.0 * request.request_time()
    fields = {
        "status": status,
        "method": request.method,
        "ip": request.remote_ip,
        "uri": _scrub_uri(request.uri),
        "request_time": elapsed_ms,
    }
    # log username, defensively:
    # mixins may cause current_user to raise or not be a User
    try:
        current = handler.current_user
    except Exception:
        current = None
    if not current:
        fields["username"] = ""
    elif isinstance(current, User):
        fields["username"] = current.username
    else:
        fields["username"] = "unknown"

    template = "{status} {method} {uri} ({username}@{ip}) {request_time:.2f}ms"
    if status >= 400:
        # log (scrubbed) referrers on failures
        fields["referer"] = _scrub_uri(request.headers.get("Referer", "None"))
        template = template + " referer={referer}"
    if status >= 500 and status != 502:
        # on server errors, also dump a subset of request headers
        interesting = {
            name: request.headers[name]
            for name in ("Host", "Accept", "Referer", "User-Agent")
            if name in request.headers
        }
        log_method(json.dumps(interesting, indent=2))
    log_method(template.format(**fields))
    prometheus_log_method(handler)