Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/IPython/utils/tokenutil.py: 13%

60 statements  

coverage.py v7.4.4, created at 2024-04-20 06:09 +0000

"""Token-related utilities"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

from collections import namedtuple
from io import StringIO
from keyword import iskeyword

import tokenize


Token = namedtuple('Token', ['token', 'text', 'start', 'end', 'line'])


def generate_tokens(readline):
    """wrap generate_tokens to catch EOF errors"""
    try:
        for token in tokenize.generate_tokens(readline):
            yield token
    except tokenize.TokenError:
        # catch EOF error
        return

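A quick usage sketch (not part of the module source; it assumes the module is importable as IPython.utils.tokenutil, per the path above, and CPython 3.8's tokenize behaviour): the wrapper stops yielding on incomplete input instead of raising.

    from io import StringIO
    from IPython.utils.tokenutil import generate_tokens

    # "foo(1 + " is an unterminated call: plain tokenize.generate_tokens would
    # eventually raise tokenize.TokenError at EOF; the wrapper swallows it and
    # just yields the tokens it saw before running out of input.
    toks = list(generate_tokens(StringIO("foo(1 + ").readline))
    print([t.string for t in toks])   # e.g. ['foo', '(', '1', '+']
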

def line_at_cursor(cell, cursor_pos=0):
    """Return the line in a cell at a given cursor position

    Used for calling line-based APIs that don't support multi-line input, yet.

    Parameters
    ----------
    cell : str
        multiline block of text
    cursor_pos : integer
        the cursor position

    Returns
    -------
    (line, offset): (string, integer)
        The line with the current cursor, and the character offset of the start of the line.
    """
    offset = 0
    lines = cell.splitlines(True)
    for line in lines:
        next_offset = offset + len(line)
        if not line.endswith('\n'):
            # If the last line doesn't have a trailing newline, treat it as if
            # it does so that the cursor at the end of the line still counts
            # as being on that line.
            next_offset += 1
        if next_offset > cursor_pos:
            break
        offset = next_offset
    else:
        line = ""
    return (line, offset)

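An illustrative call (not part of the module source; the expected values follow from the offset bookkeeping above, and the import assumes the module path shown in the header):

    from IPython.utils.tokenutil import line_at_cursor

    # cursor index 13 falls inside the second line, which starts at offset 11
    # ("first line" plus its newline is 11 characters long)
    line, offset = line_at_cursor("first line\nsecond line", cursor_pos=13)
    print(repr(line), offset)   # expected: 'second line' 11
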

def token_at_cursor(cell, cursor_pos=0):
    """Get the token at a given cursor

    Used for introspection.

    Function calls are prioritized, so the token for the callable will be returned
    if the cursor is anywhere inside the call.

    Parameters
    ----------
    cell : unicode
        A block of Python code
    cursor_pos : int
        The location of the cursor in the block where the token should be found
    """
    names = []
    tokens = []
    call_names = []

    offsets = {1: 0} # lines start at 1
    for tup in generate_tokens(StringIO(cell).readline):

        tok = Token(*tup)

        # token, text, start, end, line = tup
        start_line, start_col = tok.start
        end_line, end_col = tok.end
        if end_line + 1 not in offsets:
            # keep track of offsets for each line
            lines = tok.line.splitlines(True)
            for lineno, line in enumerate(lines, start_line + 1):
                if lineno not in offsets:
                    offsets[lineno] = offsets[lineno-1] + len(line)

        offset = offsets[start_line]
        # allow '|foo' to find 'foo' at the beginning of a line
        boundary = cursor_pos + 1 if start_col == 0 else cursor_pos
        if offset + start_col >= boundary:
            # current token starts after the cursor,
            # don't consume it
            break

        if tok.token == tokenize.NAME and not iskeyword(tok.text):
            if names and tokens and tokens[-1].token == tokenize.OP and tokens[-1].text == '.':
                names[-1] = "%s.%s" % (names[-1], tok.text)
            else:
                names.append(tok.text)
        elif tok.token == tokenize.OP:
            if tok.text == '=' and names:
                # don't inspect the lhs of an assignment
                names.pop(-1)
            if tok.text == '(' and names:
                # if we are inside a function call, inspect the function
                call_names.append(names[-1])
            elif tok.text == ')' and call_names:
                call_names.pop(-1)

        tokens.append(tok)

        if offsets[end_line] + end_col > cursor_pos:
            # we found the cursor, stop reading
            break

    if call_names:
        return call_names[-1]
    elif names:
        return names[-1]
    else:
        return ''
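An illustrative check of the call prioritization described in the docstring (not part of the module source; the expected results were traced by hand against the loop above, and the import again assumes the module path shown in the header):

    from IPython.utils.tokenutil import token_at_cursor

    # cursor_pos=5 sits on "bar", but because the cursor is inside the
    # parentheses, the callable "foo" is what gets returned for introspection
    print(token_at_cursor("foo(bar, baz)", cursor_pos=5))   # expected: foo

    # outside a call, the (dotted) name ending at the cursor is returned
    print(token_at_cursor("a.b.c", cursor_pos=5))           # expected: a.b.c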