Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/markdown_it/rules_inline/strikethrough.py: 97%

77 statements  

coverage.py v7.2.7, created at 2023-06-07 06:07 +0000

# ~~strike through~~
from __future__ import annotations

from .state_inline import Delimiter, StateInline


def tokenize(state: StateInline, silent: bool):
    """Insert each marker as a separate text token, and add it to delimiter list"""
    start = state.pos
    marker = state.srcCharCode[start]

    if silent:
        return False

    if marker != 0x7E:  # /* ~ */
        return False

    scanned = state.scanDelims(state.pos, True)
    length = scanned.length
    ch = chr(marker)

    if length < 2:
        return False

    if length % 2:
        token = state.push("text", "", 0)
        token.content = ch
        length -= 1

    i = 0
    while i < length:
        token = state.push("text", "", 0)
        token.content = ch + ch
        state.delimiters.append(
            Delimiter(
                **{
                    "marker": marker,
                    "length": 0,  # disable "rule of 3" length checks meant for emphasis
                    "jump": i // 2,  # for `~~` 1 marker = 2 characters
                    "token": len(state.tokens) - 1,
                    "end": -1,
                    "open": scanned.can_open,
                    "close": scanned.can_close,
                }
            )
        )

        i += 2

    state.pos += scanned.length

    return True
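A minimal standalone sketch of the splitting arithmetic used in tokenize above: a run of tildes becomes one lone `~` text token when its length is odd, followed by `~~` pairs that go onto the delimiter list. The helper name split_tilde_run is hypothetical and not part of markdown_it.

# Illustrative sketch only -- not part of this module.
def split_tilde_run(length: int) -> list[str]:
    pieces: list[str] = []
    if length < 2:
        return pieces              # a single "~" can never open strikethrough
    if length % 2:                 # odd run: the leading "~" stays plain text
        pieces.append("~")
        length -= 1
    pieces.extend(["~~"] * (length // 2))  # each remaining pair is a delimiter
    return pieces

# split_tilde_run(5) == ["~", "~~", "~~"], matching the `~~~~~` example in the
# comment inside _postProcess below.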

def _postProcess(state: StateInline, delimiters: list[Delimiter]):
    loneMarkers = []
    maximum = len(delimiters)

    i = 0
    while i < maximum:
        startDelim = delimiters[i]

        if startDelim.marker != 0x7E:  # /* ~ */
            i += 1
            continue

        if startDelim.end == -1:
            i += 1
            continue

        endDelim = delimiters[startDelim.end]

        token = state.tokens[startDelim.token]
        token.type = "s_open"
        token.tag = "s"
        token.nesting = 1
        token.markup = "~~"
        token.content = ""

        token = state.tokens[endDelim.token]
        token.type = "s_close"
        token.tag = "s"
        token.nesting = -1
        token.markup = "~~"
        token.content = ""

        if (
            state.tokens[endDelim.token - 1].type == "text"
            and state.tokens[endDelim.token - 1].content == "~"
        ):
            loneMarkers.append(endDelim.token - 1)

        i += 1

    # If a marker sequence has an odd number of characters, it's split
    # like this: `~~~~~` -> `~` + `~~` + `~~`, leaving one marker at the
    # start of the sequence.
    #
    # So, we have to move all those markers after subsequent s_close tags.
    #
    while loneMarkers:
        i = loneMarkers.pop()
        j = i + 1

        while (j < len(state.tokens)) and (state.tokens[j].type == "s_close"):
            j += 1

        j -= 1

        if i != j:
            token = state.tokens[j]
            state.tokens[j] = state.tokens[i]
            state.tokens[i] = token
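The token swap at the end of _postProcess can be hard to visualise. Below is a toy sketch (plain strings stand in for inline Token objects; it is not part of the module) of the same relocation: a lone "~" left in front of the closing tags is moved past the run of consecutive closers.

# Toy sketch only -- plain strings instead of Token objects.
toks = ["<s>", "foo", "~", "</s>", "</s>"]

i = toks.index("~")                    # position of the lone marker
j = i + 1
while j < len(toks) and toks[j] == "</s>":
    j += 1                             # skip past every consecutive closing tag
j -= 1

if i != j:
    toks[i], toks[j] = toks[j], toks[i]

# toks is now ["<s>", "foo", "</s>", "</s>", "~"]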

def postProcess(state: StateInline):
    """Walk through delimiter list and replace text tokens with tags."""
    tokens_meta = state.tokens_meta
    maximum = len(state.tokens_meta)
    _postProcess(state, state.delimiters)

    curr = 0
    while curr < maximum:
        try:
            curr_meta = tokens_meta[curr]
        except IndexError:
            pass
        else:
            if curr_meta and "delimiters" in curr_meta:
                _postProcess(state, curr_meta["delimiters"])
        curr += 1
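For reference, a short end-to-end usage sketch of this rule, assuming markdown-it-py is installed; the default commonmark preset leaves strikethrough disabled, so it is enabled explicitly here.

# Usage sketch (assumes markdown-it-py is installed); exact whitespace in the
# output may differ, but ~~...~~ should come back as an <s> element.
from markdown_it import MarkdownIt

md = MarkdownIt("commonmark").enable("strikethrough")
print(md.render("I ~~was~~ am here."))
# expected output: <p>I <s>was</s> am here.</p>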