Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/markdown_it/rules_inline/strikethrough.py: 7%

76 statements  

coverage.py v7.2.7, created at 2023-06-07 06:15 +0000

# ~~strike through~~
from __future__ import annotations

from .state_inline import Delimiter, StateInline


def tokenize(state: StateInline, silent: bool) -> bool:
    """Insert each marker as a separate text token, and add it to delimiter list"""
    start = state.pos
    ch = state.src[start]

    if silent:
        return False

    if ch != "~":
        return False

    scanned = state.scanDelims(state.pos, True)
    length = scanned.length

    if length < 2:
        return False

    if length % 2:
        token = state.push("text", "", 0)
        token.content = ch
        length -= 1

    i = 0
    while i < length:
        token = state.push("text", "", 0)
        token.content = ch + ch
        state.delimiters.append(
            Delimiter(
                marker=ord(ch),
                length=0,  # disable "rule of 3" length checks meant for emphasis
                token=len(state.tokens) - 1,
                end=-1,
                open=scanned.can_open,
                close=scanned.can_close,
            )
        )

        i += 2

    state.pos += scanned.length

    return True


def _postProcess(state: StateInline, delimiters: list[Delimiter]) -> None:
    loneMarkers = []
    maximum = len(delimiters)

    i = 0
    while i < maximum:
        startDelim = delimiters[i]

        if startDelim.marker != 0x7E:  # /* ~ */
            i += 1
            continue

        if startDelim.end == -1:
            i += 1
            continue

        endDelim = delimiters[startDelim.end]

        token = state.tokens[startDelim.token]
        token.type = "s_open"
        token.tag = "s"
        token.nesting = 1
        token.markup = "~~"
        token.content = ""

        token = state.tokens[endDelim.token]
        token.type = "s_close"
        token.tag = "s"
        token.nesting = -1
        token.markup = "~~"
        token.content = ""

        if (
            state.tokens[endDelim.token - 1].type == "text"
            and state.tokens[endDelim.token - 1].content == "~"
        ):
            loneMarkers.append(endDelim.token - 1)

        i += 1

    # If a marker sequence has an odd number of characters, it's split
    # like this: `~~~~~` -> `~` + `~~` + `~~`, leaving one marker at the
    # start of the sequence.
    #
    # So, we have to move all those markers after subsequent s_close tags.
    #
    while loneMarkers:
        i = loneMarkers.pop()
        j = i + 1

        while (j < len(state.tokens)) and (state.tokens[j].type == "s_close"):
            j += 1

        j -= 1

        if i != j:
            token = state.tokens[j]
            state.tokens[j] = state.tokens[i]
            state.tokens[i] = token


def postProcess(state: StateInline) -> None:
    """Walk through delimiter list and replace text tokens with tags."""
    tokens_meta = state.tokens_meta
    maximum = len(state.tokens_meta)
    _postProcess(state, state.delimiters)

    curr = 0
    while curr < maximum:
        try:
            curr_meta = tokens_meta[curr]
        except IndexError:
            pass
        else:
            if curr_meta and "delimiters" in curr_meta:
                _postProcess(state, curr_meta["delimiters"])
        curr += 1
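
For context, a minimal usage sketch follows (it is not part of the module above). It assumes these rules are registered in markdown-it-py's inline ruler under the name "strikethrough", which the commonmark preset leaves disabled; the expected outputs in the comments are illustrative.

# Usage sketch (illustrative, not part of strikethrough.py)
from markdown_it import MarkdownIt

# Enable the strikethrough rule on top of the commonmark preset (assumed rule name).
md = MarkdownIt("commonmark").enable("strikethrough")

# tokenize() pushes the "~~" runs as text tokens plus delimiters; postProcess()
# then rewrites the matched pair into s_open / s_close tags.
print(md.render("~~struck~~"))        # expected: <p><s>struck</s></p>

# An odd-length run leaves a lone "~" that _postProcess moves past the s_close.
print(md.render("~~~struck~~~"))      # expected: <p>~<s>struck</s>~</p>

# The inline token's children show the rewritten token types.
tokens = md.parse("~~struck~~")
print([t.type for t in tokens[1].children])   # expected: ['s_open', 'text', 's_close']

Note that tokenize sets Delimiter.length to 0 so the emphasis-style "rule of 3" length check never rejects a strikethrough pairing.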