# Process *this* and _that_
#
    3from __future__ import annotations 
    4 
    5from .state_inline import Delimiter, StateInline 
    6 
    7 
    8def tokenize(state: StateInline, silent: bool) -> bool: 
    9    """Insert each marker as a separate text token, and add it to delimiter list""" 
    10    start = state.pos 
    11    marker = state.src[start] 
    12 
    13    if silent: 
    14        return False 
    15 
    16    if marker not in ("_", "*"): 
    17        return False 
    18 
    19    scanned = state.scanDelims(state.pos, marker == "*") 
    20 
    21    for _ in range(scanned.length): 
    22        token = state.push("text", "", 0) 
    23        token.content = marker 
    24        state.delimiters.append( 
    25            Delimiter( 
    26                marker=ord(marker), 
    27                length=scanned.length, 
    28                token=len(state.tokens) - 1, 
    29                end=-1, 
    30                open=scanned.can_open, 
    31                close=scanned.can_close, 
    32            ) 
    33        ) 
    34 
    35    state.pos += scanned.length 
    36 
    37    return True 
    38 
    39 
def _postProcess(state: StateInline, delimiters: list[Delimiter]) -> None:
    """Rewrite matched emphasis delimiter text tokens as em/strong tags.

    Walks *delimiters* backwards; when two same-marker pairs sit directly
    adjacent (``**x**`` parsed as nested ``<em><em>``), they are merged
    into a single ``<strong>`` pair instead.
    """
    i = len(delimiters) - 1
    while i >= 0:
        startDelim = delimiters[i]

        # /* _ */  /* * */
        # Skip delimiters produced by other rules (only 0x5F '_' / 0x2A '*').
        if startDelim.marker != 0x5F and startDelim.marker != 0x2A:
            i -= 1
            continue

        # Process only opening markers; `end == -1` means unmatched or closer.
        if startDelim.end == -1:
            i -= 1
            continue

        endDelim = delimiters[startDelim.end]

        # If the previous delimiter has the same marker and is adjacent to this one,
        # merge those into one strong delimiter.
        #
        # `<em><em>whatever</em></em>` -> `<strong>whatever</strong>`
        #
        isStrong = (
            i > 0
            and delimiters[i - 1].end == startDelim.end + 1
            # check that first two markers match and adjacent
            and delimiters[i - 1].marker == startDelim.marker
            and delimiters[i - 1].token == startDelim.token - 1
            # check that last two markers are adjacent (we can safely assume they match)
            and delimiters[startDelim.end + 1].token == endDelim.token + 1
        )

        ch = chr(startDelim.marker)

        # Re-purpose the opening marker's text token as the open tag in place.
        token = state.tokens[startDelim.token]
        token.type = "strong_open" if isStrong else "em_open"
        token.tag = "strong" if isStrong else "em"
        token.nesting = 1
        token.markup = ch + ch if isStrong else ch
        token.content = ""

        # ...and the closing marker's text token as the close tag.
        token = state.tokens[endDelim.token]
        token.type = "strong_close" if isStrong else "em_close"
        token.tag = "strong" if isStrong else "em"
        token.nesting = -1
        token.markup = ch + ch if isStrong else ch
        token.content = ""

        if isStrong:
            # The outer pair's text tokens are absorbed into the two-char
            # markup above, so blank them and skip the extra opener.
            state.tokens[delimiters[i - 1].token].content = ""
            state.tokens[delimiters[startDelim.end + 1].token].content = ""
            i -= 1

        i -= 1
    94 
    95 
    96def postProcess(state: StateInline) -> None: 
    97    """Walk through delimiter list and replace text tokens with tags.""" 
    98    _postProcess(state, state.delimiters) 
    99 
    100    for token in state.tokens_meta: 
    101        if token and "delimiters" in token: 
    102            _postProcess(state, token["delimiters"])