# strikethrough.py
# ~~strike through~~
from __future__ import annotations

from .state_inline import Delimiter, StateInline
  4. def tokenize(state: StateInline, silent: bool) -> bool:
  5. """Insert each marker as a separate text token, and add it to delimiter list"""
  6. start = state.pos
  7. ch = state.src[start]
  8. if silent:
  9. return False
  10. if ch != "~":
  11. return False
  12. scanned = state.scanDelims(state.pos, True)
  13. length = scanned.length
  14. if length < 2:
  15. return False
  16. if length % 2:
  17. token = state.push("text", "", 0)
  18. token.content = ch
  19. length -= 1
  20. i = 0
  21. while i < length:
  22. token = state.push("text", "", 0)
  23. token.content = ch + ch
  24. state.delimiters.append(
  25. Delimiter(
  26. marker=ord(ch),
  27. length=0, # disable "rule of 3" length checks meant for emphasis
  28. token=len(state.tokens) - 1,
  29. end=-1,
  30. open=scanned.can_open,
  31. close=scanned.can_close,
  32. )
  33. )
  34. i += 2
  35. state.pos += scanned.length
  36. return True
  37. def _postProcess(state: StateInline, delimiters: list[Delimiter]) -> None:
  38. loneMarkers = []
  39. maximum = len(delimiters)
  40. i = 0
  41. while i < maximum:
  42. startDelim = delimiters[i]
  43. if startDelim.marker != 0x7E: # /* ~ */
  44. i += 1
  45. continue
  46. if startDelim.end == -1:
  47. i += 1
  48. continue
  49. endDelim = delimiters[startDelim.end]
  50. token = state.tokens[startDelim.token]
  51. token.type = "s_open"
  52. token.tag = "s"
  53. token.nesting = 1
  54. token.markup = "~~"
  55. token.content = ""
  56. token = state.tokens[endDelim.token]
  57. token.type = "s_close"
  58. token.tag = "s"
  59. token.nesting = -1
  60. token.markup = "~~"
  61. token.content = ""
  62. if (
  63. state.tokens[endDelim.token - 1].type == "text"
  64. and state.tokens[endDelim.token - 1].content == "~"
  65. ):
  66. loneMarkers.append(endDelim.token - 1)
  67. i += 1
  68. # If a marker sequence has an odd number of characters, it's split
  69. # like this: `~~~~~` -> `~` + `~~` + `~~`, leaving one marker at the
  70. # start of the sequence.
  71. #
  72. # So, we have to move all those markers after subsequent s_close tags.
  73. #
  74. while loneMarkers:
  75. i = loneMarkers.pop()
  76. j = i + 1
  77. while (j < len(state.tokens)) and (state.tokens[j].type == "s_close"):
  78. j += 1
  79. j -= 1
  80. if i != j:
  81. token = state.tokens[j]
  82. state.tokens[j] = state.tokens[i]
  83. state.tokens[i] = token
  84. def postProcess(state: StateInline) -> None:
  85. """Walk through delimiter list and replace text tokens with tags."""
  86. tokens_meta = state.tokens_meta
  87. maximum = len(state.tokens_meta)
  88. _postProcess(state, state.delimiters)
  89. curr = 0
  90. while curr < maximum:
  91. try:
  92. curr_meta = tokens_meta[curr]
  93. except IndexError:
  94. pass
  95. else:
  96. if curr_meta and "delimiters" in curr_meta:
  97. _postProcess(state, curr_meta["delimiters"])
  98. curr += 1