|
| 1 | +"""Simple typographic replacements |
| 2 | +
|
| 3 | +* ``(c)``, ``(C)`` → © |
| 4 | +* ``(tm)``, ``(TM)`` → ™ |
| 5 | +* ``(r)``, ``(R)`` → ® |
| 6 | +* ``(p)``, ``(P)`` → § |
| 7 | +* ``+-`` → ± |
| 8 | +* ``...`` → … |
| 9 | +* ``?....`` → ?.. |
| 10 | +* ``!....`` → !.. |
| 11 | +* ``????????`` → ??? |
| 12 | +* ``!!!!!`` → !!! |
| 13 | +* ``,,,`` → , |
* ``--`` → –
* ``---`` → —
| 16 | +""" |
| 17 | +import logging |
| 18 | +import re |
| 19 | +from typing import List, Match |
| 20 | + |
| 21 | +from .state_core import StateCore |
| 22 | +from ..token import Token |
| 23 | + |
| 24 | +LOGGER = logging.getLogger(__name__) |
| 25 | + |
# TODO:
# - fractions 1/2, 1/4, 3/4 -> ½, ¼, ¾
# - multiplication 2 x 4 -> 2 × 4
| 29 | + |
# Cheap pre-filter: matches any text that could need one of the "rare"
# replacements below, so the individual substitutions only run when useful.
RARE_RE = re.compile(r"\+-|\.\.|\?\?\?\?|!!!!|,,|--")

# Workaround for phantomjs - need regex without /g flag,
# or root check will fail every second time
# SCOPED_ABBR_TEST_RE = r"\((c|tm|r|p)\)"

# (c), (tm), (r), (p) in any letter case; group 1 is the abbreviation.
SCOPED_ABBR_RE = re.compile(r"\((c|tm|r|p)\)", flags=re.IGNORECASE)

# "+-" -> "±"
PLUS_MINUS_RE = re.compile(r"\+-")

# Two or more dots collapse to a single ellipsis "…".
ELLIPSIS_RE = re.compile(r"\.{2,}")

# "?…" / "!…" (produced by the ellipsis pass above) -> "?.." / "!..".
ELLIPSIS_QUESTION_EXCLAMATION_RE = re.compile(r"([?!])…")

# Four or more "?"/"!" collapse to three repeats of the last matched one.
QUESTION_EXCLAMATION_RE = re.compile(r"([?!]){4,}")

# Runs of two or more commas collapse to a single comma.
COMMA_RE = re.compile(r",{2,}")

# "---" not adjacent to another dash -> em dash; the lookahead leaves the
# right-hand neighbour unconsumed so adjacent matches are still found.
EM_DASH_RE = re.compile(r"(^|[^-])---(?=[^-]|$)", flags=re.MULTILINE)

# "--" bounded by whitespace or line edges -> en dash.
EN_DASH_RE = re.compile(r"(^|\s)--(?=\s|$)", flags=re.MULTILINE)

# "--" tightly bounded by non-dash, non-space characters -> en dash.
EN_DASH_INDENT_RE = re.compile(r"(^|[^-\s])--(?=[^-\s]|$)", flags=re.MULTILINE)
| 54 | + |
# Lower-case abbreviation -> replacement symbol.
SCOPED_ABBR = {"c": "©", "r": "®", "p": "§", "tm": "™"}


def replaceFn(match: Match[str]):
    """Return the symbol for a scoped abbreviation match (case-insensitive).

    ``match`` comes from ``SCOPED_ABBR_RE``; group 1 holds the abbreviation
    without its surrounding parentheses.
    """
    abbreviation = match.group(1)
    return SCOPED_ABBR[abbreviation.lower()]
| 60 | + |
| 61 | + |
def replace_scoped(inlineTokens: List[Token]):
    """Replace ``(c)``/``(tm)``/``(r)``/``(p)`` in text tokens, in place.

    Text inside autolinks is left untouched: the counter is made non-zero
    (hence truthy) between an autolink's open and close tokens, so the
    substitution branch is skipped there.
    """
    inside_autolink = 0

    for tok in inlineTokens:
        if tok.type == "text" and not inside_autolink:
            tok.content = SCOPED_ABBR_RE.sub(replaceFn, tok.content)
        elif tok.type == "link_open" and tok.info == "auto":
            # NOTE: decrement on open / increment on close mirrors the
            # original; any non-zero value marks "inside an autolink".
            inside_autolink -= 1
        elif tok.type == "link_close" and tok.info == "auto":
            inside_autolink += 1
| 74 | + |
| 75 | + |
def replace_rare(inlineTokens: List[Token]):
    """Apply the "rare" typographic replacements to text tokens, in place.

    Handles ``+-`` → ±, dot runs → …, ``?..``/``!..`` fix-ups, collapsing
    of ``????``/``!!!!`` and comma runs, and em/en dashes.  Text inside
    autolinks is skipped (the counter is truthy between open and close).
    """
    # Ordered (pattern, replacement) pairs.  Order matters: the ellipsis
    # pass must run before the "?.."/"!.." fix-up, and the spaced en-dash
    # pattern before the tight one.
    substitutions = (
        (PLUS_MINUS_RE, "±"),                          # +- -> ±
        (ELLIPSIS_RE, "…"),                            # .., ... -> …
        (ELLIPSIS_QUESTION_EXCLAMATION_RE, "\\1.."),   # ?… / !… -> ?.. / !..
        (QUESTION_EXCLAMATION_RE, "\\1\\1\\1"),        # ???? -> ???
        (COMMA_RE, ","),                               # ,,, -> ,
        (EM_DASH_RE, "\\1\u2014"),                     # --- -> em dash
        (EN_DASH_RE, "\\1\u2013"),                     # spaced -- -> en dash
        (EN_DASH_INDENT_RE, "\\1\u2013"),              # tight -- -> en dash
    )

    inside_autolink = 0

    for tok in inlineTokens:
        if tok.type == "text" and not inside_autolink and RARE_RE.search(tok.content):
            text = tok.content
            for pattern, replacement in substitutions:
                text = pattern.sub(replacement, text)
            tok.content = text

        if tok.type == "link_open" and tok.info == "auto":
            # Decrement on open / increment on close mirrors the original;
            # any non-zero value marks "inside an autolink".
            inside_autolink -= 1

        if tok.type == "link_close" and tok.info == "auto":
            inside_autolink += 1
| 109 | + |
| 110 | + |
def replace(state: "StateCore") -> None:
    """Core rule: run typographic replacements over all inline tokens.

    Does nothing unless the ``typographer`` option is enabled.  For each
    inline token, the cheap pre-filter regexes decide whether the scoped
    (``(c)`` etc.) and/or rare (dashes, ellipses, …) passes are needed.
    """
    if not state.md.options.typographer:
        return

    for token in state.tokens:
        if token.type != "inline":
            continue

        # Token.children may be None; the helpers iterate it, so skip
        # such tokens instead of crashing with a TypeError.
        if token.children is None:
            continue

        if SCOPED_ABBR_RE.search(token.content):
            replace_scoped(token.children)

        if RARE_RE.search(token.content):
            replace_rare(token.children)
0 commit comments