34 | 34 | _RE_LIQUID_INCLUDE: Pattern[str] = re.compile(r"( \{% include.+%\})") |
35 | 35 |
36 | 36 |
| 37 | +_PRE_PARSE_REPLACEMENTS: tuple[str, str] = ( |
| 38 | + "https://en.wikipedia.org/wiki/Uniform_distribution_(continuous)", |
| 39 | + "https://en.wikipedia.org/wiki/Continuous_uniform_distribution", |
| 40 | +) |
| 41 | +""" |
| 42 | +Replacement to apply *prior* to parsing as markdown. |
| 43 | +
| 44 | +**HACK**: The closing parenthesis messes up markdown parsing; replace with the resolved redirect Wikipedia URL. |
| 45 | +
| 46 | +TODO |
| 47 | +---- |
| 48 | +Remove if this gets fixed upstream, via https://github.com/vega/vega/pull/3996 |
| 49 | +""" |
| 50 | + |
| 51 | + |
37 | 52 | class RSTRenderer(_RSTRenderer): |
38 | 53 | def __init__(self) -> None: |
39 | 54 | super().__init__() |
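A quick illustration of the pre-parse hack above: `_PRE_PARSE_REPLACEMENTS` is an `(old, new)` pair, so it can be fed straight to `str.replace` via tuple unpacking, swapping the parenthesised Wikipedia URL for its redirect target before the markdown parser ever sees the troublesome `)`. This is a minimal, hypothetical sketch; only the two URLs come from the diff, the sample markdown line is invented.

```python
# Hypothetical sketch of the pre-parse substitution; only the two URLs are from the diff.
_PRE_PARSE_REPLACEMENTS: tuple[str, str] = (
    "https://en.wikipedia.org/wiki/Uniform_distribution_(continuous)",
    "https://en.wikipedia.org/wiki/Continuous_uniform_distribution",
)

sample = "[uniform](https://en.wikipedia.org/wiki/Uniform_distribution_(continuous))"
# str.replace(old, new) via tuple unpacking: the URL containing ")" is gone
# before any markdown link parsing can cut it short.
print(sample.replace(*_PRE_PARSE_REPLACEMENTS))
# [uniform](https://en.wikipedia.org/wiki/Continuous_uniform_distribution)
```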
@@ -68,8 +83,11 @@ def __init__( |
68 | 83 | super().__init__(renderer, block, inline, plugins) |
69 | 84 |
70 | 85 | def __call__(self, s: str) -> str: |
71 | | - s = super().__call__(s) # pyright: ignore[reportAssignmentType] |
72 | | - return unescape(s).replace(r"\ ,", ",").replace(r"\ ", " ") |
| 86 | + r = super().__call__(s) |
| 87 | + if isinstance(r, str): |
| 88 | + return unescape(r).replace(r"\ ,", ",").replace(r"\ ", " ") |
| 89 | + msg = f"Expected `str` but got {type(r).__name__!r}" |
| 90 | + raise TypeError(msg) |
73 | 91 |
74 | 92 | def render_tokens(self, tokens: Iterable[Token], /) -> str: |
75 | 93 | """ |
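The `__call__` rewrite above swaps a `# pyright: ignore` for a runtime `isinstance` check: mistune's base `__call__` is typed to return either rendered text or raw tokens (depending on whether a renderer is attached), so narrowing the result explicitly satisfies the type checker and fails loudly if a non-string ever slips through. A generic sketch of the pattern, using a hypothetical `render` callable in place of `super().__call__`:

```python
# Generic sketch of isinstance narrowing; `narrowed_call` and `render` are hypothetical.
from collections.abc import Callable
from html import unescape


def narrowed_call(render: Callable[[str], str | list[dict]], s: str) -> str:
    r = render(s)
    if isinstance(r, str):
        # The runtime check narrows the union for the type checker,
        # replacing the old `# pyright: ignore[reportAssignmentType]`.
        return unescape(r).replace(r"\ ,", ",").replace(r"\ ", " ")
    msg = f"Expected `str` but got {type(r).__name__!r}"
    raise TypeError(msg)
```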
@@ -129,32 +147,26 @@ def process_text(self, text: str, state: InlineState) -> None: |
129 | 147 | Removes `liquid`_ templating markup. |
130 | 148 |
131 | 149 | .. _liquid: |
132 | | - https://shopify.github.io/liquid/ |
| 150 | + https://shopify.github.io/liquid/ |
133 | 151 | """ |
134 | 152 | state.append_token({"type": "text", "raw": _RE_LIQUID_INCLUDE.sub(r"", text)}) |
135 | 153 |
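For reference, the `_RE_LIQUID_INCLUDE` pattern defined at the top of the file is what `process_text` uses to strip the liquid markup. A small, hypothetical example (the input line is invented):

```python
import re

_RE_LIQUID_INCLUDE = re.compile(r"( \{% include.+%\})")

# Hypothetical docs line containing a liquid include tag.
line = "See the gallery {% include gallery_links.html %} for more examples."
print(_RE_LIQUID_INCLUDE.sub("", line))
# See the gallery for more examples.
```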
136 | 154 |
137 | | -def read_ast_tokens( |
138 | | - source: Url | Path, /, replacements: list[tuple[str, str]] | None = None |
139 | | -) -> list[Token]: |
| 155 | +def read_ast_tokens(source: Url | Path, /) -> list[Token]: |
140 | 156 | """ |
141 | 157 | Read from ``source``, drop ``BlockState``. |
142 | 158 |
143 | 159 | Factored out to provide accurate typing. |
144 | 160 | """ |
145 | 161 | markdown = _Markdown(renderer=None, inline=InlineParser()) |
146 | 162 | if isinstance(source, Path): |
147 | | - token_text = source.read_text() |
| 163 | + text = source.read_text() |
148 | 164 | else: |
149 | 165 | with request.urlopen(source) as response: |
150 | | - token_text = response.read().decode("utf-8") |
151 | | - |
152 | | - # Apply replacements |
153 | | - if replacements: |
154 | | - for replacement in replacements: |
155 | | - token_text = token_text.replace(replacement[0], replacement[1]) |
| 166 | + text = response.read().decode("utf-8") |
156 | 167 |
157 | | - tokens = markdown.parse(token_text, markdown.block.state_cls()) |
| 168 | + text = text.replace(*_PRE_PARSE_REPLACEMENTS) |
| 169 | + tokens: Any = markdown.parse(text) |
158 | 170 | return tokens[0] |
159 | 171 |
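Finally, for context on the simplified `read_ast_tokens`: the explicit `markdown.block.state_cls()` argument is gone in favour of `parse`'s default state, and `tokens[0]` keeps just the token list while dropping the returned `BlockState`. A rough, hypothetical stand-in using mistune's public `create_markdown` helper (not the module's exact `_Markdown(...)` construction) shows the shape of what comes back:

```python
# Hypothetical stand-in for read_ast_tokens on an in-memory string; the sample
# markdown text is invented.
import mistune

markdown = mistune.create_markdown(renderer=None)  # renderer=None -> AST tokens
tokens = markdown("# Heading\n\nSome *emphasis* here.")  # __call__ ~ parse(...)[0]
for tok in tokens[:3]:
    # mistune tokens are plain dicts with at least a "type" key.
    print(tok["type"])
```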
160 | 172 |