Commit 709bbdf

chore(typing): fix mypy>=1.14.0 warnings (#3739)
1 parent 582a364 commit 709bbdf

File tree

tools/markup.py
tools/vega_expr.py

2 files changed: +30 -31 lines


tools/markup.py

Lines changed: 26 additions & 14 deletions
@@ -34,6 +34,21 @@
 _RE_LIQUID_INCLUDE: Pattern[str] = re.compile(r"( \{% include.+%\})")
 
 
+_PRE_PARSE_REPLACEMENTS: tuple[str, str] = (
+    "https://en.wikipedia.org/wiki/Uniform_distribution_(continuous)",
+    "https://en.wikipedia.org/wiki/Continuous_uniform_distribution",
+)
+"""
+Replacement to apply *prior* to parsing as markdown.
+
+**HACK**: Closing parenthesis messes up markdown parsing, replace with resolved redirect wikipedia URL.
+
+TODO
+----
+Remove if this gets fixed upstream, via https://github.com/vega/vega/pull/3996
+"""
+
+
 class RSTRenderer(_RSTRenderer):
     def __init__(self) -> None:
         super().__init__()
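
For reference, a minimal standalone sketch (not code from this repo) of the pattern introduced above: a single (old, new) pair typed as a fixed-length tuple[str, str], which can be splatted straight into str.replace() and arity-checked by type checkers, unlike the list[tuple[str, str]] it replaces.

# Illustrative names only; mirrors _PRE_PARSE_REPLACEMENTS and its use further below.
PAIR: tuple[str, str] = (
    "https://en.wikipedia.org/wiki/Uniform_distribution_(continuous)",
    "https://en.wikipedia.org/wiki/Continuous_uniform_distribution",
)


def apply_pre_parse(text: str) -> str:
    # Unpacking a fixed-length 2-tuple supplies exactly the (old, new)
    # arguments that str.replace() expects.
    return text.replace(*PAIR)


print(apply_pre_parse("see https://en.wikipedia.org/wiki/Uniform_distribution_(continuous)"))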
@@ -68,8 +83,11 @@ def __init__(
         super().__init__(renderer, block, inline, plugins)
 
     def __call__(self, s: str) -> str:
-        s = super().__call__(s)  # pyright: ignore[reportAssignmentType]
-        return unescape(s).replace(r"\ ,", ",").replace(r"\ ", " ")
+        r = super().__call__(s)
+        if isinstance(r, str):
+            return unescape(r).replace(r"\ ,", ",").replace(r"\ ", " ")
+        msg = f"Expected `str` but got {type(r).__name__!r}"
+        raise TypeError(msg)
 
     def render_tokens(self, tokens: Iterable[Token], /) -> str:
         """
@@ -129,32 +147,26 @@ def process_text(self, text: str, state: InlineState) -> None:
         Removes `liquid`_ templating markup.
 
         .. _liquid:
-           https://shopify.github.io/liquid/
+            https://shopify.github.io/liquid/
         """
         state.append_token({"type": "text", "raw": _RE_LIQUID_INCLUDE.sub(r"", text)})
 
 
-def read_ast_tokens(
-    source: Url | Path, /, replacements: list[tuple[str, str]] | None = None
-) -> list[Token]:
+def read_ast_tokens(source: Url | Path, /) -> list[Token]:
     """
     Read from ``source``, drop ``BlockState``.
 
     Factored out to provide accurate typing.
     """
     markdown = _Markdown(renderer=None, inline=InlineParser())
     if isinstance(source, Path):
-        token_text = source.read_text()
+        text = source.read_text()
     else:
         with request.urlopen(source) as response:
-            token_text = response.read().decode("utf-8")
-
-    # Apply replacements
-    if replacements:
-        for replacement in replacements:
-            token_text = token_text.replace(replacement[0], replacement[1])
+            text = response.read().decode("utf-8")
 
-    tokens = markdown.parse(token_text, markdown.block.state_cls())
+    text = text.replace(*_PRE_PARSE_REPLACEMENTS)
+    tokens: Any = markdown.parse(text)
     return tokens[0]
 
 
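The rewritten read_ast_tokens also annotates the intermediate parse result as Any and lets the declared return type (list[Token]) carry the contract, indexing the result with [0] immediately. A standalone sketch of that pattern with hypothetical names (_stub_parse stands in for _Markdown.parse):

from typing import Any


def _stub_parse(raw: str) -> Any:
    # Hypothetical stand-in for a third-party parser whose annotated return
    # type is too loose to be useful to callers.
    return ([{"type": "text", "raw": raw}], None)


def tokens_only(raw: str) -> list[dict[str, Any]]:
    # Mirrors `tokens: Any = markdown.parse(text)` above: the loosely typed
    # intermediate is indexed right away, and callers rely on this function's
    # return annotation rather than the third-party one.
    result: Any = _stub_parse(raw)
    return result[0]


print(tokens_only("hello"))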

tools/vega_expr.py

Lines changed: 4 additions & 17 deletions
@@ -47,14 +47,6 @@
 EXPRESSIONS_DOCS_URL: LiteralString = f"{VEGA_DOCS_URL}expressions/"
 EXPRESSIONS_URL_TEMPLATE = "https://raw.githubusercontent.com/vega/vega/refs/tags/{version}/docs/docs/expressions.md"
 
-# Replacements to apply prior to parsing as markdown
-PRE_PARSE_REPLACEMENTS = [
-    # Closing paren messes up markdown parsing, replace with equivalent wikipedia URL
-    (
-        "https://en.wikipedia.org/wiki/Uniform_distribution_(continuous)",
-        "https://en.wikipedia.org/wiki/Continuous_uniform_distribution",
-    )
-]
 
 # NOTE: Regex patterns
 FUNCTION_DEF_LINE: Pattern[str] = re.compile(

@@ -939,15 +931,13 @@ def italics_to_backticks(s: str, names: Iterable[str], /) -> str:
     return re.sub(pattern, r"\g<not_link_start>``\g<name>``\g<not_link_end>", s)
 
 
-def parse_expressions(
-    source: Url | Path, /, replacements: list[tuple[str, str]] | None = None
-) -> Iterator[VegaExprDef]:
+def parse_expressions(source: Url | Path, /) -> Iterator[VegaExprDef]:
     """
     Download remote or read local `.md` resource and eagerly parse signatures of relevant definitions.
 
     Yields with docs to ensure each can use all remapped names, regardless of the order they appear.
     """
-    tokens = read_ast_tokens(source, replacements=replacements)
+    tokens = read_ast_tokens(source)
     expr_defs = tuple(VegaExprDef.from_tokens(tokens))
     VegaExprDef.remap_title.refresh()
     for expr_def in expr_defs:

@@ -971,7 +961,7 @@ def write_expr_module(version: str, output: Path, *, header: str) -> None:
     # Retrieve all of the links used in expr method docstrings,
     # so we can include them in the class docstrings, so that sphinx
     # will find them.
-    expr_defs = parse_expressions(url, replacements=PRE_PARSE_REPLACEMENTS)
+    expr_defs = parse_expressions(url)
 
     links = {}
     rst_renderer = RSTRenderer()

@@ -1001,10 +991,7 @@ def write_expr_module(version: str, output: Path, *, header: str) -> None:
     )
     contents = chain(
         content,
-        (
-            expr_def.render()
-            for expr_def in parse_expressions(url, replacements=PRE_PARSE_REPLACEMENTS)
-        ),
+        (expr_def.render() for expr_def in parse_expressions(url)),
         [MODULE_POST],
     )
     print(f"Generating\n {url!s}\n ->{output!s}")
