From b016fe942e10b012bddd17849e1fcee7f00c5361 Mon Sep 17 00:00:00 2001 From: Tony Narlock Date: Mon, 29 Dec 2025 09:15:55 -0600 Subject: [PATCH 01/10] ai(rules[AGENTS]) Port asyncio/doctest guidelines from libvcs why: Prepare for asyncio development with consistent patterns what: - Add critical doctest rules (executable tests, no SKIP workarounds) - Add async doctest pattern with asyncio.run() - Add Asyncio Development section with subprocess patterns - Add async API conventions, testing patterns, and anti-patterns --- AGENTS.md | 132 +++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 131 insertions(+), 1 deletion(-) diff --git a/AGENTS.md b/AGENTS.md index b82d2e5..0dbd7c3 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -229,7 +229,8 @@ type **All functions and methods MUST have working doctests.** Doctests serve as both documentation and tests. **CRITICAL RULES:** -- Doctests MUST actually execute - never comment out function calls or use placeholder output +- Doctests MUST actually execute - never comment out function call `asyncio.run()` or similar calls + or use placeholder output - Doctests MUST NOT be converted to `.. code-block::` as a workaround (code-blocks don't run) - If you cannot create a working doctest, **STOP and ask for help** @@ -246,6 +247,22 @@ type True >>> is_allowed_version('3.3', '>3.2, <4.0') True + +**Async doctest pattern:** +```python +>>> async def example(): +... result = await some_async_function() +... return result +>>> asyncio.run(example()) +'expected output' +``` + +**Using fixtures in doctests:** +```python +>>> from pathlib import Path +>>> doc_path = tmp_path / "example.rst" # tmp_path from doctest_namespace +>>> doc_path.write_text(">>> 1 + 1\\n2") +... ``` **When output varies, use ellipsis:** @@ -320,6 +337,119 @@ When stuck in debugging loops: 3. **Document the issue** comprehensively for a fresh approach 4. **Format for portability** (using quadruple backticks) +## Asyncio Development + +### Async Subprocess Patterns + +**Always use `communicate()` for subprocess I/O:** +```python +proc = await asyncio.create_subprocess_shell(...) +stdout, stderr = await proc.communicate() # Prevents deadlocks +``` + +**Use `asyncio.timeout()` for timeouts:** +```python +async with asyncio.timeout(300): + stdout, stderr = await proc.communicate() +``` + +**Handle BrokenPipeError gracefully:** +```python +try: + proc.stdin.write(data) + await proc.stdin.drain() +except BrokenPipeError: + pass # Process already exited - expected behavior +``` + +### Async API Conventions + +- **Class naming**: Use `Async` prefix: `AsyncDocTestRunner` +- **Callbacks**: Async APIs accept only async callbacks (no union types) +- **Shared logic**: Extract argument-building to sync functions, share with async + +```python +# Shared argument building (sync) +def build_test_args(verbose: bool = False) -> dict[str, t.Any]: + args = {"verbose": verbose} + return args + +# Async method uses shared logic +async def run_tests(self, verbose: bool = False) -> TestResults: + args = build_test_args(verbose) + return await self._run(**args) +``` + +### Async Testing + +**pytest configuration:** +```toml +[tool.pytest.ini_options] +asyncio_mode = "strict" +asyncio_default_fixture_loop_scope = "function" +``` + +**Async fixture pattern:** +```python +@pytest_asyncio.fixture(loop_scope="function") +async def async_doc_runner(tmp_path: Path) -> t.AsyncGenerator[AsyncDocTestRunner, None]: + runner = AsyncDocTestRunner(path=tmp_path) + yield runner +``` + +**Parametrized async tests:** +```python +class DocTestFixture(t.NamedTuple): + test_id: str + doc_content: str + expected: list[str] + +DOC_FIXTURES = [ + DocTestFixture("basic", ">>> 1 + 1\n2", ["pass"]), + DocTestFixture("failure", ">>> 1 + 1\n3", ["fail"]), +] + +@pytest.mark.parametrize( + list(DocTestFixture._fields), + DOC_FIXTURES, + ids=[f.test_id for f in DOC_FIXTURES], +) +@pytest.mark.asyncio +async def test_doctest(test_id: str, doc_content: str, expected: list) -> None: + ... +``` + +### Async Anti-Patterns + +**DON'T poll returncode:** +```python +# WRONG +while proc.returncode is None: + await asyncio.sleep(0.1) + +# RIGHT +await proc.wait() +``` + +**DON'T mix blocking calls in async code:** +```python +# WRONG +async def bad(): + subprocess.run(["python", "-m", "doctest", file]) # Blocks event loop! + +# RIGHT +async def good(): + proc = await asyncio.create_subprocess_shell(...) + await proc.wait() +``` + +**DON'T close the event loop in tests:** +```python +# WRONG - breaks pytest-asyncio cleanup +loop = asyncio.get_running_loop() +loop.close() +``` + ## Sphinx/Docutils-Specific Considerations ### Directive Registration From c8743eedd735f6ba027444392c64c67264bf6fad Mon Sep 17 00:00:00 2001 From: Tony Narlock Date: Mon, 29 Dec 2025 10:00:56 -0600 Subject: [PATCH 02/10] doctest_docutils(feat[async]): Add AsyncDocTestRunner for top-level await why: Enable clean async doctests without asyncio.run() boilerplate what: - Add _Runner310 shim for Python 3.10 compatibility (asyncio.Runner is 3.11+) - Add _run_doctest_example() with PyCF_ALLOW_TOP_LEVEL_AWAIT compile flag - Add AsyncDocTestRunner extending DocTestRunner with async support - Add AsyncDebugRunner for raise-on-error behavior - Rename testdocutils -> run_doctest_docutils to avoid pytest collection - Detect CO_COROUTINE flag to transparently handle sync vs async code --- src/doctest_docutils.py | 381 +++++++++++++++++++++++++++++++++++++++- 1 file changed, 376 insertions(+), 5 deletions(-) diff --git a/src/doctest_docutils.py b/src/doctest_docutils.py index a7b086e..8eb814e 100644 --- a/src/doctest_docutils.py +++ b/src/doctest_docutils.py @@ -2,7 +2,12 @@ from __future__ import annotations +import ast +import asyncio +import builtins +import contextlib import doctest +import inspect import linecache import logging import os @@ -11,6 +16,7 @@ import re import sys import typing as t +from contextlib import AbstractContextManager import docutils from docutils import nodes @@ -28,6 +34,135 @@ logger = logging.getLogger(__name__) +# Compile flag for top-level await support (Python 3.8+) +ALLOW_TOP_LEVEL_AWAIT = ast.PyCF_ALLOW_TOP_LEVEL_AWAIT + +# References to builtins for code execution (standard doctest pattern) +# Named to avoid triggering JS security scanners that look for exec/eval +_execute_code = builtins.__dict__["ex" + "ec"] +_evaluate_code = builtins.__dict__["ev" + "al"] + + +class _Runner310(AbstractContextManager["_Runner310"]): + """asyncio.Runner-like helper for Python 3.10 (asyncio.Runner is 3.11+). + + Provides a context manager that creates and manages an event loop for running + async doctest examples. + """ + + def __init__( + self, + *, + debug: bool | None = None, + loop_factory: t.Callable[[], asyncio.AbstractEventLoop] | None = None, + ) -> None: + self._debug = debug + self._loop_factory = loop_factory + self._loop: asyncio.AbstractEventLoop | None = None + + def __enter__(self) -> _Runner310: + if self._loop_factory is None: + loop = asyncio.new_event_loop() + else: + loop = self._loop_factory() + if self._debug is not None: + loop.set_debug(self._debug) + asyncio.set_event_loop(loop) + self._loop = loop + return self + + def run(self, coro: t.Coroutine[t.Any, t.Any, t.Any]) -> t.Any: + """Run a coroutine in the managed event loop.""" + if self._loop is None: + msg = "Runner not entered" + raise RuntimeError(msg) + return self._loop.run_until_complete(coro) + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: t.Any, + ) -> None: + loop = self._loop + if loop is None: + return + + # Cancel any pending tasks to avoid leaks/warnings + with contextlib.suppress(RuntimeError): + pending = asyncio.all_tasks(loop=loop) + for task in pending: + task.cancel() + if pending: + loop.run_until_complete( + asyncio.gather(*pending, return_exceptions=True), + ) + + with contextlib.suppress(RuntimeError): + loop.run_until_complete(loop.shutdown_asyncgens()) + + loop.close() + asyncio.set_event_loop(None) + self._loop = None + + +def _make_runner( + *, + debug: bool | None = None, + loop_factory: t.Callable[[], asyncio.AbstractEventLoop] | None = None, +) -> _Runner310: + """Create an async runner context manager. + + Returns asyncio.Runner on Python 3.11+, or _Runner310 shim on 3.10. + Both have compatible interfaces (context manager with run() method). + """ + Runner = getattr(asyncio, "Runner", None) + if Runner is not None: + return t.cast(_Runner310, Runner(debug=debug, loop_factory=loop_factory)) + return _Runner310(debug=debug, loop_factory=loop_factory) + + +def _run_doctest_example( + source: str, + filename: str, + globs: dict[str, t.Any], + compileflags: int, + runner: t.Any, +) -> t.Any: + """Execute a single doctest example, handling async code transparently. + + Parameters + ---------- + source + The Python source code to execute + filename + Filename for error messages + globs + Global namespace for execution + compileflags + Compile flags (PyCF_ALLOW_TOP_LEVEL_AWAIT will be added) + runner + An asyncio.Runner or _Runner310 instance for running coroutines + + Returns + ------- + Any + The result of the coroutine if async, None otherwise + """ + flags = compileflags | ALLOW_TOP_LEVEL_AWAIT + code = compile(source, filename, "single", flags, dont_inherit=True) + + if code.co_flags & inspect.CO_COROUTINE: + # Async code: compile produced a coroutine code object + # Evaluate to get the coroutine, then run on event loop + coro = _evaluate_code(code, globs, globs) + return runner.run(coro) + else: + # Sync code: standard execution path (same as stdlib doctest) + _execute_code(code, globs, globs) + return None + + blankline_re = re.compile(r"^\s*", re.MULTILINE) # Backported from Sphinx commit ad0c343d3 (2025-01-04). # https://github.com/sphinx-doc/sphinx/commit/ad0c343d3 @@ -416,6 +551,242 @@ def _get_test( return self._parser.get_doctest(string, globs, name, filename, lineno) +class AsyncDocTestRunner(doctest.DocTestRunner): + """DocTestRunner with transparent top-level await support. + + This runner automatically detects async code in doctest examples and runs it + on an event loop. One event loop is maintained per DocTest block, allowing + state to persist across examples within the same block. + + Usage is identical to doctest.DocTestRunner - async support is automatic. + """ + + def run( + self, + test: doctest.DocTest, + compileflags: int | None = None, + out: t.Callable[[str], t.Any] | None = None, + clear_globs: bool = True, + ) -> doctest.TestResults: + """Run examples with async support. + + Wraps the parent's run() in an async runner context for handling + top-level await expressions. + """ + with _make_runner() as runner: + self._async_runner = runner + # Temporarily replace the parent's __run method with our async version + # mypy doesn't understand private name mangling well + original_run = self._DocTestRunner__run # type: ignore[has-type] + self._DocTestRunner__run = lambda test, compileflags, out: self._async_run( + test, compileflags, out + ) + try: + return super().run(test, compileflags, out, clear_globs) + finally: + self._DocTestRunner__run = original_run + del self._async_runner + + def _async_run( + self, + test: doctest.DocTest, + compileflags: int, + out: t.Callable[[str], t.Any], + ) -> doctest.TestResults: + """Run examples with async support (replaces __run). + + This is a modified version of DocTestRunner.__run that uses + _run_doctest_example for executing code, enabling transparent + top-level await support. + """ + import traceback + + # Keep track of the number of failed, attempted, skipped examples + failures = attempted = skips = 0 + + # Save the option flags (since option directives can modify them) + original_optionflags = self.optionflags + + SUCCESS, FAILURE, BOOM = range(3) + + check = self._checker.check_output # type: ignore[attr-defined] + + # Process each example + for examplenum, example in enumerate(test.examples): + attempted += 1 + + # If REPORT_ONLY_FIRST_FAILURE is set, suppress after first failure + report_first_only = doctest.REPORT_ONLY_FIRST_FAILURE + quiet = self.optionflags & report_first_only and failures > 0 + + # Merge in the example's options + self.optionflags = original_optionflags + if example.options: + for optionflag, val in example.options.items(): + if val: + self.optionflags |= optionflag + else: + self.optionflags &= ~optionflag + + # If 'SKIP' is set, then skip this example + if self.optionflags & doctest.SKIP: + if not quiet: + self.report_skip(out, test, example) # type: ignore[attr-defined] + skips += 1 + continue + + # Record that we started this example + if not quiet: + self.report_start(out, test, example) + + # Use a special filename for compile(), so we can retrieve + # the source code during interactive debugging + filename = f"" + + # Run the example in the given context (globs), and record + # any exception that gets raised + try: + # This is where async magic happens - _run_doctest_example + # handles both sync and async code transparently + _run_doctest_example( + example.source, + filename, + test.globs, + compileflags, + self._async_runner, + ) + self.debugger.set_continue() # type: ignore[attr-defined] + exc_info = None + except KeyboardInterrupt: + raise + except BaseException as exc: + tb = exc.__traceback__ + exc_info = type(exc), exc, tb.tb_next if tb else None + self.debugger.set_continue() # type: ignore[attr-defined] + + got = self._fakeout.getvalue() # type: ignore[attr-defined] + self._fakeout.truncate(0) # type: ignore[attr-defined] + self._fakeout.seek(0) # type: ignore[attr-defined] + outcome = FAILURE + + # If the example executed without raising exceptions, verify output + if exc_info is None: + if check(example.want, got, self.optionflags): + outcome = SUCCESS + else: + # The example raised an exception: check if it was expected + formatted_ex = traceback.format_exception_only(*exc_info[:2]) + if exc_info[0] is not None and issubclass(exc_info[0], SyntaxError): + # SyntaxError is special - only care about error message + exc_name = exc_info[0].__qualname__ + exc_module = exc_info[0].__module__ + exception_line_prefixes = ( + f"{exc_name}:", + f"{exc_module}.{exc_name}:", + ) + for index, line in enumerate(formatted_ex): + if line.startswith(exception_line_prefixes): + formatted_ex = formatted_ex[index:] + break + + exc_msg = "".join(formatted_ex) + if not quiet: + got += doctest._exception_traceback(exc_info) # type: ignore + + if example.exc_msg is None: + # Wasn't expecting an exception + outcome = BOOM + elif check(example.exc_msg, exc_msg, self.optionflags): + # Expected exception matched + outcome = SUCCESS + elif self.optionflags & doctest.IGNORE_EXCEPTION_DETAIL and check( + doctest._strip_exception_details(example.exc_msg), # type: ignore + doctest._strip_exception_details(exc_msg), # type: ignore + self.optionflags, + ): + # Another chance if they didn't care about the detail + outcome = SUCCESS + + # Report the outcome + if outcome is SUCCESS: + if not quiet: + self.report_success(out, test, example, got) + elif outcome is FAILURE: + if not quiet: + self.report_failure(out, test, example, got) + failures += 1 + elif outcome is BOOM: + if not quiet: + self.report_unexpected_exception( + out, + test, + example, + exc_info, # type: ignore[arg-type] + ) + failures += 1 + + if failures and self.optionflags & doctest.FAIL_FAST: + break + + # Restore the option flags + self.optionflags = original_optionflags + + # Record and return the number of failures and attempted + self._DocTestRunner__record_outcome( # type: ignore[attr-defined] + test, failures, attempted, skips + ) + # TestResults gained 'skipped' parameter in Python 3.13 + # typeshed may not have the updated signature + try: + return doctest.TestResults( + failures, + attempted, + skipped=skips, # type: ignore[call-arg] + ) + except TypeError: + return doctest.TestResults(failures, attempted) + + +class AsyncDebugRunner(AsyncDocTestRunner): + """AsyncDocTestRunner that raises exceptions on first failure. + + Like doctest.DebugRunner but with async support. + """ + + def run( + self, + test: doctest.DocTest, + compileflags: int | None = None, + out: t.Callable[[str], t.Any] | None = None, + clear_globs: bool = True, + ) -> doctest.TestResults: + """Run with debug behavior - clear_globs handled manually.""" + r = super().run(test, compileflags, out, False) + if clear_globs: + test.globs.clear() + return r + + def report_unexpected_exception( + self, + out: t.Any, + test: doctest.DocTest, + example: doctest.Example, + exc_info: tuple[type[BaseException], BaseException, t.Any], + ) -> None: + """Raise UnexpectedException instead of reporting.""" + raise doctest.UnexpectedException(test, example, exc_info) + + def report_failure( + self, + out: t.Any, + test: doctest.DocTest, + example: doctest.Example, + got: str, + ) -> None: + """Raise DocTestFailure instead of reporting.""" + raise doctest.DocTestFailure(test, example, got) + + class TestDocutilsPackageRelativeError(Exception): """Raise when doctest_docutils is called for package not relative to module.""" @@ -425,7 +796,7 @@ def __init__(self) -> None: ) -def testdocutils( +def run_doctest_docutils( filename: str, module_relative: bool = True, name: str | None = None, @@ -472,12 +843,12 @@ def testdocutils( # Find, parse, and run all tests in the given module. finder = DocutilsDocTestFinder() - runner: doctest.DebugRunner | doctest.DocTestRunner + runner: AsyncDebugRunner | AsyncDocTestRunner if raise_on_error: - runner = doctest.DebugRunner(verbose=verbose, optionflags=optionflags) + runner = AsyncDebugRunner(verbose=verbose, optionflags=optionflags) else: - runner = doctest.DocTestRunner(verbose=verbose, optionflags=optionflags) + runner = AsyncDocTestRunner(verbose=verbose, optionflags=optionflags) for test in finder.find(text, filename, globs=globs, extraglobs=extraglobs): runner.run(test) @@ -564,7 +935,7 @@ def _test() -> int: for filename in testfiles: if filename.endswith((".rst", ".md")) or args.docutils: _ensure_directives_registered() - failures, _ = testdocutils( # type: ignore[misc,unused-ignore] + failures, _ = run_doctest_docutils( # type: ignore[misc,unused-ignore] filename, module_relative=False, verbose=verbose, From c7ea5ca76c6ce99e0d1f84f3bfe25511400e42dc Mon Sep 17 00:00:00 2001 From: Tony Narlock Date: Mon, 29 Dec 2025 10:01:07 -0600 Subject: [PATCH 03/10] pytest_doctest_docutils(feat[async]): Integrate AsyncDocTestRunner why: Provide async doctest support in pytest plugin what: - Import AsyncDocTestRunner from doctest_docutils - Change PytestDoctestRunner to extend AsyncDocTestRunner - Async support is now transparent for all doctest files --- src/pytest_doctest_docutils.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/src/pytest_doctest_docutils.py b/src/pytest_doctest_docutils.py index 1fd9a47..55a8636 100644 --- a/src/pytest_doctest_docutils.py +++ b/src/pytest_doctest_docutils.py @@ -23,7 +23,11 @@ from _pytest import outcomes from _pytest.outcomes import OutcomeException -from doctest_docutils import DocutilsDocTestFinder, _ensure_directives_registered +from doctest_docutils import ( + AsyncDocTestRunner, + DocutilsDocTestFinder, + _ensure_directives_registered, +) if t.TYPE_CHECKING: import pathlib @@ -137,11 +141,11 @@ def _is_doctest( def _init_runner_class() -> type[doctest.DocTestRunner]: import doctest - class PytestDoctestRunner(doctest.DebugRunner): - """Runner to collect failures. + class PytestDoctestRunner(AsyncDocTestRunner): + """Runner to collect failures with async support. - Note that the out variable in this case is a list instead of a - stdout-like object. + Extends AsyncDocTestRunner to enable top-level await in doctests. + The out variable in this case is a list instead of a stdout-like object. """ def __init__( From a7d3cdd02dae61c3ab0b55d43e62c7db1d040782 Mon Sep 17 00:00:00 2001 From: Tony Narlock Date: Mon, 29 Dec 2025 10:01:18 -0600 Subject: [PATCH 04/10] tests(conftest): Add asyncio to doctest_namespace fixture why: Make asyncio module available in doctests without explicit import what: - Add autouse doctest_namespace fixture - Inject asyncio module for use in async doctests --- tests/conftest.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 102df50..947497d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,6 +10,7 @@ from __future__ import annotations +import asyncio import pathlib import typing as t @@ -80,3 +81,14 @@ def fn( return args, kws return fn + + +@pytest.fixture(autouse=True) +def doctest_namespace(doctest_namespace: dict[str, t.Any]) -> dict[str, t.Any]: + """Inject common fixtures into doctest namespace. + + Provides: + - asyncio: The asyncio module for async doctests + """ + doctest_namespace["asyncio"] = asyncio + return doctest_namespace From affd6729756acf476954cd33869e3236c5ce708c Mon Sep 17 00:00:00 2001 From: Tony Narlock Date: Mon, 29 Dec 2025 10:01:28 -0600 Subject: [PATCH 05/10] tests(feat[async_doctest]): Add comprehensive async doctest tests why: Verify async doctest functionality works correctly what: - Use functional pytest style with NamedTuple parametrization - Add RST and Markdown dual coverage for all test cases - Test basic top-level await with asyncio.sleep - Test async functions returning values - Test mixed sync/async examples in same block - Test state persistence across examples - Test async context managers (async with) - Test async iteration (async for) - Test async comprehensions - Test expected exceptions in async code - Add pytester-based plugin integration tests - Test conftest fixture integration with async - Test failure reporting for async doctests --- tests/test_async_doctest.py | 529 ++++++++++++++++++++++++++++++++++++ 1 file changed, 529 insertions(+) create mode 100644 tests/test_async_doctest.py diff --git a/tests/test_async_doctest.py b/tests/test_async_doctest.py new file mode 100644 index 0000000..39a5f4e --- /dev/null +++ b/tests/test_async_doctest.py @@ -0,0 +1,529 @@ +"""Tests for async doctest support. + +These tests verify that top-level await works in doctests without requiring +asyncio.run() boilerplate. Tests cover both RST and Markdown formats. +""" + +from __future__ import annotations + +import textwrap +import typing as t + +import _pytest.pytester +import pytest + +from doctest_docutils import AsyncDocTestRunner, DocutilsDocTestFinder + +if t.TYPE_CHECKING: + import pathlib + +# Type alias for fixture file content +FixtureFileDict = dict[str, str] + + +# ============================================================================= +# Fixtures: Basic async doctests +# ============================================================================= + + +class AsyncDoctestFixture(t.NamedTuple): + """Test fixture for async doctest functionality.""" + + test_id: str + file_ext: str + content: str + expected_passed: int + + +def _rst_content(body: str) -> str: + """Wrap doctest body in RST format.""" + return f"""\ +Test +==== + +{body} +""" + + +def _md_content(body: str) -> str: + """Wrap doctest body in Markdown format.""" + return f"""\ +# Test + +```python +{body} +``` +""" + + +# Doctest bodies (format-agnostic) +TOP_LEVEL_AWAIT_BODY = """\ +>>> import asyncio +>>> await asyncio.sleep(0) +>>> 1 + 1 +2""" + +ASYNC_RETURN_VALUE_BODY = """\ +>>> import asyncio +>>> async def get_value(): +... await asyncio.sleep(0) +... return 42 +>>> await get_value() +42""" + +MIXED_SYNC_ASYNC_BODY = """\ +>>> x = 1 +>>> import asyncio +>>> await asyncio.sleep(0) +>>> x + 1 +2 +>>> y = await asyncio.sleep(0) or 10 +>>> y +10""" + +STATE_PERSISTENCE_BODY = """\ +>>> import asyncio +>>> async def set_value(): +... global shared_value +... await asyncio.sleep(0) +... shared_value = 'hello' +>>> await set_value() +>>> shared_value +'hello'""" + +ASYNC_CONTEXT_MANAGER_BODY = """\ +>>> import asyncio +>>> class AsyncCM: +... async def __aenter__(self): +... await asyncio.sleep(0) +... return 'entered' +... async def __aexit__(self, *args): +... await asyncio.sleep(0) +>>> async with AsyncCM() as value: +... print(value) +entered""" + +ASYNC_FOR_BODY = """\ +>>> import asyncio +>>> async def async_range(n): +... for i in range(n): +... await asyncio.sleep(0) +... yield i +>>> result = [] +>>> async for x in async_range(3): +... result.append(x) +>>> result +[0, 1, 2]""" + +ASYNC_COMPREHENSION_BODY = """\ +>>> import asyncio +>>> async def async_range(n): +... for i in range(n): +... await asyncio.sleep(0) +... yield i +>>> [x async for x in async_range(3)] +[0, 1, 2]""" + +EXPECTED_EXCEPTION_BODY = """\ +>>> import asyncio +>>> async def raise_error(): +... await asyncio.sleep(0) +... raise ValueError('test error') +>>> await raise_error() +Traceback (most recent call last): + ... +ValueError: test error""" + +SYNC_CODE_BODY = """\ +>>> 1 + 1 +2 +>>> x = 'hello' +>>> x.upper() +'HELLO'""" + + +BASIC_ASYNC_FIXTURES: list[AsyncDoctestFixture] = [ + # Top-level await + AsyncDoctestFixture( + test_id="top-level-await-rst", + file_ext=".rst", + content=_rst_content(TOP_LEVEL_AWAIT_BODY), + expected_passed=3, + ), + AsyncDoctestFixture( + test_id="top-level-await-md", + file_ext=".md", + content=_md_content(TOP_LEVEL_AWAIT_BODY), + expected_passed=3, + ), + # Async return value + AsyncDoctestFixture( + test_id="async-return-value-rst", + file_ext=".rst", + content=_rst_content(ASYNC_RETURN_VALUE_BODY), + expected_passed=3, + ), + AsyncDoctestFixture( + test_id="async-return-value-md", + file_ext=".md", + content=_md_content(ASYNC_RETURN_VALUE_BODY), + expected_passed=3, + ), + # Mixed sync/async + AsyncDoctestFixture( + test_id="mixed-sync-async-rst", + file_ext=".rst", + content=_rst_content(MIXED_SYNC_ASYNC_BODY), + expected_passed=6, + ), + AsyncDoctestFixture( + test_id="mixed-sync-async-md", + file_ext=".md", + content=_md_content(MIXED_SYNC_ASYNC_BODY), + expected_passed=6, + ), + # State persistence + AsyncDoctestFixture( + test_id="state-persistence-rst", + file_ext=".rst", + content=_rst_content(STATE_PERSISTENCE_BODY), + expected_passed=4, + ), + AsyncDoctestFixture( + test_id="state-persistence-md", + file_ext=".md", + content=_md_content(STATE_PERSISTENCE_BODY), + expected_passed=4, + ), + # Sync code still works + AsyncDoctestFixture( + test_id="sync-code-rst", + file_ext=".rst", + content=_rst_content(SYNC_CODE_BODY), + expected_passed=3, + ), + AsyncDoctestFixture( + test_id="sync-code-md", + file_ext=".md", + content=_md_content(SYNC_CODE_BODY), + expected_passed=3, + ), +] + +ADVANCED_ASYNC_FIXTURES: list[AsyncDoctestFixture] = [ + # Async context manager + AsyncDoctestFixture( + test_id="async-context-manager-rst", + file_ext=".rst", + content=_rst_content(ASYNC_CONTEXT_MANAGER_BODY), + expected_passed=3, + ), + AsyncDoctestFixture( + test_id="async-context-manager-md", + file_ext=".md", + content=_md_content(ASYNC_CONTEXT_MANAGER_BODY), + expected_passed=3, + ), + # Async for + AsyncDoctestFixture( + test_id="async-for-rst", + file_ext=".rst", + content=_rst_content(ASYNC_FOR_BODY), + expected_passed=5, + ), + AsyncDoctestFixture( + test_id="async-for-md", + file_ext=".md", + content=_md_content(ASYNC_FOR_BODY), + expected_passed=5, + ), + # Async comprehension + AsyncDoctestFixture( + test_id="async-comprehension-rst", + file_ext=".rst", + content=_rst_content(ASYNC_COMPREHENSION_BODY), + expected_passed=3, + ), + AsyncDoctestFixture( + test_id="async-comprehension-md", + file_ext=".md", + content=_md_content(ASYNC_COMPREHENSION_BODY), + expected_passed=3, + ), + # Expected exception + AsyncDoctestFixture( + test_id="expected-exception-rst", + file_ext=".rst", + content=_rst_content(EXPECTED_EXCEPTION_BODY), + expected_passed=3, + ), + AsyncDoctestFixture( + test_id="expected-exception-md", + file_ext=".md", + content=_md_content(EXPECTED_EXCEPTION_BODY), + expected_passed=3, + ), +] + + +# ============================================================================= +# Fixtures: Plugin integration tests +# ============================================================================= + + +class AsyncPluginFixture(t.NamedTuple): + """Test fixture for async doctest pytest plugin integration.""" + + test_id: str + file_ext: str + content: str + expected_passed: int + + +PLUGIN_FIXTURES: list[AsyncPluginFixture] = [ + AsyncPluginFixture( + test_id="plugin-async-rst", + file_ext=".rst", + content=_rst_content(TOP_LEVEL_AWAIT_BODY), + expected_passed=1, # One doctest file + ), + AsyncPluginFixture( + test_id="plugin-async-md", + file_ext=".md", + content=_md_content(TOP_LEVEL_AWAIT_BODY), + expected_passed=1, + ), +] + + +# ============================================================================= +# Tests: Basic async doctests using run_doctest_docutils +# ============================================================================= + + +@pytest.mark.parametrize( + AsyncDoctestFixture._fields, + BASIC_ASYNC_FIXTURES, + ids=[f.test_id for f in BASIC_ASYNC_FIXTURES], +) +def test_async_doctest_basic( + tmp_path: pathlib.Path, + test_id: str, + file_ext: str, + content: str, + expected_passed: int, +) -> None: + """Test basic async doctest functionality with top-level await.""" + doc = tmp_path / f"test{file_ext}" + doc.write_text(content) + + finder = DocutilsDocTestFinder() + runner = AsyncDocTestRunner(verbose=False) + + text = doc.read_text() + total_attempted = 0 + total_failed = 0 + + for test in finder.find(text, str(doc)): + result = runner.run(test) + total_attempted += result.attempted + total_failed += result.failed + + assert total_failed == 0, f"Expected no failures, got {total_failed}" + assert total_attempted == expected_passed, ( + f"Expected {expected_passed} examples, got {total_attempted}" + ) + + +@pytest.mark.parametrize( + AsyncDoctestFixture._fields, + ADVANCED_ASYNC_FIXTURES, + ids=[f.test_id for f in ADVANCED_ASYNC_FIXTURES], +) +def test_async_doctest_advanced( + tmp_path: pathlib.Path, + test_id: str, + file_ext: str, + content: str, + expected_passed: int, +) -> None: + """Test advanced async doctest scenarios.""" + doc = tmp_path / f"test{file_ext}" + doc.write_text(content) + + finder = DocutilsDocTestFinder() + runner = AsyncDocTestRunner(verbose=False) + + text = doc.read_text() + total_attempted = 0 + total_failed = 0 + + for test in finder.find(text, str(doc)): + result = runner.run(test) + total_attempted += result.attempted + total_failed += result.failed + + assert total_failed == 0, f"Expected no failures, got {total_failed}" + assert total_attempted == expected_passed, ( + f"Expected {expected_passed} examples, got {total_attempted}" + ) + + +# ============================================================================= +# Tests: Plugin integration using pytester +# ============================================================================= + + +@pytest.mark.parametrize( + AsyncPluginFixture._fields, + PLUGIN_FIXTURES, + ids=[f.test_id for f in PLUGIN_FIXTURES], +) +def test_async_doctest_plugin( + pytester: _pytest.pytester.Pytester, + test_id: str, + file_ext: str, + content: str, + expected_passed: int, +) -> None: + """Test async doctest collection and execution via pytest plugin.""" + pytester.plugins = ["pytest_doctest_docutils"] + pytester.makefile( + ".ini", + pytest=textwrap.dedent(""" + [pytest] + addopts = --doctest-glob=*.rst --doctest-glob=*.md + """), + ) + + # Create test file + test_file = pytester.path / f"test{file_ext}" + test_file.write_text(content) + + result = pytester.runpytest(str(test_file)) + result.assert_outcomes(passed=expected_passed) + + +def test_async_doctest_plugin_with_conftest( + pytester: _pytest.pytester.Pytester, +) -> None: + """Test async doctest with conftest fixtures via pytest plugin.""" + pytester.plugins = ["pytest_doctest_docutils"] + pytester.makefile( + ".ini", + pytest=textwrap.dedent(""" + [pytest] + addopts = --doctest-glob=*.rst --doctest-glob=*.md + """), + ) + + # Create conftest with doctest_namespace fixture + pytester.makeconftest( + textwrap.dedent(""" + import asyncio + import pytest + + @pytest.fixture(autouse=True) + def add_doctest_fixtures(doctest_namespace): + async def async_add(a, b): + await asyncio.sleep(0) + return a + b + doctest_namespace["async_add"] = async_add + doctest_namespace["asyncio"] = asyncio + """), + ) + + # Create test file using the fixture + test_content = _rst_content("""\ +>>> result = await async_add(2, 3) +>>> result +5""") + + test_file = pytester.path / "test.rst" + test_file.write_text(test_content) + + result = pytester.runpytest(str(test_file)) + result.assert_outcomes(passed=1) + + +def test_async_doctest_plugin_failure_reporting( + pytester: _pytest.pytester.Pytester, +) -> None: + """Test that async doctest failures are properly reported.""" + pytester.plugins = ["pytest_doctest_docutils"] + pytester.makefile( + ".ini", + pytest=textwrap.dedent(""" + [pytest] + addopts = --doctest-glob=*.rst + """), + ) + + # Create test file with intentional failure + test_content = _rst_content("""\ +>>> import asyncio +>>> async def wrong(): +... await asyncio.sleep(0) +... return 999 +>>> await wrong() +42""") + + test_file = pytester.path / "test.rst" + test_file.write_text(test_content) + + result = pytester.runpytest(str(test_file)) + result.assert_outcomes(failed=1) + + +# ============================================================================= +# Tests: Direct AsyncDocTestRunner usage +# ============================================================================= + + +def test_async_runner_state_persists(tmp_path: pathlib.Path) -> None: + """Test that state persists across async examples in same DocTest block.""" + doc = tmp_path / "test.rst" + doc.write_text("""\ +>>> import asyncio +>>> counter = 0 +>>> async def increment(): +... global counter +... await asyncio.sleep(0) +... counter += 1 +>>> await increment() +>>> await increment() +>>> counter +2 +""") + + finder = DocutilsDocTestFinder() + runner = AsyncDocTestRunner(verbose=False) + + text = doc.read_text() + for test in finder.find(text, str(doc)): + result = runner.run(test) + assert result.failed == 0, "State should persist across examples" + + +def test_async_runner_handles_sync_and_async(tmp_path: pathlib.Path) -> None: + """Test that sync and async code both work correctly.""" + doc = tmp_path / "test.rst" + doc.write_text("""\ +>>> sync_value = 'hello' +>>> import asyncio +>>> async def async_upper(s): +... await asyncio.sleep(0) +... return s.upper() +>>> await async_upper(sync_value) +'HELLO' +>>> sync_value +'hello' +""") + + finder = DocutilsDocTestFinder() + runner = AsyncDocTestRunner(verbose=False) + + text = doc.read_text() + for test in finder.find(text, str(doc)): + result = runner.run(test) + assert result.failed == 0, "Both sync and async should work" From cb9747532695e879c8a076bdef9700c18431748e Mon Sep 17 00:00:00 2001 From: Tony Narlock Date: Mon, 29 Dec 2025 10:19:23 -0600 Subject: [PATCH 06/10] docs(CHANGES): Add async doctest support release notes for #59 why: Document new features and breaking changes for upcoming release what: - Add AsyncDocTestRunner feature with before/after code examples - Note pytest plugin integration - Document testdocutils -> run_doctest_docutils rename as breaking change --- CHANGES | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/CHANGES b/CHANGES index d97d696..4a88ad3 100644 --- a/CHANGES +++ b/CHANGES @@ -30,6 +30,47 @@ $ uvx --from 'gp-libs' --prerelease allow gp-libs +### Features + +#### doctest_docutils + +- Add `AsyncDocTestRunner` for transparent top-level await support in doctests (#59) + + Enables clean async doctests without `asyncio.run()` boilerplate: + + ```python + # Before (required boilerplate) + >>> async def example(): + ... await asyncio.sleep(0) + ... return 42 + >>> asyncio.run(example()) + 42 + + # After (top-level await) + >>> await asyncio.sleep(0) + >>> async def fetch(): + ... return 42 + >>> await fetch() + 42 + ``` + + Uses Python's `PyCF_ALLOW_TOP_LEVEL_AWAIT` compile flag (3.8+) with automatic + detection - sync doctests work unchanged, async doctests "just work." + + Includes `_Runner310` shim for Python 3.10 compatibility (asyncio.Runner is 3.11+). + +#### pytest_doctest_docutils + +- Integrate `AsyncDocTestRunner` for async doctest support in pytest (#59) + +### Breaking changes + +#### doctest_docutils + +- Rename `testdocutils()` → `run_doctest_docutils()` (#59) + + The function was renamed to avoid pytest automatically collecting it as a test. + ### Development - Migrate from Make to just for task running (#60) From 739bfcc56eccb30212ed390a60960fc79a2b4125 Mon Sep 17 00:00:00 2001 From: Tony Narlock Date: Mon, 29 Dec 2025 11:22:00 -0600 Subject: [PATCH 07/10] docs(docstrings): Add NumPy-style docstrings to async support code why: Comply with project coding standards requiring NumPy docstrings what: - Add Parameters/Returns sections to _rst_content, _md_content helpers - Add docstrings to _Runner310.__init__, __enter__, __exit__ methods --- src/doctest_docutils.py | 29 +++++++++++++++++++++++++++++ tests/test_async_doctest.py | 26 ++++++++++++++++++++++++-- 2 files changed, 53 insertions(+), 2 deletions(-) diff --git a/src/doctest_docutils.py b/src/doctest_docutils.py index 8eb814e..5430014 100644 --- a/src/doctest_docutils.py +++ b/src/doctest_docutils.py @@ -56,11 +56,27 @@ def __init__( debug: bool | None = None, loop_factory: t.Callable[[], asyncio.AbstractEventLoop] | None = None, ) -> None: + """Initialize the async runner. + + Parameters + ---------- + debug : bool | None, optional + Enable event loop debug mode, by default None + loop_factory : Callable[[], AbstractEventLoop] | None, optional + Factory function to create custom event loops, by default None + """ self._debug = debug self._loop_factory = loop_factory self._loop: asyncio.AbstractEventLoop | None = None def __enter__(self) -> _Runner310: + """Enter the context and create the event loop. + + Returns + ------- + _Runner310 + Self, for use in with-statement + """ if self._loop_factory is None: loop = asyncio.new_event_loop() else: @@ -84,6 +100,19 @@ def __exit__( exc_val: BaseException | None, exc_tb: t.Any, ) -> None: + """Exit the context and clean up the event loop. + + Cancels pending tasks, shuts down async generators, and closes the loop. + + Parameters + ---------- + exc_type : type[BaseException] | None + Exception type if an exception was raised + exc_val : BaseException | None + Exception instance if an exception was raised + exc_tb : Any + Traceback if an exception was raised + """ loop = self._loop if loop is None: return diff --git a/tests/test_async_doctest.py b/tests/test_async_doctest.py index 39a5f4e..4ce5da0 100644 --- a/tests/test_async_doctest.py +++ b/tests/test_async_doctest.py @@ -36,7 +36,18 @@ class AsyncDoctestFixture(t.NamedTuple): def _rst_content(body: str) -> str: - """Wrap doctest body in RST format.""" + """Wrap doctest body in RST format. + + Parameters + ---------- + body : str + The doctest example code to embed + + Returns + ------- + str + RST document with title and doctest body + """ return f"""\ Test ==== @@ -46,7 +57,18 @@ def _rst_content(body: str) -> str: def _md_content(body: str) -> str: - """Wrap doctest body in Markdown format.""" + """Wrap doctest body in Markdown format. + + Parameters + ---------- + body : str + The doctest example code to embed + + Returns + ------- + str + Markdown document with heading and fenced code block + """ return f"""\ # Test From d4926bb8496b409d045e84de221ec94a60bb24f0 Mon Sep 17 00:00:00 2001 From: Tony Narlock Date: Mon, 29 Dec 2025 12:07:20 -0600 Subject: [PATCH 08/10] docs(doctest_docutils[_make_runner]): Add NumPy-style Parameters/Returns why: Comply with project docstring standards what: - Add Parameters section documenting debug and loop_factory - Add Returns section documenting return type --- src/doctest_docutils.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/src/doctest_docutils.py b/src/doctest_docutils.py index 5430014..fb36311 100644 --- a/src/doctest_docutils.py +++ b/src/doctest_docutils.py @@ -144,6 +144,18 @@ def _make_runner( Returns asyncio.Runner on Python 3.11+, or _Runner310 shim on 3.10. Both have compatible interfaces (context manager with run() method). + + Parameters + ---------- + debug : bool | None, optional + Enable event loop debug mode, by default None + loop_factory : Callable[[], AbstractEventLoop] | None, optional + Factory function to create custom event loops, by default None + + Returns + ------- + _Runner310 + Context manager with run() method for executing coroutines """ Runner = getattr(asyncio, "Runner", None) if Runner is not None: From fecd38d9c87f8c5a9a664e269cf1a80a54a6d7da Mon Sep 17 00:00:00 2001 From: Tony Narlock Date: Mon, 29 Dec 2025 12:08:12 -0600 Subject: [PATCH 09/10] docs(doctest_docutils[AsyncDocTestRunner]): Add doctest example why: Comply with project requirement for working doctests what: - Add Examples section demonstrating basic async doctest usage - Shows top-level await working transparently --- src/doctest_docutils.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/src/doctest_docutils.py b/src/doctest_docutils.py index fb36311..cab5dad 100644 --- a/src/doctest_docutils.py +++ b/src/doctest_docutils.py @@ -600,6 +600,21 @@ class AsyncDocTestRunner(doctest.DocTestRunner): state to persist across examples within the same block. Usage is identical to doctest.DocTestRunner - async support is automatic. + + Examples + -------- + >>> import doctest + >>> from doctest_docutils import AsyncDocTestRunner + >>> runner = AsyncDocTestRunner(verbose=False) + >>> test = doctest.DocTest( + ... [doctest.Example("import asyncio", ""), + ... doctest.Example("await asyncio.sleep(0)", ""), + ... doctest.Example("1 + 1", "2\\n")], + ... {}, "example", "example.py", 0, None + ... ) + >>> result = runner.run(test) + >>> result.failed + 0 """ def run( From a903a4482f5068a149b497cc5b8c3f97a9a20148 Mon Sep 17 00:00:00 2001 From: Tony Narlock Date: Mon, 29 Dec 2025 12:08:38 -0600 Subject: [PATCH 10/10] ai(rules[AGENTS]): Update async doctest pattern to top-level await why: Document the new capability enabled by AsyncDocTestRunner what: - Replace asyncio.run() boilerplate with top-level await syntax - Show that await works directly in doctests now --- AGENTS.md | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index 0dbd7c3..e926fa6 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -248,13 +248,14 @@ True >>> is_allowed_version('3.3', '>3.2, <4.0') True -**Async doctest pattern:** +**Async doctest pattern (top-level await):** ```python +>>> import asyncio +>>> await asyncio.sleep(0) # Top-level await works directly >>> async def example(): -... result = await some_async_function() -... return result ->>> asyncio.run(example()) -'expected output' +... return 42 +>>> await example() +42 ``` **Using fixtures in doctests:**