[Sphinx Extension] Add offline build support #15

Merged · 8 commits · Apr 21, 2025
46 changes: 42 additions & 4 deletions builder/build_cli.py
@@ -13,17 +13,43 @@
 import sys
 import requests
 import json
+import time
 
 # Automatically watch the following extra directories when --serve is used.
 EXTRA_WATCH_DIRS = ["exts", "themes"]
 
 SPEC_CHECKSUM_URL = "https://spec.ferrocene.dev/paragraph-ids.json"
 SPEC_LOCKFILE = "spec.lock"
 
-def build_docs(root, builder, clear, serve, debug, spec_lock_consistency_check):
+def build_docs(
+    root: Path,
+    builder: str,
+    clear: bool,
+    serve: bool,
+    debug: bool,
+    offline: bool,
+    spec_lock_consistency_check: bool
+) -> Path:
+    """
+    Builds the Sphinx documentation with the specified options.
+
+    Args:
+        root: The root directory of the documentation.
+        builder: The builder to use (e.g., 'html', 'xml').
+        clear: Whether to disable incremental builds.
+        serve: Whether to start a local server with live reload.
+        debug: Whether to enable debug mode.
+        offline: Whether to build in offline mode.
+        spec_lock_consistency_check: Whether to check spec lock consistency.
+
+    Returns:
+        Path: The path to the generated documentation.
+    """
+
     dest = root / "build"
 
     args = ["-b", builder, "-d", dest / "doctrees"]
 
     if debug:
         # Disable parallel builds and show exceptions in debug mode.
         #
@@ -42,6 +68,8 @@ def build_docs(root, builder, clear, serve, debug, spec_lock_consistency_check):
     # Add configuration options as needed
     if not spec_lock_consistency_check:
         conf_opt_values.append("enable_spec_lock_consistency=0")
+    if offline:
+        conf_opt_values.append("offline=1")
     # Only add the --define argument if there are options to define
     if conf_opt_values:
         args.append("--define")
@@ -58,6 +86,9 @@ def build_docs(root, builder, clear, serve, debug, spec_lock_consistency_check):
         args += ["-W", "--keep-going"]
 
     try:
+        # Tracking build time
+        timer_start = time.perf_counter()
         subprocess.run(
             [
                 "sphinx-autobuild" if serve else "sphinx-build",
@@ -73,6 +104,8 @@ def build_docs(root, builder, clear, serve, debug, spec_lock_consistency_check):
         print("\nhint: if you see an exception, pass --debug to see the full traceback")
         exit(1)
 
+    timer_end = time.perf_counter()
+    print(f"\nBuild finished in {timer_end - timer_start:.2f} seconds.")
     return dest / builder
 
 def update_spec_lockfile(spec_checksum_location, lockfile_location):
@@ -110,16 +143,21 @@ def main(root):
     parser.add_argument(
         "-c", "--clear", help="disable incremental builds", action="store_true"
     )
+    parser.add_argument(
+        "--offline",
+        help="build in offline mode",
+        action="store_true",
+    )
     group = parser.add_mutually_exclusive_group()
     parser.add_argument(
         "--ignore-spec-lock-diff",
-        help="ignore fls.lock file differences with live release -- for WIP branches only",
+        help="ignore spec.lock file differences with live release -- for WIP branches only",
         default=False,
         action="store_true"
     )
     parser.add_argument(
         "--update-spec-lock-file",
-        help="update fls.lock file",
+        help="update spec.lock file",
         action="store_true"
     )
     group.add_argument(
@@ -145,6 +183,6 @@ def main(root):
         update_spec_lockfile(SPEC_CHECKSUM_URL, root / "src" / SPEC_LOCKFILE)
 
     rendered = build_docs(
-        root, "xml" if args.xml else "html", args.clear, args.serve, args.debug, not args.ignore_spec_lock_diff
+        root, "xml" if args.xml else "html", args.clear, args.serve, args.debug, args.offline, not args.ignore_spec_lock_diff
    )
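A note on the mechanism above: Sphinx config values registered through `add_config_value` can be overridden per invocation from the command line, which is how `--offline` reaches the extension without editing `conf.py`. A minimal sketch of that hand-off, assuming standard `sphinx-build` override syntax (the helper below is illustrative, not code from this PR):

```python
# Illustrative sketch: turning CLI flags into Sphinx config overrides.
# The helper name and the one-pair-per-"-D" layout are assumptions;
# build_docs above batches its overrides behind a single --define.
def override_args(offline: bool, spec_lock_consistency_check: bool) -> list[str]:
    overrides = []
    if not spec_lock_consistency_check:
        overrides.append("enable_spec_lock_consistency=0")
    if offline:
        overrides.append("offline=1")
    args = []
    for pair in overrides:
        args += ["-D", pair]  # sphinx-build accepts one name=value per -D
    return args

# override_args(offline=True, spec_lock_consistency_check=False)
# -> ['-D', 'enable_spec_lock_consistency=0', '-D', 'offline=1']
```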

2 changes: 1 addition & 1 deletion exts/coding_guidelines/README.rst
@@ -23,7 +23,7 @@ Coverage of the coding guidelines over the FLS is calculated.
 Each coding guideline has its ``:fls:`` option turned into a hyperlink to the corresponding element
 within the FLS to be able to navigate there directly.
 
-Further an ``fls.lock`` file located at ``root/src/fls.lock`` is validated against the currently
+Further, a ``spec.lock`` file located at ``root/src/spec.lock`` is validated against the currently
 deployed version of the Ferrocene Language Spec, and the build fails if there is a discrepancy.
 
 Links to the Rust standard library
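The consistency check the README describes amounts to diffing a stored snapshot of the spec against the live `paragraph-ids.json`. A rough sketch of the idea, under the assumption that spec items carry per-ID checksums (the `"checksums"` field name is hypothetical, not the extension's actual schema):

```python
# Hypothetical sketch of the spec.lock consistency idea: report FLS items
# whose checksums differ between the lock file snapshot and the live spec.
# The "checksums" field name is an assumption for illustration.
import json

def changed_fls_items(lock_text: str, live_spec: dict) -> list[str]:
    locked = json.loads(lock_text)
    live = live_spec.get("checksums", {})
    return [
        item_id
        for item_id, checksum in locked.get("checksums", {}).items()
        if live.get(item_id) != checksum
    ]
```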
5 changes: 5 additions & 0 deletions exts/coding_guidelines/__init__.py
@@ -35,6 +35,11 @@ def merge_domaindata(self, docnames, other):
 def setup(app):
 
     app.add_domain(CodingGuidelinesDomain)
+    app.add_config_value(
+        name="offline",
+        default=False,
+        rebuild="env",
+    )  # register the offline option
     app.add_config_value(
         name="spec_std_docs_url",
         default="https://doc.rust-lang.org/stable/std",
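Once registered, the flag is readable wherever the Sphinx application or build environment is available (`app.config.offline` / `env.config.offline`), and `rebuild="env"` tells Sphinx to invalidate cached doctrees when the value changes between builds. A small sketch of the consuming side; `builder-inited` is a real Sphinx event, but wiring it up this way is illustrative rather than part of this PR, which reads the flag from the FLS checks instead:

```python
# Sketch: reading the registered "offline" config value from an event
# handler. The handler is illustrative; this PR reads the flag from
# check_fls and gather_fls_paragraph_ids instead.
def on_builder_inited(app):
    if app.config.offline:
        print("offline build: network fetches will be skipped")

def setup(app):
    app.add_config_value(name="offline", default=False, rebuild="env")
    app.connect("builder-inited", on_builder_inited)
```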
77 changes: 45 additions & 32 deletions exts/coding_guidelines/fls_checks.py
@@ -20,28 +20,27 @@ def check_fls(app, env):
     """Main checking function for FLS validation"""
     # First make sure all guidelines have correctly formatted FLS IDs
     check_fls_exists_and_valid_format(app, env)
+    offline_mode = env.config.offline
 
     # Gather all FLS paragraph IDs from the specification and get the raw JSON
-    fls_ids, raw_json_data = gather_fls_paragraph_ids(fls_paragraph_ids_url)
-
+    fls_ids, raw_json_data = gather_fls_paragraph_ids(app, fls_paragraph_ids_url)
     # Error out if we couldn't get the raw JSON data
     if not raw_json_data:
         error_message = f"Failed to retrieve or parse the FLS specification from {fls_paragraph_ids_url}"
         logger.error(error_message)
         raise FLSValidationError(error_message)
 
-    # Check for differences against lock file
-    has_differences, differences = check_fls_lock_consistency(app, env, raw_json_data)
-    if has_differences:
-        error_message = "The FLS specification has changed since the lock file was created:\n"
-        for diff in differences:
-            error_message += f" - {diff}\n"
-        error_message += "\nPlease manually inspect FLS spec items whose checksums have changed as corresponding guidelines may need to account for these changes."
-        error_message += "\nOnce resolved, you may run the following to update the local spec lock file:"
-        error_message += "\n\t./make.py --update-spec-lock-file"
-        logger.error(error_message)
-        raise FLSValidationError(error_message)
-
+    if not offline_mode:  # in offline mode, skip checking against the lock file
+        # Check for differences against lock file
+        has_differences, differences = check_fls_lock_consistency(app, env, raw_json_data)
+        if has_differences:
+            error_message = "The FLS specification has changed since the lock file was created:\n"
+            for diff in differences:
+                error_message += f" - {diff}\n"
+            error_message += "\nPlease manually inspect FLS spec items whose checksums have changed as corresponding guidelines may need to account for these changes."
+            error_message += "\nOnce resolved, you may run the following to update the local spec lock file:"
+            error_message += "\n\t./make.py --update-spec-lock-file"
+            logger.error(error_message)
+            raise FLSValidationError(error_message)
     # Check if all referenced FLS IDs exist
     check_fls_ids_correct(app, env, fls_ids)
 
@@ -154,37 +153,51 @@ def check_fls_ids_correct(app, env, fls_ids):
logger.info("All FLS references in guidelines are valid")


def gather_fls_paragraph_ids(json_url):
def gather_fls_paragraph_ids(app, json_url):
"""
Gather all Ferrocene Language Specification paragraph IDs from the paragraph-ids.json file,
including both container section IDs and individual paragraph IDs.
Gather all Ferrocene Language Specification paragraph IDs from the paragraph-ids.json file
or from the lock file in offline mode, including both container section IDs and individual paragraph IDs.

Args:
app: The Sphinx application
json_url: The URL or path to the paragraph-ids.json file

Returns:
Dictionary mapping paragraph IDs to metadata AND the complete raw JSON data
"""
logger.info("Gathering FLS paragraph IDs from %s", json_url)
offline = app.config.offline
lock_path = app.confdir / 'spec.lock'

# Dictionary to store all FLS IDs and their metadata
all_fls_ids = {}
raw_json_data = None

try:
# Load the JSON file
response = requests.get(json_url)
response.raise_for_status() # Raise exception for HTTP errors

# Parse the JSON data
try:
raw_json_data = response.json()
data = raw_json_data # Keep reference to the original data
logger.debug("Successfully parsed JSON data")
except json.JSONDecodeError as e:
logger.error(f"Failed to parse JSON: {e}")
logger.debug(f"Response content preview: {response.text[:500]}...")
raise
if not offline:
logger.info("Gathering FLS paragraph IDs from %s", json_url)
response = requests.get(json_url)
response.raise_for_status() # Raise exception for HTTP errors
# Parse the JSON data
try:
raw_json_data = response.json()
data = raw_json_data # Keep reference to the original data
logger.debug("Successfully parsed JSON data")
except json.JSONDecodeError as e:
logger.error(f"Failed to parse JSON: {e}")
logger.debug(f"Response content preview: {response.text[:500]}...")
raise

else : # if online mode is on read from the lock file

if not lock_path.exists():
logger.warning(f"No FLS lock file found at {lock_path}") # TODO: returns an error
return False, []
logger.info("Gathering FLS paragraph IDs from lock file: %s", lock_path)
with open(lock_path, 'r', encoding='utf-8') as f:
raw_json_data=f.read()
data = json.loads(raw_json_data)


# Check if we have the expected document structure
if 'documents' not in data:
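One detail worth keeping in mind when reading the branch above: online, `raw_json_data` is the dict parsed from `response.json()`; offline, it is the raw text of `spec.lock`. Both are truthy on success, which is all the `if not raw_json_data` guard in `check_fls` depends on. A standalone sketch of just the offline path (the path mirrors `app.confdir / 'spec.lock'` from the diff; the function itself is illustrative):

```python
# Standalone sketch of the offline branch: load the cached spec from
# spec.lock instead of fetching paragraph-ids.json over the network.
import json
from pathlib import Path

def load_spec_offline(confdir: Path):
    lock_path = confdir / "spec.lock"
    if not lock_path.exists():
        return {}, None  # mirrors the "no lock file" early return above
    raw = lock_path.read_text(encoding="utf-8")
    return json.loads(raw), raw  # (parsed data, raw text), like (data, raw_json_data)
```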
2 changes: 1 addition & 1 deletion exts/coding_guidelines/fls_linking.py
@@ -26,7 +26,7 @@ def load_fls_ids(app):
     """Load FLS IDs and their URLs."""
     try:
         from . import fls_checks
-        fls_ids, _ = fls_checks.gather_fls_paragraph_ids(app.config.fls_paragraph_ids_url)
+        fls_ids, _ = fls_checks.gather_fls_paragraph_ids(app, app.config.fls_paragraph_ids_url)
         return {fls_id: data['url'] for fls_id, data in fls_ids.items()}
     except Exception as e:
         logger.error(f"Failed to load FLS IDs: {e}")
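For context, the mapping returned here feeds the hyperlinking the README describes: each guideline's ``:fls:`` ID resolves to a URL in the deployed spec. A hedged sketch of what consuming that mapping can look like (the docutils node construction is illustrative, not this extension's exact code):

```python
# Illustrative: turn one {fls_id: url} entry from load_fls_ids into a
# docutils reference node, the building block for an ``:fls:`` hyperlink.
from docutils import nodes

def make_fls_link(fls_id: str, fls_urls: dict) -> nodes.reference:
    url = fls_urls[fls_id]  # KeyError here would mean an unknown FLS ID
    return nodes.reference("", fls_id, refuri=url, internal=False)
```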