diff --git a/.gitignore b/.gitignore index e381476..8f67b1d 100644 --- a/.gitignore +++ b/.gitignore @@ -11,3 +11,4 @@ target src/security/alerts*.json src/security/.coverage .coverage +output.txt diff --git a/src/security/utils/issue_sync.py b/src/security/utils/issue_sync.py index 21429b9..e222546 100644 --- a/src/security/utils/issue_sync.py +++ b/src/security/utils/issue_sync.py @@ -33,6 +33,7 @@ gh_issue_edit_body, gh_issue_edit_state, gh_issue_edit_title, + gh_issue_get_sub_issue_numbers, ) from shared.github_projects import ProjectPrioritySync, gh_project_get_priority_field from shared.models import Issue @@ -119,11 +120,16 @@ def maybe_reopen_parent_issue( if dry_run: logging.info( - f"DRY-RUN: would reopen parent issue #{parent_issue.number} (rule_id={rule_id}) " - f"due_to={context} child={child_issue_number or ''}".rstrip() + "DRY-RUN: would reopen parent issue #%d (rule_id=%s) due_to=%s child=%s", + parent_issue.number, + rule_id, + context, + child_issue_number or "", ) logging.info( - f"DRY-RUN: would comment parent reopen sec-event on issue #{parent_issue.number} (rule_id={rule_id})" + "DRY-RUN: would comment parent reopen sec-event on issue #%d (rule_id=%s)", + parent_issue.number, + rule_id, ) parent_issue.state = "open" return @@ -182,21 +188,25 @@ def _close_resolved_parent_issues( if not repo and child_issues: repo = load_secmeta(child_issues[0].body).get("repo", "").strip() if not repo: - logging.debug(f"Skip closing parent issue #{parent_issue.number}: no repo in secmeta") + logging.debug("Skip closing parent issue #%d: no repo in secmeta", parent_issue.number) continue if dry_run: logging.info( - f"DRY-RUN: would close parent issue #{parent_issue.number} (rule_id={rule_id}) " - f"because all {len(child_issues)} child issue(s) are closed" + "DRY-RUN: would close parent issue #%d (rule_id=%s) because all %d child issue(s) are closed", + parent_issue.number, + rule_id, + len(child_issues), ) parent_issue.state = "closed" continue if 
gh_issue_edit_state(repo, parent_issue.number, "closed"): logging.info( - f"Closed parent issue #{parent_issue.number} (rule_id={rule_id}) " - f"because all {len(child_issues)} child issue(s) are closed" + "Closed parent issue #%d (rule_id=%s) because all %d child issue(s) are closed", + parent_issue.number, + rule_id, + len(child_issues), ) parent_issue.state = "closed" @@ -242,8 +252,11 @@ def ensure_parent_issue( ) if dry_run: logging.info( - f"DRY-RUN: severity change on parent #{existing.number} " - f"(rule_id={rule_id}): {existing_severity_cmp} \u2192 {incoming_severity_cmp}" + "DRY-RUN: severity change on parent #%d (rule_id=%s): %s \u2192 %s", + existing.number, + rule_id, + existing_severity_cmp, + incoming_severity_cmp, ) if severity_changes is not None: severity_changes.append(change) @@ -287,8 +300,10 @@ def ensure_parent_issue( if expected_title != (existing.title or ""): if dry_run: logging.info( - f"DRY-RUN: would update parent issue #{existing.number} title " - f"from {existing.title!r} to {expected_title!r}" + "DRY-RUN: would update parent issue #%d title from %r to %r", + existing.number, + existing.title, + expected_title, ) else: if gh_issue_edit_title(repo_full, existing.number, expected_title): @@ -303,7 +318,7 @@ def ensure_parent_issue( body = build_parent_issue_body(alert) labels = [LABEL_SCOPE_SECURITY, LABEL_TYPE_TECH_DEBT, LABEL_EPIC] if dry_run: - logging.info(f"DRY-RUN: create parent rule_id={rule_id} title={title!r} labels={labels}") + logging.info("DRY-RUN: create parent rule_id=%s title=%r labels=%s", rule_id, title, labels) if logging.getLogger().isEnabledFor(logging.DEBUG): logging.debug("DRY-RUN: body_preview_begin") logging.debug(body) @@ -316,7 +331,7 @@ def ensure_parent_issue( # Parent lifecycle event (human visible): opened/created. 
if dry_run: - logging.info(f"DRY-RUN: would comment parent open sec-event on issue #{num} (rule_id={rule_id})") + logging.info("DRY-RUN: would comment parent open sec-event on issue #%d (rule_id=%s)", num, rule_id) else: gh_issue_comment( repo_full, @@ -335,7 +350,7 @@ def ensure_parent_issue( created = Issue(number=num, state="open", title=title, body=body) issues[num] = created index.parent_by_rule_id[rule_id] = created - logging.info(f"Created parent issue #{num} for rule_id={rule_id}") + logging.info("Created parent issue #%d for rule_id=%s", num, rule_id) if priority_sync is not None: priority_sync.enqueue( @@ -411,20 +426,33 @@ def _handle_new_child_issue( loc = f"{ctx.path}:{ctx.start_line or ''}".rstrip(":") commit_short = ctx.commit_sha[:8] if ctx.commit_sha else "" logging.info( - "DRY-RUN: create child " - f"alert={ctx.alert_number} rule_id={ctx.rule_id} sev={ctx.severity}" - f" fp={ctx.fingerprint[:8]} tool={ctx.tool} commit={commit_short}" - f" loc={loc} title={title!r} labels=[{','.join(labels)}]" - f" | secmeta:first_seen={ctx.first_seen} last_seen={ctx.last_seen}" - f" occurrence_count=1 gh_alert_numbers=[{ctx.alert_number}]" + "DRY-RUN: create child alert=%d rule_id=%s sev=%s" + " fp=%s tool=%s commit=%s loc=%s title=%r labels=[%s]" + " | secmeta:first_seen=%s last_seen=%s occurrence_count=1 gh_alert_numbers=[%d]", + ctx.alert_number, + ctx.rule_id, + ctx.severity, + ctx.fingerprint[:8], + ctx.tool, + commit_short, + loc, + title, + ",".join(labels), + ctx.first_seen, + ctx.last_seen, + ctx.alert_number, ) if parent_issue is None and ctx.rule_id: logging.info( - f"DRY-RUN: add sub-issue link parent_rule_id={ctx.rule_id} child=(new) alert={ctx.alert_number}" + "DRY-RUN: add sub-issue link parent_rule_id=%s child=(new) alert=%d", + ctx.rule_id, + ctx.alert_number, ) elif parent_issue is not None: logging.info( - f"DRY-RUN: add sub-issue link parent=#{parent_issue.number} child=(new) alert={ctx.alert_number}" + "DRY-RUN: add sub-issue link parent=#%d 
child=(new) alert=%d", + parent_issue.number, + ctx.alert_number, ) if logging.getLogger().isEnabledFor(logging.DEBUG): logging.debug("DRY-RUN: body_preview_begin") @@ -448,7 +476,7 @@ def _handle_new_child_issue( if num is None: return - logging.info(f"Created issue #{num} for alert {ctx.alert_number} (fp={ctx.fingerprint[:8]})") + logging.info("Created issue #%d for alert %d (fp=%s)", num, ctx.alert_number, ctx.fingerprint[:8]) created = Issue(number=num, state="open", title=title, body=body) sync.issues[num] = created sync.index.by_fingerprint[ctx.fingerprint] = created @@ -472,7 +500,7 @@ def _handle_new_child_issue( context="new_child", child_issue_number=num, ) - logging.info(f"Add sub-issue link parent=#{parent_issue.number} child=#{num} (alert {ctx.alert_number})") + logging.info("Add sub-issue link parent=#%d child=#%d (alert %d)", parent_issue.number, num, ctx.alert_number) gh_issue_add_sub_issue_by_number(ctx.repo, parent_issue.number, num) gh_issue_comment( @@ -514,10 +542,10 @@ def _maybe_reopen_child( reopened = False if sync.dry_run: reopened = True - logging.info(f"DRY-RUN: would reopen issue #{issue.number} (alert {ctx.alert_number})") + logging.info("DRY-RUN: would reopen issue #%d (alert %d)", issue.number, ctx.alert_number) elif gh_issue_edit_state(ctx.repo, issue.number, "open"): reopened = True - logging.info(f"Reopened issue #{issue.number} (alert {ctx.alert_number})") + logging.info("Reopened issue #%d (alert %d)", issue.number, ctx.alert_number) if reopened: maybe_reopen_parent_issue( @@ -613,7 +641,7 @@ def _rebuild_and_apply_child_body( if new_body != issue.body: if sync.dry_run: - logging.info(f"DRY-RUN: would update issue #{issue.number} body to template (alert {ctx.alert_number})") + logging.info("DRY-RUN: would update issue #%d body to template (alert %d)", issue.number, ctx.alert_number) if logging.getLogger().isEnabledFor(logging.DEBUG): logging.debug("DRY-RUN: body_preview_begin") logging.debug(new_body) @@ -633,7 +661,7 @@ def 
_comment_child_event( """Post a reopen sec-event comment on the child issue.""" if reopened: if sync.dry_run: - logging.info(f"DRY-RUN: would comment reopen event on issue #{issue.number} (alert {ctx.alert_number})") + logging.info("DRY-RUN: would comment reopen event on issue #%d (alert %d)", issue.number, ctx.alert_number) else: gh_issue_comment( ctx.repo, @@ -660,7 +688,10 @@ def _sync_child_title_and_labels( if expected_title != (issue.title or ""): if sync.dry_run: logging.info( - f"DRY-RUN: would update issue #{issue.number} title " f"from {issue.title!r} to {expected_title!r}" + "DRY-RUN: would update issue #%d title from %r to %r", + issue.number, + issue.title, + expected_title, ) else: if gh_issue_edit_title(ctx.repo, issue.number, expected_title): @@ -668,8 +699,10 @@ def _sync_child_title_and_labels( if sync.dry_run: logging.info( - f"DRY-RUN: would ensure labels on issue #{issue.number}: " - f"[{LABEL_SCOPE_SECURITY}, {LABEL_TYPE_TECH_DEBT}]" + "DRY-RUN: would ensure labels on issue #%d: [%s, %s]", + issue.number, + LABEL_SCOPE_SECURITY, + LABEL_TYPE_TECH_DEBT, ) else: gh_issue_add_labels(ctx.repo, issue.number, [LABEL_SCOPE_SECURITY, LABEL_TYPE_TECH_DEBT]) @@ -678,6 +711,41 @@ def _sync_child_title_and_labels( sync.priority_sync.enqueue(ctx.repo, issue.number, ctx.severity, sync.severity_priority_map) +def _ensure_child_linked_to_parent( + *, + ctx: AlertContext, + sync: SyncContext, + issue: Issue, + parent_issue: Issue, +) -> None: + """Detect and repair a missing parent-to-child sub-issue link.""" + cache = sync.parent_sub_issues_cache + if parent_issue.number not in cache: + cache[parent_issue.number] = gh_issue_get_sub_issue_numbers(ctx.repo, parent_issue.number) + + if issue.number in cache[parent_issue.number]: + return + + if sync.dry_run: + logging.info( + "DRY-RUN: would add missing sub-issue link parent=#%d child=#%d (rule_id=%s)", + parent_issue.number, + issue.number, + ctx.rule_id, + ) + cache[parent_issue.number].add(issue.number) + 
return + + logging.info( + "Adding missing sub-issue link parent=#%d child=#%d (rule_id=%s)", + parent_issue.number, + issue.number, + ctx.rule_id, + ) + if gh_issue_add_sub_issue_by_number(ctx.repo, parent_issue.number, issue.number): + cache[parent_issue.number].add(issue.number) + + def _handle_existing_child_issue( *, ctx: AlertContext, @@ -695,16 +763,14 @@ def _handle_existing_child_issue( _comment_child_event(ctx=ctx, sync=sync, issue=issue, reopened=reopened) _sync_child_title_and_labels(ctx=ctx, sync=sync, issue=issue) + if parent_issue is not None: + _ensure_child_linked_to_parent(ctx=ctx, sync=sync, issue=issue, parent_issue=parent_issue) + def ensure_issue( alert: Alert, - issues: dict[int, Issue], - index: IssueIndex, + sync: SyncContext, *, - dry_run: bool = False, - notifications: list[NotifiedIssue] | None = None, - severity_priority_map: dict[str, str] | None = None, - priority_sync: ProjectPrioritySync | None = None, severity_changes: list[SeverityChange] | None = None, parent_original_bodies: dict[int, tuple[str, str]] | None = None, ) -> None: @@ -715,7 +781,7 @@ def ensure_issue( if alert_state and alert_state != "open": # This script is designed to process open alerts only! # Input is typically produced by collect_alert.py with --state open (default). 
- logging.debug(f"Skip alert {alert_number}: state={alert_state!r} (only 'open' processed)") + logging.debug("Skip alert %d: state=%r (only 'open' processed)", alert_number, alert_state) return rule_id = alert.metadata.rule_id @@ -740,20 +806,18 @@ def ensure_issue( first_seen = iso_date(alert.metadata.created_at) last_seen = iso_date(alert.metadata.updated_at) - _spm = severity_priority_map or {} - parent_issue = ensure_parent_issue( alert, - issues, - index, - dry_run=dry_run, - severity_priority_map=_spm, - priority_sync=priority_sync, + sync.issues, + sync.index, + dry_run=sync.dry_run, + severity_priority_map=sync.severity_priority_map, + priority_sync=sync.priority_sync, severity_changes=severity_changes, parent_original_bodies=parent_original_bodies, ) matched = find_issue_in_index( - index, + sync.index, fingerprint=fingerprint, ) @@ -775,20 +839,12 @@ def ensure_issue( end_line=end_line, commit_sha=commit_sha, ) - sync_ctx = SyncContext( - issues=issues, - index=index, - dry_run=dry_run, - notifications=notifications, - severity_priority_map=_spm, - priority_sync=priority_sync, - ) if matched is None: - _handle_new_child_issue(ctx=ctx, sync=sync_ctx, parent_issue=parent_issue) + _handle_new_child_issue(ctx=ctx, sync=sync, parent_issue=parent_issue) return - _handle_existing_child_issue(ctx=ctx, sync=sync_ctx, issue=matched, parent_issue=parent_issue) + _handle_existing_child_issue(ctx=ctx, sync=sync, issue=matched, parent_issue=parent_issue) def _init_priority_sync( @@ -816,7 +872,7 @@ def _init_priority_sync( pf = gh_project_get_priority_field(org, project_number) if pf is None: - logging.warning(f"Could not load project #{project_number} metadata – priority sync disabled") + logging.warning("Could not load project #%d metadata – priority sync disabled", project_number) return None return ProjectPrioritySync(org, project_number, pf, dry_run=dry_run) @@ -835,7 +891,7 @@ def _flush_parent_body_updates( continue if issue.body != original_body: if dry_run: - 
logging.info(f"DRY-RUN: would update parent issue #{num} body to template") + logging.info("DRY-RUN: would update parent issue #%d body to template", num) if logging.getLogger().isEnabledFor(logging.DEBUG): logging.debug("DRY-RUN: body_preview_begin") logging.debug(issue.body) @@ -864,29 +920,35 @@ def _label_orphan_issues( logging.debug("No orphan child issues detected \u2013 skipping sec:adept-to-close labelling") return - logging.info(f"Detected {len(orphan_fps)} orphan child issue(s) (open issue without matching alert)") + logging.info("Detected %d orphan child issue(s) (open issue without matching alert)", len(orphan_fps)) for fp in orphan_fps: issue = index.by_fingerprint[fp] repo = load_secmeta(issue.body).get("repo", "") if not repo: - logging.debug(f"Skip orphan labelling for issue #{issue.number}: no repo in secmeta") + logging.debug("Skip orphan labelling for issue #%d: no repo in secmeta", issue.number) continue if issue.labels and LABEL_SEC_ADEPT_TO_CLOSE in issue.labels: logging.debug( - f"Label {LABEL_SEC_ADEPT_TO_CLOSE!r} already on issue #{issue.number} " - f"(fingerprint={fp[:12]}…) – skipping" + "Label %r already on issue #%d (fingerprint=%s\u2026) \u2013 skipping", + LABEL_SEC_ADEPT_TO_CLOSE, + issue.number, + fp[:12], ) continue if dry_run: logging.info( - f"DRY-RUN: would add label {LABEL_SEC_ADEPT_TO_CLOSE!r} " - f"to issue #{issue.number} (fingerprint={fp[:12]}\u2026) \u2013 no matching open alert" + "DRY-RUN: would add label %r to issue #%d (fingerprint=%s\u2026) \u2013 no matching open alert", + LABEL_SEC_ADEPT_TO_CLOSE, + issue.number, + fp[:12], ) else: logging.info( - f"Adding label {LABEL_SEC_ADEPT_TO_CLOSE!r} to issue #{issue.number} " - f"(fingerprint={fp[:12]}…) – no matching open alert" + "Adding label %r to issue #%d (fingerprint=%s\u2026) \u2013 no matching open alert", + LABEL_SEC_ADEPT_TO_CLOSE, + issue.number, + fp[:12], ) gh_issue_add_labels(repo, issue.number, [LABEL_SEC_ADEPT_TO_CLOSE]) @@ -916,15 +978,19 @@ def 
sync_alerts_and_issues( dry_run=dry_run, ) + sync = SyncContext( + issues=issues, + index=index, + dry_run=dry_run, + notifications=notifications, + severity_priority_map=spm, + priority_sync=priority_sync, + ) + for alert in alerts.values(): ensure_issue( alert, - issues, - index, - dry_run=dry_run, - notifications=notifications, - severity_priority_map=severity_priority_map, - priority_sync=priority_sync, + sync, severity_changes=severity_changes, parent_original_bodies=parent_original_bodies, ) diff --git a/src/security/utils/models.py b/src/security/utils/models.py index c495db7..3ba4bc0 100644 --- a/src/security/utils/models.py +++ b/src/security/utils/models.py @@ -247,3 +247,4 @@ class SyncContext: notifications: list[NotifiedIssue] | None severity_priority_map: dict[str, str] priority_sync: ProjectPrioritySync | None + parent_sub_issues_cache: dict[int, set[int]] = field(default_factory=dict) diff --git a/src/shared/github_issues.py b/src/shared/github_issues.py index 00a5604..8d31966 100644 --- a/src/shared/github_issues.py +++ b/src/shared/github_issues.py @@ -22,21 +22,66 @@ import json import logging import re +import subprocess +import time from .common import run_gh from .models import Issue +_NOT_FOUND_MARKERS = ( + "HTTP 404", + "Not Found", + "Could not resolve to an issue or pull request", +) + + +def _is_not_found_error(res: subprocess.CompletedProcess[str]) -> bool: + """Return ``True`` when *res* contains a GitHub 404 / not-found indicator.""" + combined = (res.stderr or "") + (res.stdout or "") + return any(marker in combined for marker in _NOT_FOUND_MARKERS) + + +def _not_found_hint(res: subprocess.CompletedProcess[str]) -> str: + """Return a log context hint when the error looks like a missing/stale issue.""" + return " (issue may no longer exist – deleted or transferred)" if _is_not_found_error(res) else "" + + +def _gh_with_retry(args: list[str], *, retries: int = 3, backoff_base: float = 2.0) -> subprocess.CompletedProcess[str]: + """Run 
a ``gh`` command, retrying up to *retries* times on 404 responses. + + Waits ``backoff_base ** attempt`` seconds between attempts (2 s, 4 s, 8 s by + default) to tolerate GitHub API replication lag immediately after issue creation. + """ + res = run_gh(args) + for attempt in range(1, retries + 1): + if res.returncode == 0 or not _is_not_found_error(res): + break + wait = backoff_base**attempt + logging.debug( + "gh 404 on attempt %d/%d, retrying in %.0fs (cmd=%s)", + attempt, + retries, + wait, + " ".join(str(a) for a in args[:3]), + ) + time.sleep(wait) + res = run_gh(args) + return res + def gh_issue_get_rest_id(repo: str, number: int) -> int | None: - """Fetch the REST API numeric ID for issue *number*.""" - res = run_gh(["api", f"repos/{repo}/issues/{number}", "--jq", ".id"]) + """Fetch the REST API numeric ID for issue *number*. + + Retries on 404 to tolerate GitHub API replication lag after issue creation. + """ + res = _gh_with_retry(["api", f"repos/{repo}/issues/{number}", "--jq", ".id"]) if res.returncode != 0: - logging.warning(f"Failed to fetch REST issue id for #{number}: {res.stderr}") + logging.warning("Failed to fetch REST issue id for #%d%s: %s", number, _not_found_hint(res), res.stderr) return None try: return int((res.stdout or "").strip()) - except Exception: - logging.warning(f"Failed to parse REST issue id for #{number}: {res.stdout!r}") + except ValueError: + logging.warning("Failed to parse REST issue id for #%d: %r", number, res.stdout) return None @@ -54,8 +99,12 @@ def gh_issue_add_sub_issue(repo: str, parent_number: int, sub_issue_id: int) -> ) if res.returncode != 0: - logging.warning( - f"Failed to add sub-issue link parent=#{parent_number} sub_issue_id={sub_issue_id}: {res.stderr}" + logging.error( + "Failed to add sub-issue link parent=#%d sub_issue_id=%d%s: %s", + parent_number, + sub_issue_id, + _not_found_hint(res), + res.stderr, ) return False @@ -72,6 +121,32 @@ def gh_issue_add_sub_issue_by_number(repo: str, parent_number: int, 
child_number return gh_issue_add_sub_issue(repo, parent_number, child_id) +def gh_issue_get_sub_issue_numbers(repo: str, parent_number: int) -> set[int]: + """Return the set of child issue numbers currently linked to *parent_number*. + + Retries on 404 to tolerate GitHub API replication lag after issue creation. + Uses ``--paginate`` to handle parents with more than 30 sub-issues. + """ + res = _gh_with_retry( + [ + "api", + "--paginate", + f"repos/{repo}/issues/{parent_number}/sub_issues", + "--jq", + "[.[].number]", + ] + ) + if res.returncode != 0: + logging.error("Failed to list sub-issues for parent #%d%s: %s", parent_number, _not_found_hint(res), res.stderr) + return set() + try: + numbers = json.loads((res.stdout or "").strip() or "[]") + return {int(n) for n in numbers} + except (json.JSONDecodeError, ValueError): + logging.error("Failed to parse sub-issues for parent #%d: %r", parent_number, res.stdout) + return set() + + def gh_issue_list_by_label(repo: str, label: str) -> dict[int, Issue]: """Load issues with a given label. 
@@ -100,19 +175,19 @@ def gh_issue_list_by_label(repo: str, label: str) -> dict[int, Issue]: ) if res.returncode != 0: - logging.error(f"gh issue list by label failed: {res.stderr}") + logging.error("gh issue list by label failed: %s", res.stderr) return {} try: items = json.loads(res.stdout or "[]") - except Exception: + except json.JSONDecodeError: return {} issues: dict[int, Issue] = {} for obj in items or []: try: number = int(obj.get("number")) - except Exception: + except (TypeError, ValueError): continue raw_labels = obj.get("labels") or [] label_names = [str(lbl.get("name") or lbl) if isinstance(lbl, dict) else str(lbl) for lbl in raw_labels] @@ -124,7 +199,7 @@ def gh_issue_list_by_label(repo: str, label: str) -> dict[int, Issue]: labels=label_names, ) - logging.info(f"Loaded {len(issues)} issues with label {label!r} from repository {repo}") + logging.info("Loaded %d issues with label %r from repository %s", len(issues), label, repo) return issues @@ -141,7 +216,7 @@ def gh_issue_edit_state(repo: str, number: int, state: str) -> bool: stderr = (res.stderr or "") + (res.stdout or "") if "unknown flag: --state" not in stderr: - logging.error(f"Failed to edit state for #{number}: {res.stderr}") + logging.error("Failed to edit state for #%d%s: %s", number, _not_found_hint(res), res.stderr) return False # Fallback for older gh versions that don't support `issue edit --state`. 
res3 = run_gh(["api", "--method", "PATCH", f"repos/{repo}/issues/{number}", "-f", f"state={desired}"]) if res3.returncode != 0: - logging.error(f"Failed to edit state for #{number}: {res2.stderr or res2.stdout or res.stderr}") + logging.error( + "Failed to edit state for #%d%s: %s", + number, + _not_found_hint(res3), + res2.stderr or res2.stdout or res.stderr, + ) return False return True @@ -165,10 +245,10 @@ def gh_issue_edit_title(repo: str, number: int, title: str) -> bool: res = run_gh(["issue", "edit", str(number), "--repo", repo, "--title", title]) if res.returncode != 0: - logging.error(f"Failed to edit title for #{number}: {res.stderr}") + logging.error("Failed to edit title for #%d%s: %s", number, _not_found_hint(res), res.stderr) return False - logging.info(f"Updated issue #{number} title") + logging.info("Updated issue #%d title", number) return True @@ -177,10 +257,10 @@ def gh_issue_edit_body(repo: str, number: int, body: str) -> bool: res = run_gh(["issue", "edit", str(number), "--repo", repo, "--body", body]) if res.returncode != 0: - logging.error(f"Failed to edit body for #{number}: {res.stderr}") + logging.error("Failed to edit body for #%d%s: %s", number, _not_found_hint(res), res.stderr) return False - logging.info(f"Updated issue #{number} body") + logging.info("Updated issue #%d body", number) return True @@ -197,15 +277,18 @@ def gh_issue_add_labels(repo: str, number: int, labels: list[str]) -> None: res = run_gh(args) if res.returncode != 0: # Labels may not exist; don't fail the whole run. - logging.warning(f"Failed to add labels to #{number}: {res.stderr}") + logging.warning("Failed to add labels to #%d%s: %s", number, _not_found_hint(res), res.stderr) def gh_issue_comment(repo: str, number: int, body: str) -> bool: - """Post a comment with *body* on issue *number*.""" - res = run_gh(["issue", "comment", str(number), "--repo", repo, "--body", body]) + """Post a comment with *body* on issue *number*. 
+ + Retries on 404 to tolerate GitHub API replication lag after issue creation. + """ + res = _gh_with_retry(["issue", "comment", str(number), "--repo", repo, "--body", body]) if res.returncode != 0: - logging.error(f"Failed to comment on #{number}: {res.stderr}") + logging.error("Failed to comment on #%d%s: %s", number, _not_found_hint(res), res.stderr) return False return True @@ -220,7 +303,7 @@ def gh_issue_create(repo: str, title: str, body: str, labels: list[str]) -> int res = run_gh(args) if res.returncode != 0: - logging.error(f"Failed to create issue: {res.stderr}") + logging.error("Failed to create issue: %s", res.stderr) return None out = (res.stdout or "").strip() diff --git a/tests/security/test_github_issues.py b/tests/security/test_github_issues.py new file mode 100644 index 0000000..ec880d6 --- /dev/null +++ b/tests/security/test_github_issues.py @@ -0,0 +1,368 @@ +# +# Copyright 2026 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +"""Unit tests for ``shared.github_issues`` – all ``gh`` CLI calls are mocked +via ``run_gh``; ``time.sleep`` is always patched to keep tests instant. 
+""" + +import json +import logging +import subprocess +from typing import Any + +import pytest +from pytest_mock import MockerFixture + +from shared.github_issues import ( + _gh_with_retry, + _is_not_found_error, + _not_found_hint, + gh_issue_add_labels, + gh_issue_add_sub_issue, + gh_issue_add_sub_issue_by_number, + gh_issue_comment, + gh_issue_create, + gh_issue_edit_body, + gh_issue_edit_state, + gh_issue_edit_title, + gh_issue_get_rest_id, + gh_issue_get_sub_issue_numbers, + gh_issue_list_by_label, +) + + +def _completed(*, returncode: int = 0, stdout: str = "", stderr: str = "") -> subprocess.CompletedProcess: + """Build a fake ``subprocess.CompletedProcess`` result.""" + return subprocess.CompletedProcess(args=[], returncode=returncode, stdout=stdout, stderr=stderr) + + +def _ok(**kwargs: Any) -> subprocess.CompletedProcess: + return _completed(returncode=0, **kwargs) + + +def _err(stderr: str = "some error", **kwargs: Any) -> subprocess.CompletedProcess: + return _completed(returncode=1, stderr=stderr, **kwargs) + + +def _not_found(via: str = "stderr") -> subprocess.CompletedProcess: + """Return a 404-style failure (both REST and GraphQL flavours are covered by separate tests).""" + if via == "stdout": + return _completed(returncode=1, stdout="Not Found", stderr="") + return _completed(returncode=1, stderr="gh: Not Found (HTTP 404)") + + +def test_is_not_found_http404_in_stderr() -> None: + assert _is_not_found_error(_completed(returncode=1, stderr="gh: Not Found (HTTP 404)")) is True + +def test_is_not_found_not_found_in_stdout() -> None: + assert _is_not_found_error(_completed(returncode=1, stdout="Not Found", stderr="")) is True + +def test_is_not_found_graphql_message() -> None: + assert _is_not_found_error( + _completed(returncode=1, stderr="GraphQL: Could not resolve to an issue or pull request with the number of 42. 
(repository.issue)") + ) is True + +def test_is_not_found_unrelated_error() -> None: + assert _is_not_found_error(_completed(returncode=1, stderr="gh: timeout")) is False + +def test_is_not_found_success_response() -> None: + assert _is_not_found_error(_ok(stdout="12345")) is False + + +def test_not_found_hint_returns_hint_on_404() -> None: + hint = _not_found_hint(_not_found()) + assert "deleted or transferred" in hint + assert hint.startswith(" (") + +def test_not_found_hint_empty_on_other_error() -> None: + assert _not_found_hint(_err("rate limit exceeded")) == "" + +def test_not_found_hint_empty_on_success() -> None: + assert _not_found_hint(_ok()) == "" + + +def test_retry_succeeds_first_attempt(mocker: MockerFixture) -> None: + """No retries when the first call succeeds.""" + mock_run = mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="ok")) + mock_sleep = mocker.patch("shared.github_issues.time.sleep") + result = _gh_with_retry(["some", "cmd"]) + assert result.returncode == 0 + mock_run.assert_called_once() + mock_sleep.assert_not_called() + +def test_retry_succeeds_on_second_attempt(mocker: MockerFixture) -> None: + """Retries once on 404 then succeeds.""" + mock_run = mocker.patch( + "shared.github_issues.run_gh", + side_effect=[_not_found(), _ok(stdout="42")], + ) + mocker.patch("shared.github_issues.time.sleep") + result = _gh_with_retry(["some", "cmd"], retries=3) + assert result.returncode == 0 + assert mock_run.call_count == 2 + +def test_retry_exhausts_all_attempts(mocker: MockerFixture) -> None: + """Returns the last failure after all retries are consumed.""" + mock_run = mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + mocker.patch("shared.github_issues.time.sleep") + result = _gh_with_retry(["some", "cmd"], retries=3) + assert result.returncode != 0 + # 1 initial + 3 retries = 4 total + assert mock_run.call_count == 4 + +def test_retry_does_not_retry_non_404_error(mocker: MockerFixture) -> None: + 
"""Non-404 errors are not retried.""" + mock_run = mocker.patch("shared.github_issues.run_gh", return_value=_err("server error")) + mocker.patch("shared.github_issues.time.sleep") + result = _gh_with_retry(["some", "cmd"], retries=3) + assert result.returncode != 0 + mock_run.assert_called_once() # no retries + +def test_retry_sleeps_with_exponential_backoff(mocker: MockerFixture) -> None: + """Sleep duration grows as backoff_base ** attempt.""" + mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + mock_sleep = mocker.patch("shared.github_issues.time.sleep") + _gh_with_retry(["cmd"], retries=3, backoff_base=2.0) + sleep_calls = [c.args[0] for c in mock_sleep.call_args_list] + # attempts 1, 2, 3 → 2**1=2, 2**2=4, 2**3=8 + assert sleep_calls == [2.0, 4.0, 8.0] + +def test_retry_zero_retries_no_sleep(mocker: MockerFixture) -> None: + """retries=0 means a single attempt with no sleep.""" + mock_run = mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + mock_sleep = mocker.patch("shared.github_issues.time.sleep") + _gh_with_retry(["cmd"], retries=0) + mock_run.assert_called_once() + mock_sleep.assert_not_called() + + +def test_get_rest_id_success(mocker: MockerFixture) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="987654\n")) + mocker.patch("shared.github_issues.time.sleep") + assert gh_issue_get_rest_id("org/repo", 42) == 987654 + +def test_get_rest_id_retries_on_404(mocker: MockerFixture) -> None: + mock_run = mocker.patch( + "shared.github_issues.run_gh", + side_effect=[_not_found(), _ok(stdout="1111\n")], + ) + mocker.patch("shared.github_issues.time.sleep") + result = gh_issue_get_rest_id("org/repo", 5) + assert result == 1111 + assert mock_run.call_count == 2 + +def test_get_rest_id_returns_none_after_all_retries(mocker: MockerFixture) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + mocker.patch("shared.github_issues.time.sleep") + assert 
gh_issue_get_rest_id("org/repo", 5) is None + +def test_get_rest_id_parse_failure(mocker: MockerFixture) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="not-a-number")) + mocker.patch("shared.github_issues.time.sleep") + assert gh_issue_get_rest_id("org/repo", 1) is None + +def test_get_rest_id_not_found_hint_in_log(mocker: MockerFixture, caplog) -> None: + """Log message includes the not-found hint for 404 errors.""" + mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + mocker.patch("shared.github_issues.time.sleep") + with caplog.at_level(logging.WARNING, logger="root"): + gh_issue_get_rest_id("org/repo", 99) + assert any("deleted or transferred" in r.message for r in caplog.records) + + +def test_add_sub_issue_success(mocker: MockerFixture) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_ok()) + assert gh_issue_add_sub_issue("org/repo", 10, 9999) is True + +def test_add_sub_issue_failure_logs_hint(mocker: MockerFixture, caplog) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + with caplog.at_level(logging.ERROR, logger="root"): + result = gh_issue_add_sub_issue("org/repo", 10, 9999) + assert result is False + assert any("deleted or transferred" in r.message for r in caplog.records) + +def test_add_sub_issue_failure_plain_error(mocker: MockerFixture, caplog) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_err("rate limited")) + with caplog.at_level(logging.ERROR, logger="root"): + result = gh_issue_add_sub_issue("org/repo", 10, 9999) + assert result is False + assert not any("deleted or transferred" in r.message for r in caplog.records) + + +def test_add_sub_issue_by_number_success(mocker: MockerFixture) -> None: + mocker.patch("shared.github_issues.run_gh", side_effect=[_ok(stdout="5555\n"), _ok()]) + mocker.patch("shared.github_issues.time.sleep") + assert gh_issue_add_sub_issue_by_number("org/repo", 10, 42) is True + +def 
test_add_sub_issue_by_number_rest_id_fails(mocker: MockerFixture) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + mocker.patch("shared.github_issues.time.sleep") + assert gh_issue_add_sub_issue_by_number("org/repo", 10, 42) is False + + +def test_get_sub_issue_numbers_success(mocker: MockerFixture) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="[1, 2, 3]\n")) + mocker.patch("shared.github_issues.time.sleep") + assert gh_issue_get_sub_issue_numbers("org/repo", 10) == {1, 2, 3} + +def test_get_sub_issue_numbers_empty(mocker: MockerFixture) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="[]\n")) + mocker.patch("shared.github_issues.time.sleep") + assert gh_issue_get_sub_issue_numbers("org/repo", 10) == set() + +def test_get_sub_issue_numbers_not_found_error(mocker: MockerFixture, caplog) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + mocker.patch("shared.github_issues.time.sleep") + with caplog.at_level(logging.ERROR, logger="root"): + result = gh_issue_get_sub_issue_numbers("org/repo", 10) + assert result == set() + assert any("deleted or transferred" in r.message for r in caplog.records) + +def test_get_sub_issue_numbers_parse_error(mocker: MockerFixture) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="not-json")) + mocker.patch("shared.github_issues.time.sleep") + assert gh_issue_get_sub_issue_numbers("org/repo", 10) == set() + + +def test_issue_comment_success(mocker: MockerFixture) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_ok()) + mocker.patch("shared.github_issues.time.sleep") + assert gh_issue_comment("org/repo", 1, "hello") is True + +def test_issue_comment_retries_on_404(mocker: MockerFixture) -> None: + mock_run = mocker.patch( + "shared.github_issues.run_gh", + side_effect=[_not_found(), _ok()], + ) + mocker.patch("shared.github_issues.time.sleep") + assert 
gh_issue_comment("org/repo", 1, "hello") is True + assert mock_run.call_count == 2 + +def test_issue_comment_fails_after_all_retries(mocker: MockerFixture, caplog) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + mocker.patch("shared.github_issues.time.sleep") + with caplog.at_level(logging.ERROR, logger="root"): + result = gh_issue_comment("org/repo", 1, "hello") + assert result is False + assert any("deleted or transferred" in r.message for r in caplog.records) + +def test_issue_comment_graphql_not_found_hint(mocker: MockerFixture, caplog) -> None: + """GraphQL-style 404 also triggers the not-found hint.""" + graphql_err = _completed( + returncode=1, + stderr="GraphQL: Could not resolve to an issue or pull request with the number of 42. (repository.issue)", + ) + mocker.patch("shared.github_issues.run_gh", return_value=graphql_err) + mocker.patch("shared.github_issues.time.sleep") + with caplog.at_level(logging.ERROR, logger="root"): + gh_issue_comment("org/repo", 42, "body") + assert any("deleted or transferred" in r.message for r in caplog.records) + + +def test_edit_state_success(mocker: MockerFixture) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_ok()) + assert gh_issue_edit_state("org/repo", 1, "open") is True + +def test_edit_state_not_found_hint(mocker: MockerFixture, caplog) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + with caplog.at_level(logging.ERROR, logger="root"): + result = gh_issue_edit_state("org/repo", 1, "open") + assert result is False + assert any("deleted or transferred" in r.message for r in caplog.records) + +def test_edit_state_invalid_state_raises() -> None: + with pytest.raises(ValueError, match="Unsupported issue state"): + gh_issue_edit_state("org/repo", 1, "unknown") + + +def test_edit_title_success(mocker: MockerFixture) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_ok()) + assert gh_issue_edit_title("org/repo", 1, 
"New title") is True + +def test_edit_title_not_found_hint(mocker: MockerFixture, caplog) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + with caplog.at_level(logging.ERROR, logger="root"): + result = gh_issue_edit_title("org/repo", 1, "New title") + assert result is False + assert any("deleted or transferred" in r.message for r in caplog.records) + + +def test_edit_body_success(mocker: MockerFixture) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_ok()) + assert gh_issue_edit_body("org/repo", 1, "new body") is True + +def test_edit_body_not_found_hint(mocker: MockerFixture, caplog) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + with caplog.at_level(logging.ERROR, logger="root"): + result = gh_issue_edit_body("org/repo", 1, "new body") + assert result is False + assert any("deleted or transferred" in r.message for r in caplog.records) + + +def test_add_labels_success(mocker: MockerFixture) -> None: + mock_run = mocker.patch("shared.github_issues.run_gh", return_value=_ok()) + gh_issue_add_labels("org/repo", 1, ["bug", "security"]) + mock_run.assert_called_once() + +def test_add_labels_no_labels_skips_call(mocker: MockerFixture) -> None: + mock_run = mocker.patch("shared.github_issues.run_gh") + gh_issue_add_labels("org/repo", 1, []) + mock_run.assert_not_called() + +def test_add_labels_not_found_hint(mocker: MockerFixture, caplog) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + with caplog.at_level(logging.WARNING, logger="root"): + gh_issue_add_labels("org/repo", 1, ["bug"]) + assert any("deleted or transferred" in r.message for r in caplog.records) + + +def test_create_issue_success_url(mocker: MockerFixture) -> None: + mocker.patch( + "shared.github_issues.run_gh", + return_value=_ok(stdout="https://github.com/org/repo/issues/123\n"), + ) + num = gh_issue_create("org/repo", "title", "body", ["label"]) + assert num == 123 + +def 
test_create_issue_success_bare_number(mocker: MockerFixture) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="issues/456")) + assert gh_issue_create("org/repo", "t", "b", []) == 456 + +def test_create_issue_failure_returns_none(mocker: MockerFixture) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_err("permission denied")) + assert gh_issue_create("org/repo", "t", "b", []) is None + + +def test_list_by_label_success(mocker: MockerFixture) -> None: + payload = [ + {"number": 1, "state": "open", "title": "T1", "body": "b1", "labels": [{"name": "bug"}]}, + {"number": 2, "state": "closed", "title": "T2", "body": "b2", "labels": []}, + ] + mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout=json.dumps(payload))) + issues = gh_issue_list_by_label("org/repo", "bug") + assert len(issues) == 2 + assert issues[1].title == "T1" + assert issues[1].labels == ["bug"] + assert issues[2].labels == [] + +def test_list_by_label_empty_label_returns_empty(mocker: MockerFixture) -> None: + mock_run = mocker.patch("shared.github_issues.run_gh") + assert gh_issue_list_by_label("org/repo", "") == {} + mock_run.assert_not_called() + +def test_list_by_label_gh_failure_returns_empty(mocker: MockerFixture) -> None: + mocker.patch("shared.github_issues.run_gh", return_value=_err("network error")) + assert gh_issue_list_by_label("org/repo", "bug") == {} diff --git a/tests/security/utils/test_issue_sync.py b/tests/security/utils/test_issue_sync.py index 5fa7422..3e373da 100644 --- a/tests/security/utils/test_issue_sync.py +++ b/tests/security/utils/test_issue_sync.py @@ -28,6 +28,7 @@ _append_notification, _close_resolved_parent_issues, _comment_child_event, + _ensure_child_linked_to_parent, _flush_parent_body_updates, _handle_existing_child_issue, _handle_new_child_issue, @@ -615,6 +616,74 @@ def test_handle_existing_child_updates_body(mocker: MockerFixture, sast_alert: A mock_body.assert_called_once() +# 
===================================================================== +# _ensure_child_linked_to_parent +# ===================================================================== + + +def test_ensure_child_linked_already_linked(mocker: MockerFixture) -> None: + """No-op when the child is already in the parent's sub-issues.""" + mocker.patch("utils.issue_sync.gh_issue_get_sub_issue_numbers", return_value={5}) + mock_add = mocker.patch("utils.issue_sync.gh_issue_add_sub_issue_by_number") + parent = Issue(number=1, state="open", title="P", body="pb") + child = Issue(number=5, state="open", title="C", body="cb") + ctx = _make_alert_context() + sync = _make_sync_context() + _ensure_child_linked_to_parent(ctx=ctx, sync=sync, issue=child, parent_issue=parent) + mock_add.assert_not_called() + + +def test_ensure_child_linked_missing_adds_link(mocker: MockerFixture) -> None: + """Adds the sub-issue link when the child is missing from the parent.""" + mocker.patch("utils.issue_sync.gh_issue_get_sub_issue_numbers", return_value=set()) + mock_add = mocker.patch("utils.issue_sync.gh_issue_add_sub_issue_by_number", return_value=True) + parent = Issue(number=1, state="open", title="P", body="pb") + child = Issue(number=5, state="open", title="C", body="cb") + ctx = _make_alert_context() + sync = _make_sync_context() + _ensure_child_linked_to_parent(ctx=ctx, sync=sync, issue=child, parent_issue=parent) + mock_add.assert_called_once_with("test-org/test-repo", 1, 5) + + +def test_ensure_child_linked_missing_dry_run(mocker: MockerFixture) -> None: + """In dry-run mode logs intent without calling the add-sub-issue API.""" + mocker.patch("utils.issue_sync.gh_issue_get_sub_issue_numbers", return_value=set()) + mock_add = mocker.patch("utils.issue_sync.gh_issue_add_sub_issue_by_number") + parent = Issue(number=1, state="open", title="P", body="pb") + child = Issue(number=5, state="open", title="C", body="cb") + ctx = _make_alert_context() + sync = _make_sync_context(dry_run=True) + 
_ensure_child_linked_to_parent(ctx=ctx, sync=sync, issue=child, parent_issue=parent) + mock_add.assert_not_called() + + +def test_ensure_child_linked_cache_populated(mocker: MockerFixture) -> None: + """gh_issue_get_sub_issue_numbers is called only once per parent (cached).""" + mock_list = mocker.patch("utils.issue_sync.gh_issue_get_sub_issue_numbers", return_value={5, 6}) + mocker.patch("utils.issue_sync.gh_issue_add_sub_issue_by_number") + parent = Issue(number=1, state="open", title="P", body="pb") + child_a = Issue(number=5, state="open", title="A", body="ab") + child_b = Issue(number=6, state="open", title="B", body="bb") + ctx_a = _make_alert_context(fingerprint="fp_a") + ctx_b = _make_alert_context(fingerprint="fp_b") + sync = _make_sync_context() + _ensure_child_linked_to_parent(ctx=ctx_a, sync=sync, issue=child_a, parent_issue=parent) + _ensure_child_linked_to_parent(ctx=ctx_b, sync=sync, issue=child_b, parent_issue=parent) + mock_list.assert_called_once_with("test-org/test-repo", 1) + + +def test_ensure_child_linked_api_failure_no_cache_update(mocker: MockerFixture) -> None: + """When the API call to add the link fails, the cache is not updated.""" + mocker.patch("utils.issue_sync.gh_issue_get_sub_issue_numbers", return_value=set()) + mocker.patch("utils.issue_sync.gh_issue_add_sub_issue_by_number", return_value=False) + parent = Issue(number=1, state="open", title="P", body="pb") + child = Issue(number=5, state="open", title="C", body="cb") + ctx = _make_alert_context() + sync = _make_sync_context() + _ensure_child_linked_to_parent(ctx=ctx, sync=sync, issue=child, parent_issue=parent) + assert 5 not in sync.parent_sub_issues_cache.get(1, set()) + + # ===================================================================== # ensure_parent_issue # ===================================================================== @@ -872,10 +941,8 @@ def test_ensure_issue_new_alert_creates_parent_and_child( issues: dict[int, Issue] = {} index = 
IssueIndex(by_fingerprint={}, parent_by_rule_id={}) notifications: list[NotifiedIssue] = [] - ensure_issue( - sast_alert, issues, index, - dry_run=False, notifications=notifications, - ) + sync = _make_sync_context(issues=issues, index=index, dry_run=False, notifications=notifications) + ensure_issue(sast_alert, sync) assert mock_create.call_count == 2 # parent + child assert len(notifications) == 1 @@ -884,10 +951,8 @@ def test_ensure_issue_dry_run(sast_alert: Alert) -> None: issues: dict[int, Issue] = {} index = IssueIndex(by_fingerprint={}, parent_by_rule_id={}) notifications: list[NotifiedIssue] = [] - ensure_issue( - sast_alert, issues, index, - dry_run=True, notifications=notifications, - ) + sync = _make_sync_context(issues=issues, index=index, dry_run=True, notifications=notifications) + ensure_issue(sast_alert, sync) assert len(notifications) == 1 assert notifications[0].issue_number == 0 @@ -900,7 +965,7 @@ def test_ensure_issue_skips_non_open() -> None: }) issues: dict[int, Issue] = {} index = IssueIndex(by_fingerprint={}, parent_by_rule_id={}) - ensure_issue(alert, issues, index, dry_run=True) + ensure_issue(alert, _make_sync_context(issues=issues, index=index, dry_run=True)) def test_ensure_issue_missing_alert_hash_raises() -> None: """Raises SystemExit when alert hash is missing.""" @@ -912,7 +977,7 @@ def test_ensure_issue_missing_alert_hash_raises() -> None: issues: dict[int, Issue] = {} index = IssueIndex(by_fingerprint={}, parent_by_rule_id={}) with pytest.raises(SystemExit, match="alert_hash"): - ensure_issue(alert, issues, index, dry_run=True) + ensure_issue(alert, _make_sync_context(issues=issues, index=index, dry_run=True)) def test_ensure_issue_missing_alert_details_raises() -> None: """Raises SystemExit when alert_details has no alert_hash.""" @@ -924,7 +989,7 @@ def test_ensure_issue_missing_alert_details_raises() -> None: issues: dict[int, Issue] = {} index = IssueIndex(by_fingerprint={}, parent_by_rule_id={}) with 
pytest.raises(SystemExit, match="alert_hash"): - ensure_issue(alert, issues, index, dry_run=True) + ensure_issue(alert, _make_sync_context(issues=issues, index=index, dry_run=True)) def test_ensure_issue_existing_child_updates(mocker: MockerFixture, sast_alert: Alert) -> None: """When a child issue already exists, it is updated (not duplicated).""" @@ -943,10 +1008,8 @@ def test_ensure_issue_existing_child_updates(mocker: MockerFixture, sast_alert: issues = {5: child, 10: parent} index = build_issue_index(issues) notifications: list[NotifiedIssue] = [] - ensure_issue( - sast_alert, issues, index, - dry_run=True, notifications=notifications, - ) + sync = _make_sync_context(issues=issues, index=index, dry_run=True, notifications=notifications) + ensure_issue(sast_alert, sync) # =====================================================================