Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
121 changes: 121 additions & 0 deletions .builders/promote.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,121 @@
"""Promote dependency wheels from dev to stable storage.

Reads lockfiles from .deps/resolved/, identifies every wheel that lives
under the ``dev/`` prefix in GCS, and copies it to the ``stable/`` prefix.
Invoked via ``ddev promote <PR_URL>`` which dispatches the promote workflow.
"""
from __future__ import annotations

import re
import sys
from pathlib import Path, PurePosixPath

from google.cloud import storage

# GCS bucket that holds both the dev and stable wheel trees.
BUCKET_NAME = "deps-agent-int-datadoghq-com"
# Repository root: this file lives in .builders/, so go up two levels.
REPO_DIR = Path(__file__).resolve().parent.parent
# Directory containing the per-platform resolved lockfiles (*.txt).
LOCK_FILE_DIR = REPO_DIR / ".deps" / "resolved"

# Blob-name prefixes inside the bucket: wheels are built into dev/ and
# promoted (copied) to stable/ once a release is approved.
DEV_PREFIX = "dev/"
STABLE_PREFIX = "stable/"

# A lockfile entry looks like: "<package> @ <url>" (PEP 508 direct reference).
LOCKFILE_ENTRY = re.compile(r"^(?P<name>\S+)\s+@\s+(?P<url>\S+)$")


def parse_lockfile_urls(lockfile: Path) -> list[str]:
    """Extract wheel URLs from a lockfile.

    Lines that do not match the ``name @ url`` shape (including blank lines)
    are ignored. Any ``#sha256=...`` fragment is stripped from the URL.
    """
    candidates = (
        LOCKFILE_ENTRY.match(raw.strip())
        for raw in lockfile.read_text().splitlines()
    )
    return [match.group("url").split("#")[0] for match in candidates if match]


STORAGE_BASE = "https://agent-int-packages.datadoghq.com/"
STORAGE_TEMPLATE_PREFIX = f"{STORAGE_BASE}${{INTEGRATIONS_WHEELS_STORAGE}}/"


def url_to_blob_path(url: str) -> str | None:
"""Convert a wheel URL to its GCS blob path, or None if not a templated storage URL.

Handles the templated ``https://agent-int-packages.datadoghq.com/${INTEGRATIONS_WHEELS_STORAGE}/...``
format used in lockfiles.
"""
if url.startswith(STORAGE_TEMPLATE_PREFIX):
return url[len(STORAGE_TEMPLATE_PREFIX):]
return None


def collect_relative_paths() -> list[str]:
    """Read all lockfiles and return relative wheel paths from ${INTEGRATIONS_WHEELS_STORAGE} entries.

    Exits with status 1 if the lockfile directory is missing or contains no
    ``*.txt`` lockfiles. Duplicate paths (wheels shared across platforms) are
    kept here; promote() deduplicates before copying.
    """
    if not LOCK_FILE_DIR.is_dir():
        print(f"No lockfile directory found at {LOCK_FILE_DIR}", file=sys.stderr)
        sys.exit(1)

    lockfiles = sorted(LOCK_FILE_DIR.glob("*.txt"))
    if not lockfiles:
        print(f"No lockfiles found in {LOCK_FILE_DIR}", file=sys.stderr)
        sys.exit(1)

    rel_paths: list[str] = []
    for lockfile in lockfiles:
        print(f"Reading {lockfile.name}")
        # Non-templated URLs map to None and are dropped.
        rel_paths.extend(
            blob_path
            for blob_path in map(url_to_blob_path, parse_lockfile_urls(lockfile))
            if blob_path
        )
    return rel_paths


def promote(rel_paths: list[str]) -> None:
    """Copy blobs from dev/ to stable/ in GCS.

    Duplicate paths are collapsed before copying. If any source blob is
    missing under dev/, every missing path is reported and the process exits
    with status 1 (the resolve-build-deps workflow may not be done yet).
    """
    if not rel_paths:
        print("No templated wheels found in lockfiles — nothing to promote.")
        return

    unique_paths = sorted(set(rel_paths))
    print(f"\nPromoting {len(unique_paths)} wheels from dev to stable...\n")

    bucket = storage.Client().bucket(BUCKET_NAME)

    failed: list[str] = []
    for rel_path in unique_paths:
        source_blob = bucket.blob(DEV_PREFIX + rel_path)
        wheel_name = PurePosixPath(rel_path).name

        if source_blob.exists():
            # Server-side copy; overwriting an existing stable blob is harmless.
            bucket.copy_blob(source_blob, bucket, STABLE_PREFIX + rel_path)
            print(f"  OK {wheel_name}")
        else:
            print(f"  MISSING {wheel_name}")
            failed.append(DEV_PREFIX + rel_path)

    print()
    if failed:
        print(
            f"ERROR: {len(failed)} wheel(s) not found in dev storage.\n"
            "The resolve-build-deps workflow may not have finished yet.\n"
            "Wait for it to complete, then run ddev promote again.",
            file=sys.stderr,
        )
        for missing in failed:
            print(f"  - {missing}", file=sys.stderr)
        sys.exit(1)

    print(f"Done. {len(unique_paths)} wheel(s) promoted to stable.")


if __name__ == "__main__":
    # Script entry point: gather wheel paths from lockfiles, then copy them.
    promote(collect_relative_paths())
156 changes: 156 additions & 0 deletions .builders/tests/test_promote.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,156 @@
from pathlib import Path
from unittest import mock

import pytest
import promote

# Templated storage base used by every lockfile entry in these tests.
BASE = "https://agent-int-packages.datadoghq.com/${INTEGRATIONS_WHEELS_STORAGE}"


def write_lockfile(path: Path, entries: list[str]) -> None:
    """Write *entries* to *path* as newline-joined lockfile lines."""
    content = "\n".join(entries)
    path.write_text(content)


def test_parse_lockfile_urls_templated(tmp_path):
    """parse_lockfile_urls extracts URLs from ${INTEGRATIONS_WHEELS_STORAGE} lockfile entries."""
    wheel_a = f"{BASE}/built/aerospike/aerospike-7.1.1-cp313-cp313-linux_x86_64.whl"
    wheel_b = f"{BASE}/external/requests/requests-2.32.0-py3-none-any.whl"
    lockfile = tmp_path / "linux-x86_64_3.13.txt"
    write_lockfile(
        lockfile,
        [f"aerospike @ {wheel_a}#sha256=abc", f"requests @ {wheel_b}#sha256=def", ""],
    )

    # sha256 fragments are stripped; the trailing blank line is ignored.
    assert promote.parse_lockfile_urls(lockfile) == [wheel_a, wheel_b]


def test_url_to_blob_path_templated():
    """url_to_blob_path extracts the relative path from a ${INTEGRATIONS_WHEELS_STORAGE} URL."""
    rel = "built/aerospike/aerospike-7.1.1-cp313-cp313-linux_x86_64.whl"
    assert promote.url_to_blob_path(f"{BASE}/{rel}") == rel


def test_url_to_blob_path_returns_none_for_other_urls():
    """url_to_blob_path returns None for non-templated URLs."""
    non_templated = (
        "https://example.com/some.whl",
        "https://agent-int-packages.datadoghq.com/built/foo/foo-1.0.whl",
        "https://agent-int-packages.datadoghq.com/stable/built/foo/foo-1.0.whl",
    )
    for url in non_templated:
        assert promote.url_to_blob_path(url) is None


def test_collect_relative_paths(tmp_path):
    """collect_relative_paths reads all lockfiles and returns relative paths."""
    lock_dir = tmp_path / ".deps" / "resolved"
    lock_dir.mkdir(parents=True)

    wheels = {
        "linux-x86_64_3.13.txt": "built/aerospike/aerospike-7.1.1-cp313-cp313-linux_x86_64.whl",
        "linux-aarch64_3.13.txt": "built/aerospike/aerospike-7.1.1-cp313-cp313-linux_aarch64.whl",
    }
    write_lockfile(
        lock_dir / "linux-x86_64_3.13.txt",
        [f"aerospike @ {BASE}/{wheels['linux-x86_64_3.13.txt']}#sha256=abc"],
    )
    write_lockfile(
        lock_dir / "linux-aarch64_3.13.txt",
        [f"aerospike @ {BASE}/{wheels['linux-aarch64_3.13.txt']}#sha256=xyz"],
    )

    with mock.patch.object(promote, "LOCK_FILE_DIR", lock_dir):
        paths = promote.collect_relative_paths()

    assert sorted(paths) == sorted(wheels.values())


def test_collect_relative_paths_keeps_duplicates(tmp_path):
    """collect_relative_paths returns all paths even when shared across lockfiles.

    The old name said "deduplicates", but the assertion below verifies the
    opposite: both occurrences survive. Deduplication is promote()'s job.
    """
    lock_dir = tmp_path / ".deps" / "resolved"
    lock_dir.mkdir(parents=True)

    # Same wheel referenced from two platform lockfiles.
    shared_entry = f"requests @ {BASE}/external/requests/requests-2.32.0-py3-none-any.whl#sha256=def"
    write_lockfile(lock_dir / "linux-x86_64_3.13.txt", [shared_entry])
    write_lockfile(lock_dir / "linux-aarch64_3.13.txt", [shared_entry])

    with mock.patch.object(promote, "LOCK_FILE_DIR", lock_dir):
        paths = promote.collect_relative_paths()

    assert paths.count("external/requests/requests-2.32.0-py3-none-any.whl") == 2


def test_promote_copies_blobs():
    """promote copies each relative path from dev/ to stable/ in GCS."""
    rel_paths = [
        "built/aerospike/aerospike-7.1.1-cp313-cp313-linux_x86_64.whl",
        "external/requests/requests-2.32.0-py3-none-any.whl",
    ]

    bucket = mock.Mock()
    source_blob = mock.Mock(**{"exists.return_value": True})
    bucket.blob.return_value = source_blob
    client = mock.Mock(**{"bucket.return_value": bucket})

    with mock.patch("promote.storage.Client", return_value=client):
        promote.promote(rel_paths)

    # One dev/ lookup and one stable/ copy per wheel.
    assert bucket.blob.call_count == 2
    assert bucket.copy_blob.call_count == 2
    for rel in rel_paths:
        bucket.blob.assert_any_call(f"dev/{rel}")
        bucket.copy_blob.assert_any_call(source_blob, bucket, f"stable/{rel}")


def test_promote_is_idempotent():
    """promote succeeds even if the destination blob already exists (GCS copy is idempotent)."""
    rel_paths = ["built/foo/foo-1.0-cp313-cp313-linux_x86_64.whl"]

    bucket = mock.Mock()
    bucket.blob.return_value = mock.Mock(**{"exists.return_value": True})
    client = mock.Mock(**{"bucket.return_value": bucket})

    with mock.patch("promote.storage.Client", return_value=client):
        for _ in range(2):
            promote.promote(rel_paths)

    # Both runs copy without raising.
    assert bucket.copy_blob.call_count == 2


def test_promote_fails_if_source_missing(capsys):
    """promote exits with error if a source blob is not found in dev/."""
    bucket = mock.Mock()
    bucket.blob.return_value = mock.Mock(**{"exists.return_value": False})
    client = mock.Mock(**{"bucket.return_value": bucket})

    with mock.patch("promote.storage.Client", return_value=client):
        with pytest.raises(SystemExit) as exc_info:
            promote.promote(["built/missing/missing-1.0-cp313-cp313-linux_x86_64.whl"])

    assert exc_info.value.code == 1
    out, err = capsys.readouterr().out, None
    captured = capsys.readouterr()
    assert "MISSING" in out or "not found" in captured.err


def test_promote_nothing_to_promote():
    """promote prints a message and returns early when given no paths."""
    with mock.patch("promote.storage.Client") as client_cls:
        promote.promote([])

    # The early return must fire before any GCS client is constructed.
    client_cls.assert_not_called()
Loading
Loading