def get_last_page(headers: dict[str, Any]) -> int:
    """Return the last page number advertised by a GitHub API ``link`` header.

    GitHub paginated endpoints send an RFC 8288 ``Link`` header such as
    ``<https://...?page=2>; rel="next", <https://...?page=7>; rel="last"``.
    If the header is absent, or carries no ``rel="last"`` entry, there is
    only a single page and 1 is returned.

    :param headers: response headers (``requests`` provides a
        case-insensitive mapping, so the lowercase ``"link"`` lookup also
        matches a ``Link`` header)
    :return: the last page number, at least 1
    """
    if "link" not in headers:
        return 1

    last_link = None
    for link in headers["link"].split(", "):
        if 'rel="last"' in link:
            last_link = link
            break

    if last_link is None:
        return 1

    # The target URL is enclosed in angle brackets: <https://...?page=N>; rel="last"
    target = last_link[last_link.index("<") + 1 : last_link.index(">")]
    return int(parse_qs(urlparse(target).query)["page"][0])


def delete_packages():
    """Delete old container versions of every package in ``packages``.

    Keeps the ``keep`` newest versions of each package, never deletes
    versions whose tags are protected by :func:`restrict_delete_tags`,
    and only reports (without deleting) when ``dry_run`` is set.
    Exits the process on 401/403 responses since every later request
    would fail the same way.
    """
    for package in packages:
        print(f":package: {package}")

        # Package names may contain '/', which must be URL-escaped in the path.
        encoded = package.replace("/", "%2F")

        # Probe with HEAD first: the pagination info lives in the response
        # headers, so there is no need to download page 1 twice.
        url = f"{pkg_url}/container/{encoded}/versions?page=1&per_page=50"
        response = requests.head(url, headers=headers)
        # get_last_page() always returns an int (was annotated `int | None`).
        pages: int = get_last_page(response.headers)

        # Walk pages from last to first so deletions do not shift the
        # contents of pages we have not visited yet.
        for page in range(pages, 0, -1):
            print(f"Page: {page}")
            url = f"{pkg_url}/container/{encoded}/versions?page={page}&per_page=50"
            response = requests.get(url, headers=headers)
            if response.status_code == 404:
                print(f":cross_mark: Not found - {url}")
                continue
            elif response.status_code == 401:
                print(f":cross_mark: Requires authentication - {url}")
                sys.exit(1)
            elif response.status_code == 403:
                print(f":cross_mark: Forbidden - {url}")
                sys.exit(1)

            # Newest first within the page; the API itself returns newest
            # first, so page 1 holds the versions worth keeping.
            images = sorted(response.json(), key=lambda x: x["id"], reverse=True)

            # Page 1 holds the newest versions: preserve the first `keep` of
            # them. Every other page only holds older versions, so all of them
            # are deletion candidates. (Fixes two bugs: `images[keep + 1:]`
            # kept one version too many, and the former `len(images) > keep`
            # per-page guard wrongly spared short trailing pages entirely.)
            candidates = images[keep:] if page == 1 else images

            for image in candidates:
                url = f"{pkg_url}/container/{encoded}/versions/{image['id']}"
                tags = image["metadata"]["container"]["tags"]

                # Never remove latest or non snapshot tagged images.
                if restrict_delete_tags(tags):
                    print(
                        f":package: Skip tagged {package} id {image['id']} tags {tags}"
                    )
                    continue

                if dry_run:
                    continue

                response = requests.delete(url, headers=headers)
                if response.status_code == 401:
                    print(f":cross_mark: Requires authentication - {url}")
                    sys.exit(1)
                elif response.status_code == 403:
                    print(f":cross_mark: Forbidden - {url}")
                    sys.exit(1)
                elif response.status_code != 204:
                    # Was: only 404 counted as failure, so e.g. a 500 fell
                    # through to the success message below.
                    print(
                        f":cross_mark: Failed to delete package {package} version id {image['id']}."
                    )
                    continue

                if tags:
                    print(
                        # A space before "with" was missing in the original
                        # f-string concatenation ("id 123with tags ...").
                        f":white_heavy_check_mark: Deleted tagged package {package} version id {image['id']} "
                        f"with tags {tags}."
                    )
                else:
                    print(
                        f":white_heavy_check_mark: Deleted untagged package {package} version id {image['id']}"
                    )