ci(fix): Missing code checkout on clean workflow

Signed-off-by: Helio Chissini de Castro <[email protected]>
heliocastro committed Nov 14, 2023
1 parent 0b713d8 commit d630785

Showing 4 changed files with 96 additions and 36 deletions.
6 changes: 3 additions & 3 deletions .github/actions/clean_up_package_registry/action.yml
@@ -32,8 +32,8 @@ runs:
   using: 'composite'

   steps:
-    - name: Install Python
-      uses: actions/setup-python@v4
+    - uses: actions/checkout@v4
+    - uses: actions/setup-python@v4
       with:
         python-version: '3.10'
         cache: 'pip'
@@ -48,5 +48,5 @@
         INPUT_PACKAGES: ${{ inputs.packages }}
         INPUT_DRY_RUN: ${{ inputs.dry-run}}
       run: |
-        pip install -q -U pip requests rich
+        pip install -r ./.github/actions/clean_up_package_registry/requirements.txt
         python ./.github/actions/clean_up_package_registry/clean_up_package_registry.py
109 changes: 80 additions & 29 deletions .github/actions/clean_up_package_registry/clean_up_package_registry.py
@@ -8,6 +8,9 @@


 import os
+import sys
+from typing import Any
+from urllib.parse import parse_qs, urlparse

 import requests
 from rich import print
@@ -34,41 +37,89 @@
 pkg_url: str = f"https://api.github.com/orgs/{org}/packages"


+def get_last_page(headers: dict[str, Any]) -> int:
+    if "link" not in headers:
+        return 1
+
+    links = headers["link"].split(", ")
+
+    last_page = None
+    for link in links:
+        if 'rel="last"' in link:
+            last_page = link
+            break
+
+    if last_page:
+        parsed_url = urlparse(
+            last_page[last_page.index("<") + 1 : last_page.index(">")]
+        )
+        return int(parse_qs(parsed_url.query)["page"][0])
+
+    return 1
+
+
 def delete_packages():
     for package in packages:
         print(f":package: {package}")
-        url = f"{pkg_url}/container/{package.replace('/', '%2F')}/versions?per_page=100"
-        response = requests.get(url, headers=headers)
-
-        if response.status_code == 404:
-            print(f":cross_mark: Not found - {url}")
-            continue
-
-        # Sort all images on id.
-        images = sorted(response.json(), key=lambda x: x["id"], reverse=True)
-
-        # Slice and remove all
-        if len(images) > keep:
-            for image in images[keep + 1 :]:
-                url = f"{pkg_url}/container/{package.replace('/', '%2F')}/versions/{image['id']}"
-
-                # Never remove latest or non snapshot tagged images
-                if restrict_delete_tags(image["metadata"]["container"]["tags"]):
-                    print(
-                        f":package: Skip tagged {package} id {image['id']} tags {image['metadata']['container']['tags']}"
-                    )
-                    continue
-
-                if not dry_run:
-                    response = requests.delete(url, headers=headers)
-                    if response.status_code != 204:
-                        print(
-                            f":cross_mark: Failed to delete package {package} version id {image['id']}."
-                        )
-                        continue
-                print(
-                    f":white_heavy_check_mark: Deleted package {package} version id {image['id']}."
-                )
+
+        # Start page is 1 as stated by documentation
+        url = f"{pkg_url}/container/{package.replace('/', '%2F')}/versions?page=1&per_page=50"
+
+        # Get the header
+        response = requests.head(url, headers=headers)
+        pages: int | None = get_last_page(response.headers)
+
+        for page in range(pages, 0, -1):
+            print(f"Page: {page}")
+            url = f"{pkg_url}/container/{package.replace('/', '%2F')}/versions?page={page}&per_page=50"
+            response = requests.get(url, headers=headers)
+            if response.status_code == 404:
+                print(f":cross_mark: Not found - {url}")
+                continue
+            elif response.status_code == 401:
+                print(f":cross_mark: Requires authentication - {url}")
+                sys.exit(1)
+            elif response.status_code == 403:
+                print(f":cross_mark: Forbidden - {url}")
+                sys.exit(1)
+
+            # Sort all images on id.
+            images = sorted(response.json(), key=lambda x: x["id"], reverse=True)
+
+            # Slice and remove all
+            if len(images) > keep:
+                for image in images if page != 1 else images[keep + 1 :]:
+                    url = f"{pkg_url}/container/{package.replace('/', '%2F')}/versions/{image['id']}"
+
+                    # Never remove latest or non snapshot tagged images
+                    if restrict_delete_tags(image["metadata"]["container"]["tags"]):
+                        print(
+                            f":package: Skip tagged {package} id {image['id']} tags {image['metadata']['container']['tags']}"
+                        )
+                        continue
+
+                    if not dry_run:
+                        response = requests.delete(url, headers=headers)
+                        if response.status_code == 404:
+                            print(f":cross_mark: Failed to delete package {package} version id {image['id']}.")
+                            continue
+                        elif response.status_code == 401:
+                            print(f":cross_mark: Requires authentication - {url}")
+                            sys.exit(1)
+                        elif response.status_code == 403:
+                            print(f":cross_mark: Forbidden - {url}")
+                            sys.exit(1)
+
+                    tags = image["metadata"]["container"]["tags"]
+                    if tags:
+                        print(
+                            f":white_heavy_check_mark: Deleted tagged package {package} version id {image['id']}"
+                            f"with tags {tags}."
+                        )
+                    else:
+                        print(
+                            f":white_heavy_check_mark: Deleted untagged package {package} version id {image['id']}"
+                        )


 def restrict_delete_tags(tags: list) -> bool:
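Note on the pagination helper: the new get_last_page() derives the total page count from the Link header that GitHub attaches to paginated API responses. Below is a minimal sketch of that parsing, run against a hand-written header value in GitHub's documented Link format; the org and package names in the URLs are invented for illustration.

from urllib.parse import parse_qs, urlparse

# Hypothetical Link header, as returned alongside a paginated GitHub
# API response (org "acme" and package "base" are made up).
link_header = (
    '<https://api.github.com/orgs/acme/packages/container/base/versions'
    '?page=2&per_page=50>; rel="next", '
    '<https://api.github.com/orgs/acme/packages/container/base/versions'
    '?page=7&per_page=50>; rel="last"'
)

# Find the entry tagged rel="last", pull the URL out of its angle
# brackets, and read the `page` query parameter, mirroring the steps
# get_last_page() performs.
last = next(part for part in link_header.split(", ") if 'rel="last"' in part)
url = last[last.index("<") + 1 : last.index(">")]
print(int(parse_qs(urlparse(url).query)["page"][0]))  # prints 7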
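Note on the retention logic: the rewritten delete_packages() walks pages from last to first, so the oldest pages are processed first, and only page 1, which holds the newest versions, has the keep-slice applied. A toy sketch of the per-page selection images if page != 1 else images[keep + 1 :], with invented version ids:

# Invented version ids; the API returns the newest versions first,
# so page 1 holds the most recent ones.
keep = 2
pages = {
    1: [9, 8, 7, 6, 5],  # newest ids
    2: [4, 3, 2, 1],     # oldest ids
}

for page in range(max(pages), 0, -1):  # same last-to-first order as the script
    images = pages[page]
    to_delete = images if page != 1 else images[keep + 1 :]
    print(page, to_delete)

# Output:
# 2 [4, 3, 2, 1]   (everything on older pages is a delete candidate)
# 1 [6, 5]         (ids 9, 8 and 7 are retained by the slice)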
9 changes: 9 additions & 0 deletions .github/actions/clean_up_package_registry/requirements.txt
@@ -0,0 +1,9 @@
+certifi==2023.7.22
+charset-normalizer==3.3.2
+idna==3.4
+markdown-it-py==3.0.0
+mdurl==0.1.2
+Pygments==2.16.1
+requests==2.31.0
+rich==13.6.0
+urllib3==2.1.0
8 changes: 4 additions & 4 deletions .github/workflows/clean_up_package_registry.yml
@@ -26,8 +26,8 @@ jobs:
       with:
         token: ${{ secrets.GITHUB_TOKEN }}
         packages: |-
-          thrift
-          clucene
-          base
-          binaries
+          sw360/thrift
+          sw360/clucene
+          sw360/base
+          sw360/binaries
           sw360
