diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
deleted file mode 100644
index 0a100afe..00000000
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ /dev/null
@@ -1,20 +0,0 @@
----
-name: Bug report
-about: Create a report to help us improve
-title: ''
-labels: bug
-assignees: ''
-
----
-
-**What happened**:
-
-**What you expected to happen**:
-
-**Steps to reproduce the issue**:
-
-**Anything else we need to know?**:
-
-**Environment**:
-- Output of `syft version`:
-- OS (e.g: `cat /etc/os-release` or similar):
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
deleted file mode 100644
index 3009b9c0..00000000
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ /dev/null
@@ -1,6 +0,0 @@
-contact_links:
-
-  - name: Join our Discourse community đŸ’Ŧ
-    # link to our community Discourse site
-    url: https://anchore.com/discourse
-    about: 'Come chat with us! Ask for help, join our software development efforts, or just give us feedback!'
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
deleted file mode 100644
index d07c5f15..00000000
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ /dev/null
@@ -1,15 +0,0 @@
----
-name: Feature request
-about: Suggest an idea for this project
-title: ''
-labels: enhancement
-assignees: ''
-
----
-
-**What would you like to be added**:
-
-**Why is this needed**:
-
-**Additional context**:
-
diff --git a/.github/actions/bootstrap/action.yaml b/.github/actions/bootstrap/action.yaml
deleted file mode 100644
index 6150113a..00000000
--- a/.github/actions/bootstrap/action.yaml
+++ /dev/null
@@ -1,66 +0,0 @@
-name: "Bootstrap"
-
-description: "Bootstrap all tools and dependencies"
-inputs:
-  go-version:
-    description: "Go version to install"
-    required: true
-    default: "1.22.x"
-  go-dependencies:
-    description: "Download go dependencies"
-    required: true
-    default: "true"
-  cache-key-prefix:
-    description: "Prefix all cache keys with this value"
-    required: true
-    default: "181053ac82"
-  download-test-fixture-cache:
-    description: "Download test fixture cache from OCI and github actions"
-    required: true
-    default: "false"
-  bootstrap-apt-packages:
-    description: "Space delimited list of tools to install via apt"
-    default: "libxml2-utils"
-
-runs:
-  using: "composite"
-  steps:
-    # note: go mod and build is automatically cached by default with v4+
-    - uses: actions/setup-go@cdcb36043654635271a94b9a6d1392de5bb323a7 # v5.0.1
-      if: inputs.go-version != ''
-      with:
-        go-version: ${{ inputs.go-version }}
-
-    - name: Restore tool cache
-      id: tool-cache
-      uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84 # v3.3.2
-      with:
-        path: ${{ github.workspace }}/.tool
-        key: ${{ inputs.cache-key-prefix }}-${{ runner.os }}-tool-${{ hashFiles('.binny.yaml') }}
-
-    - name: Install project tools
-      shell: bash
-      run: make tools
-
-    - name: Install go dependencies
-      if: inputs.go-dependencies == 'true'
-      shell: bash
-      run: make ci-bootstrap-go
-
-    - name: Install apt packages
-      if: inputs.bootstrap-apt-packages != ''
-      shell: bash
-      run: |
-        DEBIAN_FRONTEND=noninteractive sudo apt update && sudo -E apt install -y ${{ inputs.bootstrap-apt-packages }}
-
-    - name: Restore ORAS cache from github actions
-      if: inputs.download-test-fixture-cache == 'true'
-      uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84 # v3.3.2
-      with:
-        path: ${{ github.workspace }}/.tmp/oras-cache
-        key: ${{ inputs.cache-key-prefix }}-oras-cache
-
-    - name: Download test fixture cache
-      if: inputs.download-test-fixture-cache == 'true'
-      shell: bash
-      run: make download-test-fixture-cache
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
deleted file mode 100644
index 0009183a..00000000
--- a/.github/dependabot.yml
+++ /dev/null
@@ -1,27 +0,0 @@
-version: 2
-
-updates:
-
-  - package-ecosystem: gomod
-    directory: "/"
-    schedule:
-      interval: "daily"
-    open-pull-requests-limit: 10
-    labels:
-      - "dependencies"
-
-  - package-ecosystem: "github-actions"
-    directory: "/"
-    schedule:
-      interval: "daily"
-    open-pull-requests-limit: 10
-    labels:
-      - "dependencies"
-
-  - package-ecosystem: "github-actions"
-    directory: "/.github/actions/bootstrap"
-    schedule:
-      interval: "daily"
-    open-pull-requests-limit: 10
-    labels:
-      - "dependencies"
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
deleted file mode 100644
index de2b78a6..00000000
--- a/.github/pull_request_template.md
+++ /dev/null
@@ -1,25 +0,0 @@
-# Description
-
-Please include a summary of the changes along with any relevant motivation and context,
-or link to an issue where this is explained.
-
-
-- Fixes #
-
-## Type of change
-
-
-- [ ] Bug fix (non-breaking change which fixes an issue)
-- [ ] New feature (non-breaking change which adds functionality)
-- [ ] Breaking change (please discuss with the team first; Syft is 1.0 software and we won't accept breaking changes without going to 2.0)
-- [ ] Documentation (updates the documentation)
-- [ ] Chore (improve the developer experience, fix a test flake, etc, without changing the visible behavior of Syft)
-- [ ] Performance (make Syft run faster or use less memory, without changing visible behavior much)
-
-# Checklist:
-
-- [ ] I have added unit tests that cover changed behavior
-- [ ] I have tested my code in common scenarios and confirmed there are no regressions
-- [ ] I have added comments to my code, particularly in hard-to-understand sections
diff --git a/.github/scripts/check_binary_fixture_size.sh b/.github/scripts/check_binary_fixture_size.sh
deleted file mode 100755
index 76482482..00000000
--- a/.github/scripts/check_binary_fixture_size.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-
-# current limit for fixture size
-size=1000
-
-if [ $# -eq 0 ]; then
-  echo "Usage: $0 <directory>"
-  exit 1
-fi
-
-directory="$1"
-
-# Remove trailing slash using parameter expansion
-directory="${directory%/}"
-
-if [ ! -d "$directory" ]; then
-  echo "Directory not found: $directory"
-  exit 1
-fi
-
-found_large_files=0
-while IFS= read -r -d '' file; do
-  if [ $(wc -c < "$file") -gt $size ]; then
-    echo "File $file is greater than ${size} bytes."
-    found_large_files=1
-  fi
-done < <(find "$directory" -type f -print0)
-
-if [ "$found_large_files" -eq 1 ]; then
-  echo "Script failed: Some files are greater than ${size} bytes."
-  exit 1
-else
-  echo "All files in $directory and its subdirectories are ${size} bytes or smaller. Check passed."
-  exit 0
-fi
-
diff --git a/.github/scripts/ci-check.sh b/.github/scripts/ci-check.sh
deleted file mode 100755
index 0ab83a31..00000000
--- a/.github/scripts/ci-check.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env bash
-
-red=$(tput setaf 1)
-bold=$(tput bold)
-normal=$(tput sgr0)
-
-# assert we are running in CI (or die!)
-if [[ -z "$CI" ]]; then
-    echo "${bold}${red}This step should ONLY be run in CI. Exiting...${normal}"
-    exit 1
-fi
diff --git a/.github/scripts/coverage.py b/.github/scripts/coverage.py
deleted file mode 100755
index db14135c..00000000
--- a/.github/scripts/coverage.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python3
-import subprocess
-import sys
-import shlex
-
-
-class bcolors:
-    HEADER = '\033[95m'
-    OKBLUE = '\033[94m'
-    OKCYAN = '\033[96m'
-    OKGREEN = '\033[92m'
-    WARNING = '\033[93m'
-    FAIL = '\033[91m'
-    ENDC = '\033[0m'
-    BOLD = '\033[1m'
-    UNDERLINE = '\033[4m'
-
-
-if len(sys.argv) < 3:
-    print("Usage: coverage.py [threshold] [go-coverage-report]")
-    sys.exit(1)
-
-
-threshold = float(sys.argv[1])
-report = sys.argv[2]
-
-
-args = shlex.split(f"go tool cover -func {report}")
-p = subprocess.run(args, capture_output=True, text=True)
-
-percent_coverage = float(p.stdout.splitlines()[-1].split()[-1].replace("%", ""))
-print(f"{bcolors.BOLD}Coverage: {percent_coverage}%{bcolors.ENDC}")
-
-if percent_coverage < threshold:
-    print(f"{bcolors.BOLD}{bcolors.FAIL}Coverage below threshold of {threshold}%{bcolors.ENDC}")
-    sys.exit(1)
diff --git a/.github/scripts/find_cache_paths.py b/.github/scripts/find_cache_paths.py
deleted file mode 100755
index cc2e4081..00000000
--- a/.github/scripts/find_cache_paths.py
+++ /dev/null
@@ -1,135 +0,0 @@
-#!/usr/bin/env python3
-from __future__ import annotations
-
-import os
-import glob
-import sys
-import json
-import hashlib
-
-
-IGNORED_PREFIXES = []
-
-
-def find_fingerprints_and_check_dirs(base_dir):
-    all_fingerprints = set(glob.glob(os.path.join(base_dir, '**', 'test*', '**', '*.fingerprint'), recursive=True))
-
-    all_fingerprints = {os.path.relpath(fp) for fp in all_fingerprints
-                        if not any(fp.startswith(prefix) for prefix in IGNORED_PREFIXES)}
-
-    if not all_fingerprints:
-        show("No .fingerprint files or cache directories found.")
-        exit(1)
-
-    missing_content = []
-    valid_paths = set()
-    fingerprint_contents = []
-
-    for fingerprint in all_fingerprints:
-        path = fingerprint.replace('.fingerprint', '')
-
-        if not os.path.exists(path):
-            missing_content.append(path)
-            continue
-
-        if not os.path.isdir(path):
-            valid_paths.add(path)
-            continue
-
-        if os.listdir(path):
-            valid_paths.add(path)
-        else:
-            missing_content.append(path)
-
-        with open(fingerprint, 'r') as f:
-            content = f.read().strip()
-            fingerprint_contents.append((fingerprint, content))
-
-    return sorted(valid_paths), missing_content, fingerprint_contents
-
-
-def parse_fingerprint_contents(fingerprint_content):
-    input_map = {}
-    for line in fingerprint_content.splitlines():
-        digest, path = line.split()
-        input_map[path] = digest
-    return input_map
-
-
-def calculate_sha256(fingerprint_contents):
-    sorted_fingerprint_contents = sorted(fingerprint_contents, key=lambda x: x[0])
-
-    concatenated_contents = ''.join(content for _, content in sorted_fingerprint_contents)
-
-    sha256_hash = hashlib.sha256(concatenated_contents.encode()).hexdigest()
-
-    return sha256_hash
-
-
-def calculate_file_sha256(file_path):
-    sha256_hash = hashlib.sha256()
-    with open(file_path, 'rb') as f:
-        for byte_block in iter(lambda: f.read(4096), b""):
-            sha256_hash.update(byte_block)
-    return sha256_hash.hexdigest()
-
-
-def show(*s: str):
-    print(*s, file=sys.stderr)
-
-
-def main(file_path: str | None):
-    base_dir = '.'
-    valid_paths, missing_content, fingerprint_contents = find_fingerprints_and_check_dirs(base_dir)
-
-    if missing_content:
-        show("The following paths are missing or have no content, but have corresponding .fingerprint files:")
-        for path in sorted(missing_content):
-            show(f"- {path}")
-        show("Please ensure these paths exist and have content if they are directories.")
-        exit(1)
-
-    sha256_hash = calculate_sha256(fingerprint_contents)
-
-    paths_with_digests = []
-    for path in sorted(valid_paths):
-        fingerprint_file = f"{path}.fingerprint"
-        try:
-            if os.path.exists(fingerprint_file):
-                file_digest = calculate_file_sha256(fingerprint_file)
-
-                # Parse the fingerprint file to get the digest/path tuples
-                with open(fingerprint_file, 'r') as f:
-                    fingerprint_content = f.read().strip()
-                input_map = parse_fingerprint_contents(fingerprint_content)
-
-                paths_with_digests.append({
-                    "path": path,
-                    "digest": file_digest,
-                    "input": input_map
-                })
-
-        except Exception as e:
-            show(f"Error processing {fingerprint_file}: {e}")
-            raise e
-
-
-    output = {
-        "digest": sha256_hash,
-        "paths": paths_with_digests
-    }
-
-    content = json.dumps(output, indent=2, sort_keys=True)
-
-    if file_path:
-        with open(file_path, 'w') as f:
-            f.write(content)
-
-    print(content)
-
-
-if __name__ == "__main__":
-    file_path = None
-    if len(sys.argv) > 1:
-        file_path = sys.argv[1]
-    main(file_path)
diff --git a/.github/scripts/fingerprint_docker_fixtures.py b/.github/scripts/fingerprint_docker_fixtures.py
deleted file mode 100755
index 4a74420e..00000000
--- a/.github/scripts/fingerprint_docker_fixtures.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import subprocess
-import hashlib
-
-BOLD = '\033[1m'
-YELLOW = '\033[0;33m'
-RESET = '\033[0m'
-
-
-def print_message(message):
-    print(f"{YELLOW}{message}{RESET}")
-
-
-def sha256sum(filepath):
-    h = hashlib.sha256()
-    with open(filepath, 'rb') as f:
-        for chunk in iter(lambda: f.read(4096), b""):
-            h.update(chunk)
-    return h.hexdigest()
-
-
-def is_git_tracked_or_untracked(directory):
-    """Returns a sorted list of files in the directory that are tracked or not ignored by Git."""
-    result = subprocess.run(
-        ["git", "ls-files", "--cached", "--others", "--exclude-standard"],
-        cwd=directory,
-        stdout=subprocess.PIPE,
-        text=True
-    )
-    return sorted(result.stdout.strip().splitlines())
-
-
-def find_test_fixture_dirs_with_images(base_dir):
-    """Find directories that contain 'test-fixtures' and at least one 'image-*' directory."""
-    for root, dirs, files in os.walk(base_dir):
-        if 'test-fixtures' in root:
-            image_dirs = [d for d in dirs if d.startswith('image-')]
-            if image_dirs:
-                yield os.path.realpath(root)
-
-
-def generate_fingerprints():
-    print_message("creating fingerprint files for docker fixtures...")
-
-    for test_fixture_dir in find_test_fixture_dirs_with_images('.'):
-        cache_fingerprint_path = os.path.join(test_fixture_dir, 'cache.fingerprint')
-
-        with open(cache_fingerprint_path, 'w') as fingerprint_file:
-            for image_dir in find_image_dirs(test_fixture_dir):
-                for file in is_git_tracked_or_untracked(image_dir):
-                    file_path = os.path.join(image_dir, file)
-                    checksum = sha256sum(file_path)
-                    path_from_fixture_dir = os.path.relpath(file_path, test_fixture_dir)
-                    fingerprint_file.write(f"{checksum} {path_from_fixture_dir}\n")
-
-
-def find_image_dirs(test_fixture_dir):
-    """Find all 'image-*' directories inside a given test-fixture directory."""
-    result = []
-    for root, dirs, files in os.walk(test_fixture_dir):
-        for dir_name in dirs:
-            if dir_name.startswith('image-'):
-                result.append(os.path.join(root, dir_name))
-    return sorted(result)
-
-
-if __name__ == "__main__":
-    generate_fingerprints()
diff --git a/.github/scripts/go-mod-tidy-check.sh b/.github/scripts/go-mod-tidy-check.sh
deleted file mode 100755
index 28f22fcd..00000000
--- a/.github/scripts/go-mod-tidy-check.sh
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env bash
-set -eu
-
-ORIGINAL_STATE_DIR=$(mktemp -d "TEMP-original-state-XXXXXXXXX")
-TIDY_STATE_DIR=$(mktemp -d "TEMP-tidy-state-XXXXXXXXX")
-
-trap "cp -p ${ORIGINAL_STATE_DIR}/* ./ && git update-index -q --refresh && rm -fR ${ORIGINAL_STATE_DIR} ${TIDY_STATE_DIR}" EXIT
-
-# capturing original state of files...
-cp go.mod go.sum "${ORIGINAL_STATE_DIR}"
-
-# capturing state of go.mod and go.sum after running go mod tidy...
-go mod tidy
-cp go.mod go.sum "${TIDY_STATE_DIR}"
-
-set +e
-
-# detect difference between the git HEAD state and the go mod tidy state
-DIFF_MOD=$(diff -u "${ORIGINAL_STATE_DIR}/go.mod" "${TIDY_STATE_DIR}/go.mod")
-DIFF_SUM=$(diff -u "${ORIGINAL_STATE_DIR}/go.sum" "${TIDY_STATE_DIR}/go.sum")
-
-if [[ -n "${DIFF_MOD}" || -n "${DIFF_SUM}" ]]; then
-  echo "go.mod diff:"
-  echo "${DIFF_MOD}"
-  echo "go.sum diff:"
-  echo "${DIFF_SUM}"
-  echo ""
-  printf "FAILED! go.mod and/or go.sum are NOT tidy; please run 'go mod tidy'.\n\n"
-  exit 1
-fi
diff --git a/.github/scripts/json-schema-drift-check.sh b/.github/scripts/json-schema-drift-check.sh
deleted file mode 100755
index 3002236d..00000000
--- a/.github/scripts/json-schema-drift-check.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env bash
-set -u
-
-if [ "$(git status --porcelain | wc -l)" -ne "0" ]; then
-  echo " 🔴 there are uncommitted changes, please commit them before running this check"
-  exit 1
-fi
-
-if ! make generate-json-schema; then
-  echo "Generating json schema failed"
-  exit 1
-fi
-
-if [ "$(git status --porcelain | wc -l)" -ne "0" ]; then
-  echo " 🔴 there are uncommitted changes, please commit them before running this check"
-  exit 1
-fi
diff --git a/.github/scripts/labeler.py b/.github/scripts/labeler.py
deleted file mode 100755
index 2efd3320..00000000
--- a/.github/scripts/labeler.py
+++ /dev/null
@@ -1,249 +0,0 @@
-#!/usr/bin/env python3
-
-from __future__ import annotations
-
-import sys
-import glob
-import subprocess
-import os
-import re
-
-DRY_RUN = False
-
-JSON_SCHEMA_LABEL = "json-schema"
-
-# note: we can't use "breaking-change" as the label since that might be applied manually by a user. This is a
-# distinct label that we can use to indicate that the label was applied (or removed) by automation.
-BREAKING_CHANGE_LABEL = "detected-breaking-change"
-
-
-def main(changed_files: str | None = None, merge_base_schema_files: str | None = None):
-    global DRY_RUN
-
-    pr_number = os.environ.get("GITHUB_PR_NUMBER")
-    comment_file_path = os.environ.get("CI_COMMENT_FILE")
-
-    if not comment_file_path:
-        print("CI_COMMENT_FILE not set")
-        sys.exit(1)
-
-    if not pr_number:
-        DRY_RUN = True
-
-    if changed_files:
-        DRY_RUN = True
-
-        # read lines from file... this is useful for local testing
-        with open(changed_files) as f:
-            pr_changed_files = f.read().splitlines()
-
-        with open(merge_base_schema_files) as f:
-            og_json_schema_files = sort_json_schema_files(f.read().splitlines())
-
-    else:
-        if not is_ci():
-            print("Not in CI")
-            sys.exit(1)
-
-        if not pr_number:
-            print("Not a PR")
-            sys.exit(1)
-
-        pr_changed_files = get_pr_changed_files(pr_number)
-        # since we are running this in the context of the pull_request_target, the checkout is the merge base...
-        # that is, the main branch of the original repo, NOT the branch in the forked repo (or the branch in the
-        # target repo for non-forked PRs). This means we just need to list the currently checked-out files to get
-        # a sense of the changes before a merge.
-        og_json_schema_files = list_json_schema_files()
-
-    pr_json_schema_files = filter_to_schema_files(pr_changed_files)
-
-    pr_labels = get_pr_labels(pr_number)
-
-    # print("schema files in pr: ", summarize_schema_files(pr_json_schema_files))
-    # print("og schema files: ", summarize_schema_files(og_json_schema_files))
-
-    if not og_json_schema_files:
-        print("No schema files found in merge base")
-        sys.exit(1)
-
-    # pr_json_schema_files = set of PR files that are added, removed, and changed
-    new_schema_files = set(pr_json_schema_files) - set(og_json_schema_files)
-    removed_or_modified_schema_files = set(pr_json_schema_files) - set(new_schema_files)
-
-    print("new schemas: ", summarize_schema_files(new_schema_files))
-    print("removed or modified schemas:", summarize_schema_files(removed_or_modified_schema_files))
-
-    # if there is a new or modified schema, we should add the "json-schema" label to the PR...
-    if new_schema_files or removed_or_modified_schema_files:
-        print("\nAdding json-schema label...")
-        add_label(pr_number, JSON_SCHEMA_LABEL)
-
-    else:
-        if JSON_SCHEMA_LABEL in pr_labels:
-            remove_label(pr_number, JSON_SCHEMA_LABEL)
-
-    # new schema files should be scrutinized, comparing the latest and added versions to see if it's a breaking
-    # change (major version bump). Warn about it on the PR via adding a breaking-change label...
-    if is_breaking_change(new_schema_files, og_json_schema_files[-1]):
-        print("\nBreaking change detected...")
-        add_label(pr_number, BREAKING_CHANGE_LABEL)
-    else:
-        if BREAKING_CHANGE_LABEL in pr_labels:
-            remove_label(pr_number, BREAKING_CHANGE_LABEL)
-
-    # modifying an existing schema could be a breaking change; we should warn about it on the PR via a comment...
-    # removing schema files should never be allowed; we should warn about it on the PR via a comment...
-    if removed_or_modified_schema_files:
-        print("\nRemoved or modified schema detected...")
-        schemas = sort_json_schema_files(list(removed_or_modified_schema_files))
-        schemas_str = "\n".join([f" - {schema}" for schema in schemas])
-        add_comment(comment_file_path, f"Detected modification or removal of existing json schemas:\n{schemas_str}", warning=True)
-
-
-def add_comment(comment_file_path: str, comment: str, warning: bool = False, important: bool = False):
-    if warning or important:
-        comment_lines = comment.splitlines()
-        comment = "\n".join([f"> {line}" for line in comment_lines])
-
-        if warning:
-            comment = f"> [!WARNING]\n{comment}"
-        elif important:
-            comment = f"> [!IMPORTANT]\n{comment}"
-
-    # create any parent directories if they don't exist
-    os.makedirs(os.path.dirname(comment_file_path), exist_ok=True)
-
-    with open(comment_file_path, "w") as f:
-        f.write(comment)
-
-    print(f"Comment file contents: {comment_file_path}")
-    print(comment)
-
-
-def add_label(pr_number: str, label: str):
-    # run "gh pr edit --add-label