feat: Add SigV4A authentication support for global AWS endpoints with auto-detection #297

Workflow file for this run

name: Python
on:
  push:
  pull_request:
  workflow_dispatch:
permissions: {}
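# The empty top-level permissions map drops every default GITHUB_TOKEN
# permission; the build job below re-grants only the scopes it needs.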
jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        job_id: ["Build"]
    defaults:
      run:
        working-directory: .
    name: Build MCP Proxy for AWS
    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: read
      security-events: write
      actions: read
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Install uv
        uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
      - name: Set up Python
        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version-file: ".python-version"
          # cache: uv (not supported)
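      # uv sync --frozen installs exactly what uv.lock pins instead of re-resolving;
      # --all-extras --dev also install optional and development dependencies.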
      - name: Install dependencies
        run: uv sync --frozen --all-extras --dev
      - name: Validate test directory exists
        run: |
          if [ ! -d "tests" ]; then
            echo "ERROR: Tests directory not found!"
            echo "This project requires tests to be present in the 'tests/' directory."
            echo "Please add tests before proceeding."
            exit 1
          fi
          echo "✓ Tests directory found"
      - name: Run pre-commit
        run: |
          uv run pre-commit run --all-files
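      # Unit tests run with line and branch coverage; the XML report is named
      # after the matrix entry.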
      - name: Run tests
        run: |
          uv run --frozen pytest -m unit --cov --cov-branch --cov-report=term-missing --cov-report=xml:${{ matrix.job_id }}-coverage.xml
      - name: Check unit test coverage sufficient
        run: |
          uv run --frozen coverage report --fail-under=80
      - name: Run pyright
        run: uv run --frozen --all-packages pyright
      - name: Run ruff format
        run: uv run --frozen ruff format .
      - name: Run ruff check
        run: uv run --frozen ruff check .
      - name: Build package
        run: uv build
      - name: Upload distribution
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          path: dist/
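      # The SBOM step first normalizes any non-UTF-8 metadata files inside .venv
      # (a workaround for the CycloneDX issue linked below) before invoking
      # cyclonedx-py against the active environment.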
      - name: Generate Software Bill of Materials (SBOM)
        run: |
          source .venv/bin/activate
          echo "Attempt to convert to proper UTF-8 files https://github.com/CycloneDX/cyclonedx-python/issues/868"
          find .venv -type f -path '*/*.dist-info/*' > .venv/FILES
          # because grep with xargs returns 123, we have to do this the long and hard way...
          while IFS= read -r line; do
            (grep -s -q -axv '.*' "$line" &&
              if [[ "$(file -b --mime-encoding "$line")" != "binary" ]]; then
                echo "illegal utf-8 characters in $line...converting...";
                iconv -f "$(file -b --mime-encoding "$line")" -t utf-8 "$line" > "$line.utf8";
                mv "$line.utf8" "$line";
              fi;
            ) || echo "good $line"
          done < .venv/FILES;
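          # Emit a CycloneDX JSON SBOM for everything installed in the active venv,
          # gathering PEP 639 license metadata and license texts; cyclonedx-bom is
          # pinned so runs stay reproducible.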
          uv tool run --from cyclonedx-bom==6.1.3 cyclonedx-py environment $VIRTUAL_ENV --PEP-639 --gather-license-texts --pyproject pyproject.toml --mc-type library --output-format JSON > sbom.json
      - name: Display SBOM
        run: |
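          # The heredoc below holds a small Python script: bash reads the body up to
          # EOT, and the pipeline resumes at "python -" after the terminator, so the
          # script is fed to the interpreter on stdin.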
          cat <<EOT |
          import re
          import json
          import importlib.metadata as metadata

          def parse_bom(json_file):
              # Parse the JSON file
              with open(json_file, 'r') as file:
                  data = json.load(file)
              # Extract components
              components = []
              for component in data['components']:
                  comp_info = {}
                  # Get name, version, description, and purl
                  comp_info['name'] = component.get('name', 'Unknown')
                  comp_info['version'] = component.get('version', 'Unknown')
                  comp_info['description'] = component.get('description', 'Unknown')
                  comp_info['purl'] = component.get('purl', 'Unknown')
                  # Get licenses
                  comp_info['licenses'] = []
                  licenses = component.get('licenses', [])
                  for license in licenses:
                      if license.get('license', {}).get('id'):
                          comp_info['licenses'].append(license.get('license').get('id'))
                  if len(comp_info['licenses']) == 0:
                      comp_info['licenses'].append("No licenses")
                  # Extract additional information (copyright, etc.)
                  copyright_info = extract_copyright_from_metadata(comp_info['name'])
                  comp_info['copyright'] = copyright_info if copyright_info else "No copyright information"
                  components.append(comp_info)
              return components

          def extract_copyright_from_metadata(package_name):
              try:
                  # Use importlib.metadata to retrieve metadata from the installed package
                  dist = metadata.distribution(package_name)
                  metadata_info = dist.metadata
                  # Extract relevant metadata
                  copyright_info = []
                  author = metadata_info.get('Author')
                  author_email = metadata_info.get('Author-email')
                  license_info = metadata_info.get('License')
                  if author:
                      copyright_info.append(f"Author: {author}")
                  if author_email:
                      copyright_info.append(f"Author Email: {author_email}")
                  if license_info:
                      copyright_info.append(f"License: {license_info}")
                  # Check for classifiers or any extra metadata fields
                  if 'Classifier' in metadata_info:
                      for classifier in metadata_info.get_all('Classifier'):
                          if 'copyright' in classifier.lower():
                              copyright_info.append(classifier)
                  return ', '.join(copyright_info) if copyright_info else None
              except metadata.PackageNotFoundError:
                  return None

          def main():
              bom_file = 'sbom.json'  # Replace with your BOM file path
              components = parse_bom(bom_file)
              for component in components:
                  print(f"Name: {component['name']}")
                  print(f"Version: {component['version']}")
                  print(f"Description: {component['description']}")
                  print(f"PURL: {component['purl']}")
                  print(f"Licenses: {', '.join(component['licenses'])}")
                  print(f"Copyright: {component['copyright']}")
                  print("-" * 40)

          if __name__ == "__main__":
              main()
          EOT
          python -
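      # Publish the generated SBOM as a downloadable artifact for this run.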
      - name: Upload Software Bill of Materials
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: sbom-mcp-proxy-for-aws
          path: sbom.json