From fcfe28feb3cb48a8795dadef298678de6b55ed6a Mon Sep 17 00:00:00 2001
From: Kirill Kouzoubov
Date: Wed, 8 Nov 2023 11:13:56 +1100
Subject: [PATCH] maint: use netlify instead of S3 for rendered nb

publish to S3 no longer works, use netlify instead
---
 .github/workflows/main.yml       | 14 ++++++++++-
 .github/workflows/publish-s3.yml | 42 --------------------------------
 .github/workflows/render.yml     | 15 ------------
 docs/conf.py                     | 36 ++++++++++++---------------
 scripts/notebook_hash.py         |  8 +++---
 5 files changed, 33 insertions(+), 82 deletions(-)
 delete mode 100644 .github/workflows/publish-s3.yml

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 7c790d4..61f1131 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -413,6 +413,17 @@ jobs:
         run: |
           echo "/tmp/test_env/bin" >> $GITHUB_PATH

+      - name: Config
+        id: cfg
+        run: |
+          find notebooks/ -maxdepth 1 -name '*.py' -type f | sort -f -d
+
+          nb_dir="docs/notebooks"
+          nb_hash=$(python scripts/notebook_hash.py)
+          echo "Notebooks hash: ${nb_hash}"
+          echo "nb-hash=${nb_hash}" >> $GITHUB_OUTPUT
+          echo "nb-hash-short=${nb_hash:0:16}" >> $GITHUB_OUTPUT
+
       - name: Install in Edit mode
         shell: bash
         run: |
@@ -428,12 +439,13 @@
         if: github.event_name == 'pull_request'
         uses: nwtgck/actions-netlify@v2
         with:
-          production-branch: "main"
+          production-branch: "develop"
           publish-dir: "docs/_build/html"
           deploy-message: "Deploy from GitHub Actions"
           github-token: ${{ secrets.GITHUB_TOKEN }}
           enable-pull-request-comment: true
           enable-commit-comment: false
+          alias: ${{ steps.cfg.outputs.nb-hash-short }}
         env:
           NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}

diff --git a/.github/workflows/publish-s3.yml b/.github/workflows/publish-s3.yml
deleted file mode 100644
index 1516d2c..0000000
--- a/.github/workflows/publish-s3.yml
+++ /dev/null
@@ -1,42 +0,0 @@
-name: Publish to S3
-
-on:
-  workflow_run:
-    workflows: ["Run Code Checks"]
-    branches: [develop]
-    types:
-      - completed
-
-jobs:
-  publish-s3:
-    if: |
-      github.repository == 'opendatacube/odc-stac'
-      && github.event.workflow_run.conclusion == 'success'
-
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: actions/checkout@v3
-      - uses: actions/cache@v3
-        id: wheels_cache
-        with:
-          path: ./wheels
-          key: wheels-${{ github.sha }}
-
-      - name: Prepare for upload to S3
-        run: |
-          mkdir -p ./pips
-          ./scripts/mk-pip-tree.sh ./wheels/dev/ ./pips
-          find ./pips -type f
-
-      - name: Upload to S3
-        run: |
-          echo "Using Keys: ...${AWS_ACCESS_KEY_ID:(-4)}/...${AWS_SECRET_ACCESS_KEY:(-4)}"
-          aws s3 ls "${S3_DST}"
-          aws s3 sync ./pips/ "${S3_DST}"
-        env:
-          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
-          AWS_DEFAULT_REGION: 'ap-southeast-2'
-          AWS_REGION: 'ap-southeast-2'
-          S3_DST: 's3://datacube-core-deployment/'
diff --git a/.github/workflows/render.yml b/.github/workflows/render.yml
index 8f1da3f..78673de 100644
--- a/.github/workflows/render.yml
+++ b/.github/workflows/render.yml
@@ -120,21 +120,6 @@
           ls -lh "${nb_archive}"
           tar tzf "${nb_archive}"

-      - name: Upload to S3
-        run: |
-          nb_archive="${{ steps.cfg.outputs.nb-archive }}"
-          echo "Using Keys: ...${AWS_ACCESS_KEY_ID:(-4)}/...${AWS_SECRET_ACCESS_KEY:(-4)}"
-          echo "Testing permissions"
-          aws s3 ls "${S3_DST}/" || true
-          aws s3 cp "${nb_archive}" "${S3_DST}/${nb_archive}"
-
-        env:
-          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
-          AWS_DEFAULT_REGION: "ap-southeast-2"
-          AWS_REGION: "ap-southeast-2"
-          S3_DST: "s3://datacube-core-deployment/odc-stac/nb"
"s3://datacube-core-deployment/odc-stac/nb" - - name: Upload results (artifact) uses: actions/upload-artifact@v3 with: diff --git a/docs/conf.py b/docs/conf.py index 791a32e..886d28d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -13,10 +13,10 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. # import os -import subprocess import sys from pathlib import Path +import requests from sphinx.util import logging sys.path.insert(0, os.path.abspath("..")) @@ -39,40 +39,36 @@ def filter(self, record: pylogging.LogRecord) -> bool: # End of a workaround -def ensure_notebooks(https_url, dst_folder): +def ensure_notebooks(dst_folder): """ Download pre-rendered notebooks from a tar archive """ - dst_folder = Path(dst_folder) if dst_folder.exists(): print(f"Found pre-rendered notebooks in {dst_folder}") return True - print(f"Testing: {https_url}") - result = subprocess.run([f"curl -f -s -I {https_url}"], shell=True) - if result.returncode != 0: - print(f"Cached notebook URL does not exist: {https_url}") - return False - dst_folder.mkdir() - print(f"Fetching: {https_url} to {dst_folder}") - log = subprocess.check_output( - ["/bin/bash", "-c", f"curl -s {https_url} | tar xz -C {dst_folder}"] - ).decode("utf-8") - print(log) + nb_hash, nb_paths = notebook_hash.compute("../notebooks") + nb_names = [p.rsplit("/", 1)[-1].rsplit(".", 1)[0] + ".ipynb" for p in nb_paths] + + for nb in nb_names: + url = f"https://{nb_hash[:16]}--odc-stac-docs.netlify.app/notebooks/{nb}" + print(f"{url} -> notebooks/{nb}") + rr = requests.get(url, timeout=5) + if not rr: + return False + with open(dst_folder / nb, "wt", encoding="utf") as dst: + dst.write(rr.text) + return True # working directory is docs/ # download pre-rendered notebooks unless folder is already populated -nb_hash = notebook_hash.compute("../notebooks") -https_url = ( - f"https://packages.dea.ga.gov.au/odc-stac/nb/odc-stac-notebooks-{nb_hash}.tar.gz" -) -if not ensure_notebooks(https_url, "notebooks"): +if not ensure_notebooks("notebooks"): notebooks_directory = os.path.abspath("../notebooks") - raise Exception( + raise RuntimeException( "There is no cached version of these notebooks. " "Build the notebooks before building the documentation. " f"Notebooks are located in {notebooks_directory}." diff --git a/scripts/notebook_hash.py b/scripts/notebook_hash.py index 3b3f4bd..518d6c0 100644 --- a/scripts/notebook_hash.py +++ b/scripts/notebook_hash.py @@ -1,6 +1,5 @@ -import os.path - import hashlib +import os.path def compute(folder: str) -> str: @@ -15,9 +14,10 @@ def compute(folder: str) -> str: with open(path, "rb") as file: bytes = file.read() hash.update(bytes) - return hash.hexdigest() + return hash.hexdigest(), paths if __name__ == "__main__": folder = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "notebooks")) - print(compute(folder)) + hsh, _ = compute(folder) + print(hsh)