Skip to content

Commit

Permalink
maint: use netlify instead of S3 for rendered nb
Browse files Browse the repository at this point in the history
Publishing to S3 no longer works; publish the rendered notebooks to Netlify instead.
  • Loading branch information
Kirill888 committed Nov 8, 2023
1 parent bce799e commit fcfe28f
Show file tree
Hide file tree
Showing 5 changed files with 33 additions and 82 deletions.
14 changes: 13 additions & 1 deletion .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -413,6 +413,17 @@ jobs:
run: |
echo "/tmp/test_env/bin" >> $GITHUB_PATH
- name: Config
id: cfg
run: |
find notebooks/ -maxdepth 1 -name '*.py' -type f | sort -f -d
nb_dir="docs/notebooks"
nb_hash=$(python scripts/notebook_hash.py)
echo "Notebooks hash: ${nb_hash}"
echo "nb-hash=${nb_hash}" >> $GITHUB_OUTPUT
echo "nb-hash-short=${nb_hash:0:16}" >> $GITHUB_OUTPUT

- name: Install in Edit mode
shell: bash
run: |
Expand All @@ -428,12 +439,13 @@ jobs:
if: github.event_name == 'pull_request'
uses: nwtgck/actions-netlify@v2
with:
production-branch: "main"
production-branch: "develop"
publish-dir: "docs/_build/html"
deploy-message: "Deploy from GitHub Actions"
github-token: ${{ secrets.GITHUB_TOKEN }}
enable-pull-request-comment: true
enable-commit-comment: false
alias: ${{ steps.cfg.outputs.nb-hash-short }}

env:
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
Expand Down
42 changes: 0 additions & 42 deletions .github/workflows/publish-s3.yml

This file was deleted.

15 changes: 0 additions & 15 deletions .github/workflows/render.yml
Original file line number Diff line number Diff line change
Expand Up @@ -120,21 +120,6 @@ jobs:
ls -lh "${nb_archive}"
tar tzf "${nb_archive}"
- name: Upload to S3
run: |
nb_archive="${{ steps.cfg.outputs.nb-archive }}"
echo "Using Keys: ...${AWS_ACCESS_KEY_ID:(-4)}/...${AWS_SECRET_ACCESS_KEY:(-4)}"
echo "Testing permissions"
aws s3 ls "${S3_DST}/" || true
aws s3 cp "${nb_archive}" "${S3_DST}/${nb_archive}"
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
AWS_DEFAULT_REGION: "ap-southeast-2"
AWS_REGION: "ap-southeast-2"
S3_DST: "s3://datacube-core-deployment/odc-stac/nb"

- name: Upload results (artifact)
uses: actions/upload-artifact@v3
with:
Expand Down
36 changes: 16 additions & 20 deletions docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,10 @@
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import subprocess
import sys
from pathlib import Path

import requests
from sphinx.util import logging

sys.path.insert(0, os.path.abspath(".."))
Expand All @@ -39,40 +39,36 @@ def filter(self, record: pylogging.LogRecord) -> bool:
# End of a workaround


def ensure_notebooks(dst_folder):
    """
    Ensure pre-rendered notebooks are present in ``dst_folder``.

    Notebooks are downloaded one-by-one from the Netlify deploy whose alias
    is the first 16 hex characters of the notebook-sources hash (the same
    value the CI workflow passes as ``alias:`` to the netlify action).

    :param dst_folder: Destination directory for the ``*.ipynb`` files.
    :return: ``True`` if the folder already exists or every notebook was
             downloaded successfully, ``False`` when any download fails.
    """
    dst_folder = Path(dst_folder)
    if dst_folder.exists():
        # A populated folder is treated as a valid cache; nothing to fetch.
        print(f"Found pre-rendered notebooks in {dst_folder}")
        return True

    # Hash of the notebook sources selects the matching Netlify deploy.
    nb_hash, nb_paths = notebook_hash.compute("../notebooks")
    # Map source paths to rendered names: notebooks/foo.py -> foo.ipynb
    nb_names = [p.rsplit("/", 1)[-1].rsplit(".", 1)[0] + ".ipynb" for p in nb_paths]

    dst_folder.mkdir()
    for nb in nb_names:
        url = f"https://{nb_hash[:16]}--odc-stac-docs.netlify.app/notebooks/{nb}"
        print(f"{url} -> notebooks/{nb}")
        rr = requests.get(url, timeout=5)
        if not rr.ok:
            # NOTE(review): dst_folder is left partially populated on failure,
            # so a re-run would treat it as cached — consider cleaning it up.
            return False
        with open(dst_folder / nb, "wt", encoding="utf-8") as dst:
            dst.write(rr.text)

    return True


# working directory is docs/
# download pre-rendered notebooks unless folder is already populated
nb_hash = notebook_hash.compute("../notebooks")
https_url = (
f"https://packages.dea.ga.gov.au/odc-stac/nb/odc-stac-notebooks-{nb_hash}.tar.gz"
)
if not ensure_notebooks(https_url, "notebooks"):
if not ensure_notebooks("notebooks"):
notebooks_directory = os.path.abspath("../notebooks")
raise Exception(
raise RuntimeException(
"There is no cached version of these notebooks. "
"Build the notebooks before building the documentation. "
f"Notebooks are located in {notebooks_directory}."
Expand Down
8 changes: 4 additions & 4 deletions scripts/notebook_hash.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import os.path

import hashlib
import os.path


def compute(folder: str) -> str:
Expand All @@ -15,9 +14,10 @@ def compute(folder: str) -> str:
with open(path, "rb") as file:
bytes = file.read()
hash.update(bytes)
return hash.hexdigest()
return hash.hexdigest(), paths


if __name__ == "__main__":
    # Default to the repository's notebooks/ folder, resolved relative to
    # this script so the tool works from any working directory.
    folder = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "notebooks"))
    # compute() returns (hash, paths); only the hash is printed for CI use.
    hsh, _ = compute(folder)
    print(hsh)

0 comments on commit fcfe28f

Please sign in to comment.