Don't use docker for building notebooks
Instead, build the conda environment from the binder folder and use that.
Kirill888 committed Dec 8, 2022
1 parent 3e76efd commit 056f928
Showing 2 changed files with 194 additions and 83 deletions.
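
Both workflows below build the notebook environment from binder/environment.yml, whose contents are not part of this diff. A minimal illustrative sketch of such a file (package names assumed, not taken from this commit) might be:

    # Illustrative only; the actual binder/environment.yml is not shown in this diff.
    name: odc-stac-binder
    channels:
      - conda-forge
    dependencies:
      - python=3.10
      - odc-stac
      - jupytext
      - nbconvert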
69 changes: 63 additions & 6 deletions .github/workflows/main.yml
@@ -95,6 +95,51 @@ jobs:
run: |
mamba env export -p /tmp/test_env
build-binder-env:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v3

- uses: actions/cache@v3
id: binder_cache
with:
path: /tmp/binder_env
key: ${{ runner.os }}-binder-env-${{ hashFiles('binder/environment.yml') }}

- uses: conda-incubator/setup-miniconda@v2
if: steps.binder_cache.outputs.cache-hit != 'true'
with:
channels: conda-forge
channel-priority: true
activate-environment: ""
mamba-version: "*"
use-mamba: true

- name: Dump Conda Environment Info
shell: bash -l {0}
if: steps.binder_cache.outputs.cache-hit != 'true'
run: |
conda info
conda list
mamba -V
conda config --show-sources
conda config --show
printenv | sort
- name: Build Python Environment for Notebooks
shell: bash -l {0}
if: steps.binder_cache.outputs.cache-hit != 'true'
run: |
cd binder
mamba env create -f environment.yml -p /tmp/binder_env
- name: Check Python Env
shell: bash -l {0}
if: steps.binder_cache.outputs.cache-hit != 'true'
run: |
mamba env export -p /tmp/binder_env
run-black-check:
runs-on: ubuntu-latest
needs:
@@ -303,6 +348,9 @@ jobs:
build-notebooks:
runs-on: ubuntu-latest

needs:
- build-binder-env

steps:
- uses: actions/checkout@v3

@@ -312,23 +360,32 @@
path: docs/notebooks
key: docs-notebooks-${{ hashFiles('notebooks/*.py') }}

- name: Run Notebooks
- name: Get Conda Environment from Cache
if: steps.nb_cache.outputs.cache-hit != 'true'
uses: actions/cache@v3
id: conda_cache
with:
path: /tmp/binder_env
key: ${{ runner.os }}-binder-env-${{ hashFiles('binder/environment.yml') }}

- name: Update PATH
if: steps.nb_cache.outputs.cache-hit != 'true'
shell: bash
run: |
docker pull $DKR
echo "/tmp/binder_env/bin" >> $GITHUB_PATH
- name: Run Notebooks
if: steps.nb_cache.outputs.cache-hit != 'true'
run: |
nb_dir=docs/notebooks
mkdir -p $nb_dir
for src in $(find notebooks -type f -maxdepth 1 -name '*py'); do
dst="$nb_dir/$(basename ${src%%.py}.ipynb)"
echo "$src -> $dst"
docker run -i --entrypoint ./binder/render-nb-pipe.sh $DKR <$src >$dst
./binder/render-nb-pipe.sh <$src >$dst
done
ls -lh $nb_dir
env:
DKR: kirillodc/odc-stac-binder:latest

check-docs:
runs-on: ubuntu-latest

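In both workflows, each notebooks/*.py source is piped through binder/render-nb-pipe.sh on stdin and the executed .ipynb is captured on stdout. The actual script is not included in this diff; a plausible minimal sketch, assuming the binder environment provides jupytext and nbconvert, is:

    #!/usr/bin/env bash
    # Hypothetical sketch only; not the repository's actual binder/render-nb-pipe.sh.
    # Reads a jupytext percent-format .py notebook on stdin, executes it,
    # and writes the resulting .ipynb notebook to stdout.
    set -euo pipefail
    jupytext --from py:percent --to ipynb --output - - \
      | jupyter nbconvert --to notebook --execute --stdin --stdout
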
208 changes: 131 additions & 77 deletions .github/workflows/render.yml
@@ -4,86 +4,140 @@ on:
workflow_dispatch:
push:
paths:
- 'notebooks/*py'
- '.github/workflows/render.yml'

env:
DKR: kirillodc/odc-stac-binder:latest
- "notebooks/*py"
- ".github/workflows/render.yml"

jobs:
build-binder-env:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v3

- uses: actions/cache@v3
id: binder_cache
with:
path: /tmp/binder_env
key: ${{ runner.os }}-binder-env-${{ hashFiles('binder/environment.yml') }}

- uses: conda-incubator/setup-miniconda@v2
if: steps.binder_cache.outputs.cache-hit != 'true'
with:
channels: conda-forge
channel-priority: true
activate-environment: ""
mamba-version: "*"
use-mamba: true

- name: Dump Conda Environment Info
shell: bash -l {0}
if: steps.binder_cache.outputs.cache-hit != 'true'
run: |
conda info
conda list
mamba -V
conda config --show-sources
conda config --show
printenv | sort
- name: Build Python Environment for Notebooks
shell: bash -l {0}
if: steps.binder_cache.outputs.cache-hit != 'true'
run: |
cd binder
mamba env create -f environment.yml -p /tmp/binder_env
- name: Check Python Env
shell: bash -l {0}
if: steps.binder_cache.outputs.cache-hit != 'true'
run: |
mamba env export -p /tmp/binder_env
render:
runs-on: ubuntu-latest

needs:
- build-binder-env

steps:
- uses: actions/checkout@v3

- name: Config
id: cfg
run: |
find notebooks/ -maxdepth 1 -name '*.py' -type f | sort -f -d
nb_dir="docs/notebooks"
nb_hash=$(python scripts/notebook_hash.py)
echo "Notebooks hash: ${nb_hash}"
echo "nb-dir=${nb_dir}" >> $GITHUB_OUTPUT
echo "nb-hash=${nb_hash}" >> $GITHUB_OUTPUT
echo "nb-archive=odc-stac-notebooks-${nb_hash}.tar.gz" >> $GITHUB_OUTPUT
- uses: actions/cache@v3
id: nb_cache
with:
path: ${{ steps.cfg.outputs.nb-dir }}
key: docs-notebooks-${{ hashFiles('notebooks/*.py') }}

- name: Pull Docker
if: steps.nb_cache.outputs.cache-hit != 'true'
run: |
docker pull $DKR
- name: Run Notebooks
if: steps.nb_cache.outputs.cache-hit != 'true'
run: |
nb_dir="${{ steps.cfg.outputs.nb-dir }}"
mkdir -p $nb_dir
for src in $(find notebooks -type f -maxdepth 1 -name '*py'); do
dst="${nb_dir}/$(basename ${src%%.py}.ipynb)"
echo "$src -> $dst"
docker run -i --entrypoint ./binder/render-nb-pipe.sh $DKR <$src >$dst
done
ls -lh ${nb_dir}/
- name: Package Notebooks
run: |
nb_dir="${{ steps.cfg.outputs.nb-dir }}"
nb_hash="${{ steps.cfg.outputs.nb-hash }}"
nb_archive="${{ steps.cfg.outputs.nb-archive }}"
echo "DIR: ${nb_dir}"
echo "NB hash: $nb_hash"
echo "Archive: $nb_archive"
(cd $nb_dir && tar cvz .) > "${nb_archive}"
ls -lh "${nb_archive}"
tar tzf "${nb_archive}"
- name: Upload to S3
run: |
nb_archive="${{ steps.cfg.outputs.nb-archive }}"
echo "Using Keys: ...${AWS_ACCESS_KEY_ID:(-4)}/...${AWS_SECRET_ACCESS_KEY:(-4)}"
echo "Testing permissions"
aws s3 ls "${S3_DST}/" || true
aws s3 cp "${nb_archive}" "${S3_DST}/${nb_archive}"
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
AWS_DEFAULT_REGION: 'ap-southeast-2'
AWS_REGION: 'ap-southeast-2'
S3_DST: 's3://datacube-core-deployment/odc-stac/nb'

- name: Upload results (artifact)
uses: actions/upload-artifact@v3
with:
name: rendered-notebooks
path: docs/notebooks
if-no-files-found: error
- uses: actions/checkout@v3

- name: Config
id: cfg
run: |
find notebooks/ -maxdepth 1 -name '*.py' -type f | sort -f -d
nb_dir="docs/notebooks"
nb_hash=$(python scripts/notebook_hash.py)
echo "Notebooks hash: ${nb_hash}"
echo "nb-dir=${nb_dir}" >> $GITHUB_OUTPUT
echo "nb-hash=${nb_hash}" >> $GITHUB_OUTPUT
echo "nb-archive=odc-stac-notebooks-${nb_hash}.tar.gz" >> $GITHUB_OUTPUT
- uses: actions/cache@v3
id: nb_cache
with:
path: ${{ steps.cfg.outputs.nb-dir }}
key: docs-notebooks-${{ hashFiles('notebooks/*.py') }}

- name: Get Conda Environment from Cache
if: steps.nb_cache.outputs.cache-hit != 'true'
uses: actions/cache@v3
id: conda_cache
with:
path: /tmp/binder_env
key: ${{ runner.os }}-binder-env-${{ hashFiles('binder/environment.yml') }}

- name: Update PATH
if: steps.nb_cache.outputs.cache-hit != 'true'
shell: bash
run: |
echo "/tmp/binder_env/bin" >> $GITHUB_PATH
- name: Run Notebooks
if: steps.nb_cache.outputs.cache-hit != 'true'
run: |
nb_dir="${{ steps.cfg.outputs.nb-dir }}"
mkdir -p $nb_dir
for src in $(find notebooks -type f -maxdepth 1 -name '*py'); do
dst="${nb_dir}/$(basename ${src%%.py}.ipynb)"
echo "$src -> $dst"
./binder/render-nb-pipe.sh <$src >$dst
done
ls -lh ${nb_dir}/
- name: Package Notebooks
run: |
nb_dir="${{ steps.cfg.outputs.nb-dir }}"
nb_hash="${{ steps.cfg.outputs.nb-hash }}"
nb_archive="${{ steps.cfg.outputs.nb-archive }}"
echo "DIR: ${nb_dir}"
echo "NB hash: $nb_hash"
echo "Archive: $nb_archive"
(cd $nb_dir && tar cvz .) > "${nb_archive}"
ls -lh "${nb_archive}"
tar tzf "${nb_archive}"
- name: Upload to S3
run: |
nb_archive="${{ steps.cfg.outputs.nb-archive }}"
echo "Using Keys: ...${AWS_ACCESS_KEY_ID:(-4)}/...${AWS_SECRET_ACCESS_KEY:(-4)}"
echo "Testing permissions"
aws s3 ls "${S3_DST}/" || true
aws s3 cp "${nb_archive}" "${S3_DST}/${nb_archive}"
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
AWS_DEFAULT_REGION: "ap-southeast-2"
AWS_REGION: "ap-southeast-2"
S3_DST: "s3://datacube-core-deployment/odc-stac/nb"

- name: Upload results (artifact)
uses: actions/upload-artifact@v3
with:
name: rendered-notebooks
path: docs/notebooks
if-no-files-found: error
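
For reference, the CI steps above can be reproduced locally with the same commands the workflows run, assuming conda/mamba and a repository checkout (illustrative, not part of the commit):

    # Illustrative local reproduction of the notebook-rendering steps.
    mamba env create -f binder/environment.yml -p /tmp/binder_env
    export PATH="/tmp/binder_env/bin:$PATH"
    mkdir -p docs/notebooks
    for src in notebooks/*.py; do
      dst="docs/notebooks/$(basename "${src%.py}").ipynb"
      ./binder/render-nb-pipe.sh <"$src" >"$dst"
    done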
