benchmark.yaml
name: Benchmarks
on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - '*'
  workflow_dispatch: # allows the workflow to be triggered manually
  schedule:
    - cron: "0 5 * * 4" # once a week at 05:00 on Thursdays
# Sets permissions of the GITHUB_TOKEN to allow writing results back to `main`
# and deploying to GitHub Pages
permissions:
  contents: write
  pages: write
  id-token: write
# Allow only one concurrent Pages deployment and cancel in-progress
# deployment jobs so that only the latest queued run completes.
concurrency:
  group: "pages"
  cancel-in-progress: true
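# Fixed machine label under which asv records results, so that results from
# different ephemeral runners end up in the same series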
env:
  MACHINE_NAME: gcp-n1-standard-8
jobs:
  run-benchmarks:
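    # Self-hosted benchmark runner provisioned on demand (Cirun); the run_id
    # suffix makes the runner label unique to this run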
    runs-on: "cirun-benchmark-runner--${{ github.run_id }}"
    steps:
      # Install git first; otherwise actions/checkout silently falls back
      # to the GitHub REST API for downloading the repo
      - name: Install dependencies
        run: |
          sudo apt update -y
          sudo apt install git zlib1g-dev build-essential pkg-config rsync lsof -y
      - uses: actions/checkout@v4
        with:
          repository: deshaw/versioned-hdf5
          path: ./versioned-hdf5/
          fetch-depth: 0 # Needed for asv to be able to run benchmarks on old commits
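      # Record the benchmarked commit so it can be referenced in the commit
      # message of the results commit below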
      - name: Get commit hash
        working-directory: ./versioned-hdf5
        run: echo "PROJECT_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV
      - uses: actions/checkout@v4
        with:
          path: ./versioned-hdf5-benchmarks/
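      # setup-miniconda creates and activates a conda env named "test" by
      # default; later steps run inside it via `conda run -n test`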
      - name: Setup python with miniconda
        uses: conda-incubator/setup-miniconda@v3
        with:
          python-version: 3.11
          channels: conda-forge
      - name: Install versioned-hdf5
        working-directory: ./versioned-hdf5
        run: |
          conda install -n test pip hdf5 openmpi h5py ndindex -c conda-forge -y
          conda run -n test pip install '.[bench]'
      - name: Dump conda environment
        run: conda list -n test
      - name: Print hdf5 configuration
        working-directory: ./versioned-hdf5-benchmarks
        run: |
          conda run -n test python -c 'import utils; utils.debug_libhdf5_so()'
          conda run -n test h5cc -showconfig
      - name: Run benchmarks
        working-directory: ./versioned-hdf5
        run: |
          # Copy the old asv results back into ./versioned-hdf5 to avoid
          # running old benchmarks again
          rsync -r ../versioned-hdf5-benchmarks/.asv ./
          # Set the machine name explicitly; otherwise asv generates a name
          # that is unique to each run
          echo "Setting machine name to $MACHINE_NAME"
          conda run -n test asv machine --machine $MACHINE_NAME --yes -v
          cat ~/.asv-machine.json
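          # Benchmark every commit since the 1.7.0 tag; --skip-existing skips
          # commits that already have saved results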
          # Don't fail the job (exit code 1) if results are slower
          conda run -n test asv run --skip-existing --machine $MACHINE_NAME 1.7.0.. || true
          # Copy the new benchmark results to the benchmarks repo to commit them
          rsync -r .asv ../versioned-hdf5-benchmarks/
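      # Commit the updated results back to this repository; skipped for pull requests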
      - name: Add and commit benchmarks
        if: github.event_name != 'pull_request'
        uses: EndBug/add-and-commit@v9
        with:
          cwd: ./versioned-hdf5-benchmarks
          add: .asv/
          message: "Update benchmarks for commit ${{ env.PROJECT_SHA }}"
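      # asv publish renders the accumulated results into a static HTML report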
      - name: Generate html
        working-directory: ./versioned-hdf5
        run: conda run -n test asv publish -o ./html
      - name: Setup Pages
        uses: actions/configure-pages@v5
      - name: Upload artifact
        uses: actions/upload-pages-artifact@v3
        with:
          path: ./versioned-hdf5/html
          retention-days: 30
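      # Publish the uploaded artifact to GitHub Pages (skipped on pull requests)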
      - name: Deploy to GitHub Pages
        if: github.event_name != 'pull_request'
        id: deployment
        uses: actions/deploy-pages@v4