# This workflow installs Python dependencies, runs tests, and lints with a single version of Python.
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
# This is the Test CI pipeline, run against the performance environment on every merge to main.
name: Perf Env Build Test CI

on:
  push:
    branches: [ main ]
  workflow_dispatch:
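  # workflow_dispatch additionally allows the pipeline to be triggered manually from the Actions tab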

# Ensures that only one deploy task per branch/environment will run at a time.
concurrency:
  group: performance-environment
  cancel-in-progress: false

jobs:
  unittest:
    # run all supported Python versions here; the remaining jobs run only the latest one
    name: unittest
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [ '3.7', '3.8', '3.9', '3.10' ]
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install flake8 pytest pytest-cov
          if [[ -f requirements.txt ]]; then pip install -r requirements.txt; fi
          if [[ -f tests_requirements.txt ]]; then pip install -r tests_requirements.txt; fi
      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
      - name: ⚙️ Set Kubeconfig and hosts
        env:
          KUBECONFIG_CONTENTS: ${{ secrets.PERF_KUBECONFIG }}
          RUNNER_PATH: ${{ secrets.RUNNER_PATH }}
          OCP_HOSTS: ${{ secrets.PERF_OCP_HOSTS }}
        run: |
          mkdir -p "$RUNNER_PATH/.kube/"
          echo "$KUBECONFIG_CONTENTS" > "$RUNNER_PATH/.kube/config"
          echo "KUBECONFIG_PATH=$RUNNER_PATH/.kube/config" >> "$GITHUB_ENV"
          sudo tee -a /etc/hosts <<< "$OCP_HOSTS" > /dev/null
      - name: 📃 Unittest with pytest
        env:
          KUBEADMIN_PASSWORD: ${{ secrets.PERF_KUBEADMIN_PASSWORD }}
          PIN_NODE1: ${{ secrets.PERF_PIN_NODE1 }}
          PIN_NODE2: ${{ secrets.PERF_PIN_NODE2 }}
          RUNNER_PATH: ${{ secrets.RUNNER_PATH }}
          ELASTICSEARCH: ${{ secrets.PERF_ELASTICSEARCH }}
          ELASTICSEARCH_PORT: ${{ secrets.PERF_ELASTICSEARCH_PORT }}
          ELASTICSEARCH_USER: ${{ secrets.PERF_ELASTICSEARCH_USER }}
          ELASTICSEARCH_PASSWORD: ${{ secrets.PERF_ELASTICSEARCH_PASSWORD }}
          IBM_REGION_NAME: ${{ secrets.IBM_REGION_NAME }}
          IBM_ENDPOINT_URL: ${{ secrets.IBM_ENDPOINT_URL }}
          IBM_ACCESS_KEY_ID: ${{ secrets.IBM_ACCESS_KEY_ID }}
          IBM_SECRET_ACCESS_KEY: ${{ secrets.IBM_SECRET_ACCESS_KEY }}
          IBM_BUCKET: ${{ secrets.IBM_BUCKET }}
          IBM_KEY: ${{ secrets.IBM_KEY }}
          RUN_ARTIFACTS_URL: ${{ secrets.PERF_RUN_ARTIFACTS_URL }}
        run: |
          # Replicate the Dockerfile setup so pytest runs in the same environment
          # install oc/kubectl
          oc_version="4.13.0-0.okd-2023-06-04-080300"
          curl -L "https://github.com/openshift/okd/releases/download/${oc_version}/openshift-client-linux-${oc_version}.tar.gz" -o "$RUNNER_PATH/openshift-client-linux-${oc_version}.tar.gz"
          tar -xzvf "$RUNNER_PATH/openshift-client-linux-${oc_version}.tar.gz" -C "$RUNNER_PATH/"
          rm "$RUNNER_PATH/openshift-client-linux-${oc_version}.tar.gz"
          cp "$RUNNER_PATH/kubectl" /usr/local/bin/kubectl
          cp "$RUNNER_PATH/oc" /usr/local/bin/oc
          # clone benchmark-operator v1.0.2
          git clone -b v1.0.2 https://github.com/cloud-bulldozer/benchmark-operator "$RUNNER_PATH/benchmark-operator"
          # run pytest - unit tests only
          pytest -v tests/unittest/
  integration_test:
    name: integration_test
    needs: [ unittest ]
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [ '3.10' ]
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install flake8 pytest pytest-cov
          if [[ -f requirements.txt ]]; then pip install -r requirements.txt; fi
          if [[ -f tests_requirements.txt ]]; then pip install -r tests_requirements.txt; fi
      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
      - name: ⚙️ Set Kubeconfig and hosts
        env:
          KUBECONFIG_CONTENTS: ${{ secrets.PERF_KUBECONFIG }}
          RUNNER_PATH: ${{ secrets.RUNNER_PATH }}
          OCP_HOSTS: ${{ secrets.PERF_OCP_HOSTS }}
        run: |
          mkdir -p "$RUNNER_PATH/.kube/"
          echo "$KUBECONFIG_CONTENTS" > "$RUNNER_PATH/.kube/config"
          echo "KUBECONFIG_PATH=$RUNNER_PATH/.kube/config" >> "$GITHUB_ENV"
          sudo tee -a /etc/hosts <<< "$OCP_HOSTS" > /dev/null
      - name: 📃 Integration test with pytest
        env:
          KUBEADMIN_PASSWORD: ${{ secrets.PERF_KUBEADMIN_PASSWORD }}
          PIN_NODE1: ${{ secrets.PERF_PIN_NODE1 }}
          PIN_NODE2: ${{ secrets.PERF_PIN_NODE2 }}
          RUNNER_PATH: ${{ secrets.RUNNER_PATH }}
          ELASTICSEARCH: ${{ secrets.PERF_ELASTICSEARCH }}
          ELASTICSEARCH_PORT: ${{ secrets.PERF_ELASTICSEARCH_PORT }}
          ELASTICSEARCH_USER: ${{ secrets.PERF_ELASTICSEARCH_USER }}
          ELASTICSEARCH_PASSWORD: ${{ secrets.PERF_ELASTICSEARCH_PASSWORD }}
          IBM_REGION_NAME: ${{ secrets.IBM_REGION_NAME }}
          IBM_ENDPOINT_URL: ${{ secrets.IBM_ENDPOINT_URL }}
          IBM_ACCESS_KEY_ID: ${{ secrets.IBM_ACCESS_KEY_ID }}
          IBM_SECRET_ACCESS_KEY: ${{ secrets.IBM_SECRET_ACCESS_KEY }}
          IBM_BUCKET: ${{ secrets.IBM_BUCKET }}
          IBM_KEY: ${{ secrets.IBM_KEY }}
          RUN_ARTIFACTS_URL: ${{ secrets.PERF_RUN_ARTIFACTS_URL }}
          GRAFANA_URL: ${{ secrets.PERF_GRAFANA_URL }}
          GRAFANA_API_KEY: ${{ secrets.PERF_GRAFANA_API_KEY }}
          GRAFANA_JSON: ${{ secrets.PERF_GRAFANA_JSON }}
        run: |
          # Replicate the Dockerfile setup so pytest runs in the same environment
          # install oc/kubectl
          oc_version="4.13.0-0.okd-2023-06-04-080300"
          curl -L "https://github.com/openshift/okd/releases/download/${oc_version}/openshift-client-linux-${oc_version}.tar.gz" -o "$RUNNER_PATH/openshift-client-linux-${oc_version}.tar.gz"
          tar -xzvf "$RUNNER_PATH/openshift-client-linux-${oc_version}.tar.gz" -C "$RUNNER_PATH/"
          rm "$RUNNER_PATH/openshift-client-linux-${oc_version}.tar.gz"
          cp "$RUNNER_PATH/kubectl" /usr/local/bin/kubectl
          cp "$RUNNER_PATH/oc" /usr/local/bin/oc
          rm -rf "$RUNNER_PATH/kubectl"
          rm -rf "$RUNNER_PATH/oc"
          # install virtctl for VNC
          virtctl_version="0.59.1"
          curl -L "https://github.com/kubevirt/kubevirt/releases/download/v${virtctl_version}/virtctl-v${virtctl_version}-linux-amd64" -o "$RUNNER_PATH/virtctl"
          chmod +x "$RUNNER_PATH/virtctl"
          cp "$RUNNER_PATH/virtctl" /usr/local/bin/virtctl
          rm -rf "$RUNNER_PATH/virtctl"
          # clone benchmark-operator v1.0.2
          git clone -b v1.0.2 https://github.com/cloud-bulldozer/benchmark-operator "$RUNNER_PATH/benchmark-operator"
          # run pytest
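          # note: the "coverage run" pass below re-executes the suite and rewrites the
          # .coverage data file consumed by "coverage report" and the coveralls step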
          pytest -v tests --cov=benchmark_runner --cov-report=term-missing
          coverage run -m pytest -v tests
          sleep 60
          coverage report -m
      - name: 🎥 Publish to coveralls.io
        env:
          GITHUB_TOKEN: ${{ secrets.GIT_TOKEN }}
        run: |
          pip install coveralls
          COVERALLS_REPO_TOKEN=${{ secrets.COVERALLS_REPO_TOKEN }} coveralls
  pypi_upload:
    name: pypi_upload
    needs: [ unittest, integration_test ]
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [ '3.10' ]
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: 🔨 Build and publish distribution 📦 to PyPI
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
        run: |
          python -m pip install --upgrade pip
          pip install setuptools wheel twine
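          # build both the source and wheel distributions, then upload everything under dist/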
          python setup.py sdist bdist_wheel
          twine upload dist/*
  pypi_validate:
    name: pypi_validate
    needs: [ unittest, integration_test, pypi_upload ]
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [ '3.10' ]
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Validate upload to PyPI
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
        run: |
          echo '⌛ Wait until the package is updated in PyPI'
          # Poll until the latest benchmark-runner version is available in PyPI (timeout 900 seconds)
          timeout=900
          sleep_time=30
          expected_version=$(python3 setup.py --version)
          pip --no-cache-dir install benchmark-runner --upgrade
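          # pip freeze prints "benchmark-runner==X.Y.Z"; the sed/cut pair extracts the bare version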
          build=$(pip freeze | grep benchmark-runner | sed 's/==/=/g')
          actual_version="$(cut -d'=' -f2 <<<"$build")"
          current_wait_time=0
          # use an arithmetic comparison; "<" inside [[ ]] would compare strings lexicographically
          while (( current_wait_time < timeout )); do
            if [[ "$expected_version" = "$actual_version" ]]; then
              echo "benchmark-runner version $expected_version was updated in PyPI"
              break
            else
              # sleep for $sleep_time seconds before retrying
              echo "waited $((current_wait_time+sleep_time)) seconds"
              sleep $sleep_time
              current_wait_time="$((current_wait_time+sleep_time))"
              pip --no-cache-dir install benchmark-runner --upgrade
              build=$(pip freeze | grep benchmark-runner | sed 's/==/=/g')
              actual_version="$(cut -d'=' -f2 <<<"$build")"
              if (( current_wait_time >= timeout )); then
                echo "The latest benchmark-runner version was not updated in PyPI after $timeout seconds - raising failure"
                exit 1
              fi
            fi
          done
  quay_upload:
    name: quay_upload
    needs: [ unittest, integration_test, pypi_upload, pypi_validate ]
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [ '3.10' ]
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: ⌛ Build and Upload 🐋 to quay.io
        run: |
          version=$(python3 setup.py --version)
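          # build the image twice: once with a versioned tag and once as the floating "latest" tag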
          sudo docker build --build-arg VERSION="$version" -t "quay.io/${{ secrets.QAUYIO_REPOSITORY }}/${{ secrets.PACKAGE_NAME }}:v$version" .
          sudo docker build --build-arg VERSION=latest -t "quay.io/${{ secrets.QAUYIO_REPOSITORY }}/${{ secrets.PACKAGE_NAME }}:latest" .
          sudo docker login quay.io -u ${{ secrets.QAUYIO_ROBOT_USER }} -p ${{ secrets.QAUYIO_ROBOT_TOKEN }}
          sudo docker push "quay.io/${{ secrets.QAUYIO_REPOSITORY }}/${{ secrets.PACKAGE_NAME }}:v$version"
          sudo docker push "quay.io/${{ secrets.QAUYIO_REPOSITORY }}/${{ secrets.PACKAGE_NAME }}:latest"
          echo '⌛ Wait 30 seconds until the image is updated in quay.io'
          sleep 30
  update_grafana_dashboard:
    # update the Grafana dashboard with the latest product versions / grafonnet code
    name: update_grafana_dashboard
    needs: [ unittest, integration_test, pypi_upload, pypi_validate ]
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          if [[ -f requirements.txt ]]; then pip install -r requirements.txt; fi
      - name: Update latest product versions in main.libsonnet
        env:
          MAIN_LIBSONNET_PATH: ${{ secrets.PERF_MAIN_LIBSONNET_PATH }}
          ELASTICSEARCH: ${{ secrets.PERF_ELASTICSEARCH }}
          ELASTICSEARCH_PORT: ${{ secrets.PERF_ELASTICSEARCH_PORT }}
          ELASTICSEARCH_USER: ${{ secrets.PERF_ELASTICSEARCH_USER }}
          ELASTICSEARCH_PASSWORD: ${{ secrets.PERF_ELASTICSEARCH_PASSWORD }}
        run: |
          python "$GITHUB_WORKSPACE"/grafonnet_generator/grafana/update_versions_main_libsonnet.py
      - name: Generate grafana dashboard.json using grafonnet container
        run: |
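          # render the grafonnet sources into dashboard.json; the perf directory is mounted at /app inside the container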
          sudo podman run --rm --name run_grafonnet -v "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf:/app --privileged quay.io/ebattat/run_grafonnet:latest
      - name: Check for changes in dashboard.json
        id: json_check_changes
        run: |
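          # "git diff --quiet" exits non-zero when the file differs, so changes=true is
          # recorded only when the regenerated dashboard.json actually changed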
          git diff --quiet "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/dashboard.json || echo "changes=true" >> "$GITHUB_OUTPUT"
      - name: Update the Grafana dashboard if any changes are detected in dashboard.json
        if: steps.json_check_changes.outputs.changes == 'true'
        env:
          PERF_GRAFANA_URL: ${{ secrets.PERF_GRAFANA_URL }}
          PERF_GRAFANA_API_KEY: ${{ secrets.PERF_GRAFANA_API_KEY }}
          GRAFANA_JSON: ${{ secrets.PERF_GRAFANA_JSON }}
        run: |
          # Back up dashboard.json before the Python code adds the dashboard version
          cp -p "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/dashboard.json "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/dashboard_backup.json
          # Update new product versions and override the Grafana dashboard
          python "$GITHUB_WORKSPACE"/benchmark_runner/grafana/update_grafana_dashboard.py
          # Restore dashboard.json after the Python code added the dashboard version
          cp -p "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/dashboard_backup.json "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/dashboard.json
      - name: Commit dashboard.json if any changes are detected
        if: steps.json_check_changes.outputs.changes == 'true'
        run: |
          git checkout main
          git config --global user.email "${{ secrets.EMAIL }}"
          git config --global user.name "${{ secrets.USER_NAME }}"
          git add "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/dashboard.json
          git commit -m "Update grafana json file"
          # Pull the latest changes from the remote main branch
          git pull origin main
          # Push the changes to the remote main branch
          git push
      - name: Check whether new product versions were added in main.libsonnet
        id: libsonnet_check_changes
        run: |
          git diff --quiet "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/jsonnet/main.libsonnet || echo "changes=true" >> "$GITHUB_OUTPUT"
      - name: Commit main.libsonnet if any changes are detected
        if: steps.libsonnet_check_changes.outputs.changes == 'true'
        run: |
          git checkout main
          git config --global user.email "${{ secrets.EMAIL }}"
          git config --global user.name "${{ secrets.USER_NAME }}"
          git add "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/jsonnet/main.libsonnet
          git commit -m "Update main.libsonnet file"
          # Pull the latest changes from the remote main branch
          git pull origin main
          # Push the changes to the remote main branch
          git push
  bump_version:
    name: bump_version
    needs: [ unittest, integration_test, pypi_upload, pypi_validate, quay_upload, update_grafana_dashboard ]
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [ '3.10' ]
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install latest benchmark-runner
        run: |
          python -m pip install --upgrade pip
          pip install benchmark-runner wheel
      - name: 🎁 Bump Version
        run: |
          version=$(python3 setup.py --version)
          git checkout main
          pip install bumpversion
          git config --global user.email "${{ secrets.EMAIL }}"
          git config --global user.name "${{ secrets.USER_NAME }}"
          git config pull.rebase false  # merge (the default strategy)
          bumpversion patch
          # GITHUB_REPOSITORY is reserved by Actions, so the secret is named GIT_REPOSITORY
          git commit .bumpversion.cfg setup.py -m "bump version from existing version v$version"
          git pull https://${{ secrets.GIT_TOKEN }}@${{ secrets.GIT_REPOSITORY }} main
          git push https://${{ secrets.GIT_TOKEN }}@${{ secrets.GIT_REPOSITORY }} main
          git push https://${{ secrets.GIT_TOKEN }}@${{ secrets.GIT_REPOSITORY }} --tags
  e2e:
    name: e2e
    needs: [ unittest, integration_test, pypi_upload, pypi_validate, quay_upload, update_grafana_dashboard, bump_version ]
    runs-on: ubuntu-latest
    strategy:
      # run only one workload at a time
      max-parallel: 1
      # continue with the remaining workloads if one fails
      fail-fast: false
      matrix:
        workload: [ 'stressng_pod', 'vdbench_pod' ]
        python-version: [ '3.10' ]
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install latest benchmark-runner
        run: |
          python -m pip install --upgrade pip
          pip install benchmark-runner
      - name: ⚙️ SET SSH key
        env:
          PROVISION_PRIVATE_KEY: ${{ secrets.PERF_PROVISION_PRIVATE_KEY }}
          RUNNER_PATH: ${{ secrets.RUNNER_PATH }}
          PROVISION_IP: ${{ secrets.PERF_PROVISION_IP }}
          PROVISION_USER: ${{ secrets.PERF_PROVISION_USER }}
        run: |
          umask 77
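          # umask 77 makes the .ssh directory and key files readable only by the runner user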
mkdir -p "$RUNNER_PATH/.ssh/"
echo "$PROVISION_PRIVATE_KEY" > $RUNNER_PATH/private_key.txt
sudo chmod 600 $RUNNER_PATH/private_key.txt
echo "PROVISION_PRIVATE_KEY_PATH=$RUNNER_PATH/private_key.txt" >> "$GITHUB_ENV"
cat >> "$RUNNER_PATH/.ssh/config" <<END
Host provision
HostName $PROVISION_IP
User $PROVISION_USER
IdentityFile $RUNNER_PATH/private_key.txt
StrictHostKeyChecking no
ServerAliveInterval 30
ServerAliveCountMax 5
END
      - name: ⚙️ Set Kubeconfig and hosts
        env:
          KUBECONFIG_CONTENTS: ${{ secrets.PERF_KUBECONFIG }}
          RUNNER_PATH: ${{ secrets.RUNNER_PATH }}
          OCP_HOSTS: ${{ secrets.PERF_OCP_HOSTS }}
        run: |
          mkdir -p "$RUNNER_PATH/.kube/"
          echo "$KUBECONFIG_CONTENTS" > "$RUNNER_PATH/.kube/config"
          echo "KUBECONFIG_PATH=$RUNNER_PATH/.kube/config" >> "$GITHUB_ENV"
          sudo tee -a /etc/hosts <<< "$OCP_HOSTS" > /dev/null
      - name: ✔️ E2E tests using latest quay.io
        env:
          KUBEADMIN_PASSWORD: ${{ secrets.PERF_KUBEADMIN_PASSWORD }}
          PIN_NODE_BENCHMARK_OPERATOR: ${{ secrets.PERF_PIN_NODE_BENCHMARK_OPERATOR }}
          PIN_NODE1: ${{ secrets.PERF_PIN_NODE1 }}
          PIN_NODE2: ${{ secrets.PERF_PIN_NODE2 }}
          ELASTICSEARCH: ${{ secrets.PERF_ELASTICSEARCH }}
          ELASTICSEARCH_PORT: ${{ secrets.PERF_ELASTICSEARCH_PORT }}
          ELASTICSEARCH_USER: ${{ secrets.PERF_ELASTICSEARCH_USER }}
          ELASTICSEARCH_PASSWORD: ${{ secrets.PERF_ELASTICSEARCH_PASSWORD }}
          RUNNER_PATH: ${{ secrets.RUNNER_PATH }}
          CONTAINER_KUBECONFIG_PATH: ${{ secrets.CONTAINER_KUBECONFIG_PATH }}
          IBM_REGION_NAME: ${{ secrets.IBM_REGION_NAME }}
          IBM_ENDPOINT_URL: ${{ secrets.IBM_ENDPOINT_URL }}
          IBM_ACCESS_KEY_ID: ${{ secrets.IBM_ACCESS_KEY_ID }}
          IBM_SECRET_ACCESS_KEY: ${{ secrets.IBM_SECRET_ACCESS_KEY }}
          IBM_BUCKET: ${{ secrets.IBM_BUCKET }}
          IBM_KEY: ${{ secrets.IBM_KEY }}
          RUN_ARTIFACTS_URL: ${{ secrets.PERF_RUN_ARTIFACTS_URL }}
        run: |
          build=$(pip freeze | grep benchmark-runner | sed 's/==/=/g')
          build_version="$(cut -d'=' -f2 <<<"$build")"
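          # the workload runs remotely: copy the kubeconfig to the provision host, then
          # run the published benchmark-runner image there under podman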
          echo '>>>>>>>>>>>>>>>>>>>>>>>>>> Start E2E workload: ${{ matrix.workload }} >>>>>>>>>>>>>>>>>>>>>>>>>>'
          scp -r "$RUNNER_PATH/.kube/config" provision:"$CONTAINER_KUBECONFIG_PATH"
          ssh -t provision "podman run --rm -t -e WORKLOAD='${{ matrix.workload }}' -e KUBEADMIN_PASSWORD='$KUBEADMIN_PASSWORD' -e PIN_NODE_BENCHMARK_OPERATOR='$PIN_NODE_BENCHMARK_OPERATOR' -e PIN_NODE1='$PIN_NODE1' -e PIN_NODE2='$PIN_NODE2' -e ELASTICSEARCH='$ELASTICSEARCH' -e ELASTICSEARCH_PORT='$ELASTICSEARCH_PORT' -e ELASTICSEARCH_USER='$ELASTICSEARCH_USER' -e ELASTICSEARCH_PASSWORD='$ELASTICSEARCH_PASSWORD' -e IBM_REGION_NAME='$IBM_REGION_NAME' -e IBM_ENDPOINT_URL='$IBM_ENDPOINT_URL' -e IBM_ACCESS_KEY_ID='$IBM_ACCESS_KEY_ID' -e IBM_SECRET_ACCESS_KEY='$IBM_SECRET_ACCESS_KEY' -e IBM_BUCKET='$IBM_BUCKET' -e IBM_KEY='$IBM_KEY' -e RUN_ARTIFACTS_URL='$RUN_ARTIFACTS_URL' -e BUILD_VERSION='$build_version' -e RUN_TYPE='test_ci' -e TIMEOUT='2000' -e log_level='INFO' -v '$CONTAINER_KUBECONFIG_PATH':'$CONTAINER_KUBECONFIG_PATH' --privileged 'quay.io/ebattat/benchmark-runner:v$build_version'"
          ssh -t provision "podman rmi -f 'quay.io/ebattat/benchmark-runner:v$build_version'"
          echo '>>>>>>>>>>>>>>>>>>>>>>>>>> End E2E workload: ${{ matrix.workload }} >>>>>>>>>>>>>>>>>>>>>>>>>>>>'