-
Notifications
You must be signed in to change notification settings - Fork 1
347 lines (304 loc) · 11.8 KB
/
full_test.yaml
File metadata and controls
347 lines (304 loc) · 11.8 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
name: Deeplake tests
on:
  workflow_call:
    inputs:
      # JSON-encoded strategy matrix, consumed via fromJSON() by the test job.
      testMatrix:
        type: string
        required: true
      # Optional checkout overrides. The test job's checkout step references
      # inputs.repo / inputs.ref, which were previously undeclared and always
      # expanded to empty strings. Declaring them as optional with empty
      # defaults is backward compatible: actions/checkout falls back to the
      # calling repository/ref when these are empty.
      repo:
        type: string
        required: false
        default: ""
      ref:
        type: string
        required: false
        default: ""
    secrets:
      aws_role_arn:
        required: true
      gcp_sa_credentials_json:
        required: true
      oauth_client_id:
        required: true
      oauth_client_secret:
        required: true
      oauth_refresh_token:
        required: true
      hub_token:
        required: true
      hub_username:
        required: true
      azure_creds_json:
        required: true
      # Optional: the SonarCloud scan and Labelbox tests degrade gracefully
      # when these are not provided by the caller.
      sonar_token:
        required: false
      labelbox_token:
        required: false
jobs:
  test:
    name: Py${{ matrix.python-version }} | ${{ matrix.os }} | ${{ matrix.storage }}
    runs-on: ${{ matrix.os }}
    timeout-minutes: 150
    env:
      BUGGER_OFF: "true"
    strategy:
      fail-fast: false
      matrix: ${{ fromJSON(inputs.testMatrix) }}
    steps:
      # Derive a filesystem-safe artifact-name suffix from the storage flags,
      # e.g. "--s3 --gcs" -> "s3_gcs"; exported via GITHUB_ENV for later steps.
      - name: setup
        shell: python
        run: |
          import os
          with open(os.environ.get("GITHUB_ENV"), "a") as file:
              file.write("matrix_storage_filename=%s" % "${{ matrix.storage }}".replace(" ","").replace("--", "_")[1:])
      - name: checkout
        uses: actions/checkout@v5.0.0
        with:
          repository: ${{ inputs.repo }}
          ref: ${{ inputs.ref }}
          fetch-depth: 0
      - name: set up python ${{ matrix.python-version }}
        uses: actions/setup-python@v6.0.0
        with:
          python-version: ${{ matrix.python-version }}
          cache: pip
          cache-dependency-path: deeplake/requirements/*.txt
      #### Set up ffmpeg
      - name: setup ffmpeg (windows)
        uses: federicocarboni/setup-ffmpeg@v3.1
        if: startsWith(runner.os, 'Windows')
        with:
          github-token: ${{ secrets.github_token }}
      - name: setup ffmpeg (linux)
        if: startsWith(runner.os, 'Linux')
        run: sudo apt-get update && sudo apt-get install -y ffmpeg libavcodec-dev libavformat-dev libswscale-dev libavdevice-dev libavfilter-dev
      - name: setup ffmpeg (mac)
        if: startsWith(runner.os, 'Mac')
        run: |
          brew install ffmpeg@4
          brew link ffmpeg@4
          echo 'export PATH="/usr/local/opt/ffmpeg@4/bin:$PATH"' >> /Users/runner/.bash_profile
          export LDFLAGS="-L/usr/local/opt/ffmpeg@4/lib"
          export CPPFLAGS="-I/usr/local/opt/ffmpeg@4/include"
          export PKG_CONFIG_PATH="/usr/local/opt/ffmpeg@4/lib/pkgconfig"
      ### Setup authentication/credentials
      - name: authenticate (aws)
        # contains(storage, 's3') already matches '--s3path', so the former
        # "|| contains(matrix.storage, 's3path')" clause was redundant.
        if: contains(matrix.storage, 's3')
        uses: aws-actions/configure-aws-credentials@v5.0.0
        with:
          role-to-assume: ${{ secrets.aws_role_arn }}
          aws-region: us-east-1
          role-duration-seconds: 21600
          role-session-name: deeplake-${{ github.sha }}
      - name: authenticate (gcp)
        if: contains(matrix.storage, 'gcs')
        uses: google-github-actions/auth@v3
        with:
          credentials_json: ${{ secrets.gcp_sa_credentials_json }}
          create_credentials_file: true
          export_environment_variables: true
      # Was mislabelled "authenticate (gcp)" (copy-paste), which also produced
      # a duplicate step name within the job.
      - name: authenticate (azure)
        if: contains(matrix.storage, 'azure')
        uses: Azure/login@v2.3.0
        with:
          creds: ${{ secrets.azure_creds_json }}
      - name: install requirements
        run: |
          pip3 install --upgrade pip --user
          pip3 install --upgrade setuptools
          pip3 install numpy==1.26.4
          pip3 install TTS --use-deprecated=legacy-resolver
          pip3 install -r deeplake/requirements/common.txt --use-deprecated=legacy-resolver
          pip3 install -r deeplake/requirements/tests.txt --use-deprecated=legacy-resolver
          pip3 install -r deeplake/requirements/plugins.txt --use-deprecated=legacy-resolver
          pip3 install -e .[all]
      - name: run fast tests
        id: fast-tests
        timeout-minutes: 30
        run: |
          coverage run --data-file=fast.coverage --omit="test_*.py" -m pytest ${{ matrix.storage }} --junit-xml=fast.results.xml --capture=sys -o junit_logging=all -m "not slow and not flaky" --timeout=60
        env:
          # Quoted so the value stays a literal string rather than a YAML
          # boolean (consistent with BUGGER_OFF above).
          DEEPLAKE_PYTEST_ENABLED: "true"
          GDRIVE_CLIENT_ID: ${{ secrets.oauth_client_id }}
          GDRIVE_CLIENT_SECRET: ${{ secrets.oauth_client_secret }}
          GDRIVE_REFRESH_TOKEN: ${{ secrets.oauth_refresh_token }}
          ACTIVELOOP_HUB_USERNAME: ${{ secrets.hub_username }}
          ACTIVELOOP_HUB_TOKEN: ${{ secrets.hub_token }}
          LABELBOX_TOKEN: ${{ secrets.labelbox_token }}
      # Slow tests still run when the fast suite failed (but not when it was
      # skipped/cancelled), so one run reports all failures.
      - name: run slow tests
        id: slow-tests
        if: success() || steps.fast-tests.conclusion == 'failure'
        timeout-minutes: 120
        run: |
          coverage run --data-file=slow.coverage --omit="test_*.py" -m pytest ${{ matrix.storage }} --junit-xml=slow.results.xml --capture=sys -o junit_logging=all -m "slow and not flaky"
        env:
          DEEPLAKE_PYTEST_ENABLED: "true"
          GDRIVE_CLIENT_ID: ${{ secrets.oauth_client_id }}
          GDRIVE_CLIENT_SECRET: ${{ secrets.oauth_client_secret }}
          GDRIVE_REFRESH_TOKEN: ${{ secrets.oauth_refresh_token }}
          ACTIVELOOP_HUB_USERNAME: ${{ secrets.hub_username }}
          ACTIVELOOP_HUB_TOKEN: ${{ secrets.hub_token }}
          LABELBOX_TOKEN: ${{ secrets.labelbox_token }}
      - name: save test results
        uses: actions/upload-artifact@v4.6.2
        if: always()
        with:
          name: test-results-py${{ matrix.python-version }}-${{ matrix.os }}-${{ env.matrix_storage_filename }}_${{ github.run_number }}-${{ github.run_attempt }}
          path: |
            fast.results.xml
            slow.results.xml
            fast.coverage
            slow.coverage
flaky-test:
name: Flaky Tests
runs-on: ubuntu-latest
env:
BUGGER_OFF: "true"
steps:
- name: checkout
uses: actions/checkout@v5.0.0
with:
fetch-depth: 0
- name: set up python
uses: actions/setup-python@v6.0.0
with:
python-version: "3.10"
cache: pip
cache-dependency-path: deeplake/requirements/*.txt
- name: install requirements
run: |
pip3 install --upgrade pip --user
pip3 install --upgrade setuptools
pip3 install TTS --use-deprecated=legacy-resolver
pip3 install -r deeplake/requirements/common.txt --use-deprecated=legacy-resolver
pip3 install -r deeplake/requirements/tests.txt --use-deprecated=legacy-resolver
pip3 install -r deeplake/requirements/plugins.txt --use-deprecated=legacy-resolver
pip3 install -e .[all]
- name: run flaky tests
id: flaky-tests
uses: nick-fields/retry@v3.0.2
with:
timeout_minutes: 20
max_attempts: 3
shell: bash
# Retry seems to only check the last command's exit code, so need to have just one command
command: |
coverage run --data-file=flaky.coverage --omit="test_*.py" -m pytest --local --hub-cloud --s3 --junit-xml=flaky.results.xml --capture=sys -o junit_logging=all -m "flaky"
env:
DEEPLAKE_PYTEST_ENABLED: true
GDRIVE_CLIENT_ID: ${{ secrets.oauth_client_id }}
GDRIVE_CLIENT_SECRET: ${{ secrets.oauth_client_secret }}
GDRIVE_REFRESH_TOKEN: ${{ secrets.oauth_refresh_token }}
ACTIVELOOP_HUB_USERNAME: ${{ secrets.hub_username }}
ACTIVELOOP_HUB_TOKEN: ${{ secrets.hub_token }}
LABELBOX_TOKEN: ${{ secrets.labelbox_token }}
- name: save test results
uses: actions/upload-artifact@v4.6.2
if: always()
with:
name: test-results-flaky_${{ github.run_number }}-${{ github.run_attempt }}
path: |
flaky.results.xml
flaky.coverage
  buh-test:
    # Backwards-compatibility suite: datasets are created with the scripts in
    # the activeloopai/buH repo, then read back with the current checkout.
    name: Backwards Compatibility Test
    runs-on: ubuntu-latest
    timeout-minutes: 120
    env:
      BUGGER_OFF: "true"
    steps:
      - name: checkout
        uses: actions/checkout@v5.0.0
        with:
          fetch-depth: 0
      # Checked out side-by-side under ./buH; installed editable below.
      - name: checkout the buH source code
        uses: actions/checkout@v5.0.0
        with:
          path: buH
          repository: activeloopai/buH
          fetch-depth: 0
      # This will slowly get behind as new versions are released that are not in the cache. The cache can be dropped through the github UI when creation takes too long
      - name: cache datasets_clean
        uses: actions/cache@v4.2.4
        with:
          path: datasets_clean/*
          key: buH-datasets-clean
      - name: set up python
        uses: actions/setup-python@v6.0.0
        with:
          python-version: "3.10"
          cache: pip
          cache-dependency-path: deeplake/requirements/*.txt
      - name: install libraries
        run: |
          pip3 install --upgrade pip --user
          pip3 install --upgrade setuptools
          pip3 install TTS --use-deprecated=legacy-resolver
          pip3 install "numpy<2.0"
          pip3 install -r deeplake/requirements/common.txt --use-deprecated=legacy-resolver
          pip3 install -r deeplake/requirements/tests.txt --use-deprecated=legacy-resolver
          pip3 install -e .[all]
          pip3 install -e buH
      - name: create datasets
        run: buH/buh/scripts/create_all.sh
      # Force-reinstall the current checkout and re-pin numpy — presumably
      # create_all.sh can install older deeplake/numpy versions while
      # generating datasets (TODO confirm against buH scripts).
      - name: cleanup libraries
        run: |
          pip3 install --upgrade --force-reinstall -e .
          pip3 install "numpy<2.0"
      - name: run backwards compatibility tests
        run: python3 -m pytest --junitxml=buh.results.xml --capture=sys -o junit_logging=all buH/
        env:
          ACTIVELOOP_HUB_USERNAME: ${{ secrets.hub_username }}
          ACTIVELOOP_HUB_TOKEN: ${{ secrets.hub_token }}
      # With the worked-on dataset plus the original clean, we can run out of disk space by the time we archive the results
      - name: cleanup datasets
        run: rm -rf datasets
      - name: save test results
        uses: actions/upload-artifact@v4.6.2
        if: always()
        with:
          name: test-results-backwards-compat_${{ github.run_attempt }}
          path: |
            buh.results.xml
report:
name: Final Testing and Reporting
needs: [test, buh-test, flaky-test]
runs-on: ubuntu-latest
if: always() && needs.test.result != 'cancelled' && needs.buh-test.result != 'cancelled'
env:
BUGGER_OFF: "true"
steps:
- name: checkout
uses: actions/checkout@v5.0.0
with:
fetch-depth: 0
- name: download test results
uses: actions/download-artifact@v5.0.0
with:
path: results
# - name: Annotate with Test Results
# uses: EnricoMi/publish-unit-test-result-action@v2
# with:
# files: results/test-results-*/*.results.xml
- name: publish test report
uses: mikepenz/action-junit-report@v5.6.2
if: always()
with:
report_paths: results/test-results-*/*.results.xml
# - name: set up python
# uses: actions/setup-python@v6.0.0
# with:
# python-version: "3.10"
# cache: pip
# cache-dependency-path: deeplake/requirements/*.txt
- name: merge coverage data
run: |
pip3 install coverage[toml]
find results -name "*.coverage" | xargs python3 -m coverage combine --keep --append --data-file=total.coverage
python3 -m coverage xml --data-file=total.coverage -o total.coverage.xml
sed -i 's/coverage.xml/total.coverage.xml/' sonar-project.properties
- name: upload coverage to codecov
uses: codecov/codecov-action@v5.5.1
with:
files: ./total.coverage.xml
flags: unittests
env_vars: OS,PYTHON
use_oidc: true
- name: sonarcloud scan
if: env.SONAR_TOKEN != ''
uses: SonarSource/sonarcloud-github-action@v5.0.0
env:
GITHUB_TOKEN: ${{ secrets.github_token }}
SONAR_TOKEN: ${{ secrets.sonar_token }}