-
Notifications
You must be signed in to change notification settings - Fork 29
/
Copy pathaggregate_build_tests.py
169 lines (137 loc) · 3.75 KB
/
aggregate_build_tests.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
import subprocess
import json
from typing import List
import os
from urllib.parse import quote
def list_build_files(date: int) -> List[str]:
    """
    Lists the files in s3 for the current build date

    Input
    =====
    date: int
        Date in the format yyyymmdd

    Returns
    =======
    files: list[str]
        list of package s3 keys for this date

    Raises
    ======
    subprocess.CalledProcessError
        If the `aws s3 ls` command exits non-zero (previously a failure
        was silently swallowed and an empty list returned).
    """
    p = subprocess.run(
        [
            "aws", "s3", "ls",
            f"s3://deps.memgraph.io/daily-build/mage/{date}/",
            "--recursive"
        ],
        capture_output=True,
        text=True,
        check=True  # surface aws CLI failures instead of parsing empty output
    )
    # `aws s3 ls --recursive` lines look like "<date> <time> <size> <key>".
    # Split at most 3 times so keys containing spaces stay intact, and skip
    # blank/malformed lines instead of raising IndexError.
    files = []
    for line in p.stdout.splitlines():
        parts = line.split(None, 3)
        if len(parts) == 4:
            files.append(parts[3])
    return files
def build_package_json(files: List[str], return_url: bool = True) -> dict:
    """
    Extracts the OS and CPU architecture and builds the dict/json used by the
    daily-builds workflow

    Inputs
    ======
    files: List[str]
        list of s3 keys
    return_url: bool
        If True, the URL is returned, otherwise the s3 key

    Returns
    =======
    out: dict
        dictionary of the format:
        {
            "Docker (x86_64)": {
                "x86_64": "https://.....",
                "x86_64-debug": "https://....."
            }
        }
    """
    out = {}
    for file in files:
        if return_url:
            # percent-encode the key portion but keep scheme/path separators
            url = quote(
                f"https://s3.eu-west-1.amazonaws.com/deps.memgraph.io/{file}",
                safe=":/"
            )
        else:
            url = file
        # NOTE: local renamed from `os` to `os_name` so it no longer shadows
        # the imported `os` module.
        if "arm64" in file:
            arch = "arm64"
            os_name = "Docker (arm64)"
        else:
            arch = "x86_64"
            os_name = "Docker (x86_64)"
        # debug / custom-allocator builds get suffixed architecture keys
        if "relwithdebinfo" in file:
            arch = f"{arch}-debug"
        if "malloc" in file:
            arch = f"{arch}-malloc"
        out.setdefault(os_name, {})[arch] = url
    return out
def list_daily_release_packages(date: int, return_url: bool = True) -> dict:
    """
    Return every package built on a given day, keyed by OS and architecture.

    Inputs
    ======
    date: int
        Date in the format yyyymmdd
    return_url: bool
        If True, the URL is returned, otherwise the s3 key

    Returns
    =======
    out: dict
        dictionary of the format:
        {
            "ubuntu-24.04: {
                "x86_64": "https://.....",
                "arm64": "https://....."
            }
        }
    """
    # List the day's s3 keys, then fold them into the workflow payload shape.
    return build_package_json(list_build_files(date), return_url)
def main() -> None:
    """
    Collect TEST_RESULT, CURRENT_BUILD_DATE and the s3 keys of the packages,
    then print the JSON payload to be sent to the daily build repo workflow.

    The structure of the payload is:
    {
        "event_type": "trigger_update_index",
        "client_payload": {
            "table": "mage",
            "limit": 42,
            "build_data": {
                "date": 20250405,
                "tests": "pass",
                "packages": {
                    "ubuntu-24.04": {
                        "arm64": "https://s3.eu-west-1.....",
                        "x86_64": "https://s3.eu-west-1....."
                    }
                }
            }
        }
    }

    Raises
    ======
    RuntimeError
        If the CURRENT_BUILD_DATE environment variable is not set.
    """
    build_date = os.getenv("CURRENT_BUILD_DATE")
    if build_date is None:
        # fail with a clear message instead of an opaque TypeError from int(None)
        raise RuntimeError("CURRENT_BUILD_DATE environment variable is not set")
    date = int(build_date)
    # TODO: add individual test results and URL to each one
    tests = os.getenv("TEST_RESULT")
    # collect packages part of the payload
    packages = list_daily_release_packages(date)
    # build the payload dict, print the JSON dump
    payload = {
        "event_type": "trigger_update_index",
        "client_payload": {
            "table": "mage",
            "limit": 42,
            "build_data": {
                "date": date,
                "tests": tests,
                "packages": packages
            }
        }
    }
    print(json.dumps(payload))
# Script entry point: emit the daily-build JSON payload to stdout.
if __name__ == "__main__":
    main()