#!/usr/bin/env python3

import os
import tempfile
import requests
from subprocess import check_call, check_output, CalledProcessError
from typing import TextIO


# Benchmark name, directory with SVGs to render
BENCHMARKS = [
    ["hicolor-apps", "./hicolor-apps"],
    ["symbolic-icons", "../tests/fixtures/reftests/adwaita"],
]
METRICS_URL = "https://librsvg-metrics.fly.dev/api/metrics/"
PATH_TO_RSVG_BENCH = "../target/release/rsvg-bench"


def parse_output_file(file: TextIO):
    """Parse the cachegrind output file for metrics."""
    keys, values = None, None
    for line in file:
        line = line.strip()
        if line.startswith("events: "):
            keys = line.removeprefix("events: ").split(" ")
        if line.startswith("summary: "):
            values = line.removeprefix("summary: ").split(" ")

    if keys is None or values is None:
        raise Exception("Couldn't parse cachegrind file: event names or summary metrics not found")

    return dict(zip(keys, values))


def get_commit_details():
    """Get details of the commit being benchmarked."""
    if os.environ.get("CI_COMMIT_SHA") and os.environ.get("CI_COMMIT_TIMESTAMP"):
        return {
            "commit": os.environ["CI_COMMIT_SHA"],
            "time": os.environ["CI_COMMIT_TIMESTAMP"]
        }

    # Outside CI, ask git directly; -s suppresses the patch so only the format string is printed.
    commit_hash = check_output(["git", "show", "-s", "--format=%H"], text=True).strip()
    commit_time = check_output(["git", "show", "-s", "--format=%cI"], text=True).strip()
    return {
        "commit": commit_hash,
        "time": commit_time
    }


def submit_metrics(data):
    """POST the collected metrics to the metrics server."""
    token = os.environ["METRICS_TOKEN"]
    response = requests.post(METRICS_URL, json=data, headers={"Authorization": f"Token {token}"})
    response.raise_for_status()


def run_with_cachegrind(directory, path):
    """Run rsvg-bench on a directory of SVGs under cachegrind, writing the profile to path."""
    command = ["valgrind", "--tool=cachegrind", f"--cachegrind-out-file={path}", PATH_TO_RSVG_BENCH, directory]
    check_call(command)


def check_working_tree():
    """Error out if the git working tree has uncommitted changes."""
    cmd = ["git", "diff-index", "--quiet", "HEAD"]
    try:
        check_call(cmd)
    except CalledProcessError as e:
        print("git working tree not clean, exiting.")
        raise e


def run_benchmark(name, directory):
    """Profile one benchmark under cachegrind and submit the resulting metrics."""
    with tempfile.NamedTemporaryFile("r+") as file:
        run_with_cachegrind(directory, file.name)

        # "Ir" (instructions executed) is cachegrind's main event; report it as the primary value.
        metrics = parse_output_file(file)
        metrics["value"] = metrics["Ir"]
        metrics["name"] = name

        metadata = get_commit_details()
        data = metadata | metrics
        submit_metrics(data)


def main():
    check_working_tree()
    for name, directory in BENCHMARKS:
        run_benchmark(name, directory)


if __name__ == "__main__":
    main()