Add benchmarking GHA Diff job (#147)

Marko Budiselić 2021-05-12 17:17:57 +02:00 committed by GitHub
parent cc27a04139
commit 782c377f5d
2 changed files with 69 additions and 3 deletions


@@ -340,3 +340,64 @@ jobs:
with:
name: "Jepsen Report"
path: tests/jepsen/Jepsen.tar.gz
release_benchmarks:
name: "Release benchmarks"
runs-on: [self-hosted, Linux, X64, Diff, Gen7]
env:
THREADS: 24
steps:
- name: Set up repository
uses: actions/checkout@v2
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Build release binaries
run: |
# Activate toolchain.
source /opt/toolchain-v2/activate
# Initialize dependencies.
./init
# Build only memgraph release binaries.
cd build
cmake -DCMAKE_BUILD_TYPE=release ..
make -j$THREADS
- name: Run macro benchmarks
run: |
cd tests/macro_benchmark
./harness QuerySuite MemgraphRunner \
--groups aggregation 1000_create unwind_create dense_expand match \
--no-strict
- name: Upload macro benchmark results
run: |
cd tools/bench-graph-client
virtualenv -p python3 ve3
source ve3/bin/activate
pip install -r requirements.txt
./main.py --benchmark-name "macro_benchmark" \
--benchmark-results-path "../../tests/macro_benchmark/.harness_summary" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}"
- name: Run mgbench
run: |
cd tests/mgbench
./benchmark.py --num-workers-for-benchmark 12 --export-results benchmark_result.json pokec/medium/*/*
- name: Upload mgbench results
run: |
cd tools/bench-graph-client
virtualenv -p python3 ve3
source ve3/bin/activate
pip install -r requirements.txt
./main.py --benchmark-name "mgbench" \
--benchmark-results-path "../../tests/mgbench/benchmark_result.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}"
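
Both upload steps hand a results file to tools/bench-graph-client/main.py, which loads it with json.load (see the second file in this diff). The sketch below is not part of the commit; it is a minimal local pre-check, assuming only that the two result files written by the steps above are JSON documents at the paths the workflow uses:

# Sanity-check sketch (illustration, not part of the commit): verify that
# both benchmark result files exist and parse as JSON before they are
# uploaded by bench-graph-client. Paths mirror the workflow above; the
# internal schema of each file is an assumption and is not checked here.
import json
import sys
from pathlib import Path

RESULT_FILES = [
    Path("tests/macro_benchmark/.harness_summary"),
    Path("tests/mgbench/benchmark_result.json"),
]

def parses_as_json(path: Path) -> bool:
    """Return True if `path` exists and contains valid JSON."""
    if not path.is_file():
        print(f"missing: {path}", file=sys.stderr)
        return False
    try:
        json.loads(path.read_text())
    except json.JSONDecodeError as exc:
        print(f"invalid JSON in {path}: {exc}", file=sys.stderr)
        return False
    return True

if __name__ == "__main__":
    ok = [parses_as_json(p) for p in RESULT_FILES]
    sys.exit(0 if all(ok) else 1)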


@@ -9,6 +9,7 @@ import json
import logging
import os
import requests
import subprocess
from datetime import datetime
from argparse import ArgumentParser
@@ -20,7 +21,7 @@ GITHUB_REF = os.getenv("GITHUB_REF", "")
BENCH_GRAPH_SERVER_ENDPOINT = os.getenv(
"BENCH_GRAPH_SERVER_ENDPOINT",
"http://mgdeps-cache:9000")
"http://bench-graph-api:9001")
log = logging.getLogger(__name__)
@@ -38,6 +39,10 @@ def post_measurement(args):
with open(args.benchmark_results_path, "r") as f:
data = json.load(f)
timestamp = datetime.now().timestamp()
branch = subprocess.run(
["git", "rev-parse", "--abbrev-ref", "HEAD"],
stdout=subprocess.PIPE,
check=True).stdout.decode("utf-8").strip()
req = requests.post(
f"{BENCH_GRAPH_SERVER_ENDPOINT}/measurements",
json={
@@ -48,8 +53,8 @@ def post_measurement(args):
"git_sha": GITHUB_SHA,
"github_run_id": args.github_run_id,
"github_run_number": args.github_run_number,
"results": data
},
"results": data,
"git_branch": branch},
timeout=1)
assert req.status_code == 200, \
f"Uploading {args.benchmark_name} data failed."