2017-08-02 16:48:33 +08:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
|
|
|
import argparse
|
2017-08-18 20:48:21 +08:00
|
|
|
import atexit
|
2017-08-02 16:48:33 +08:00
|
|
|
import json
|
|
|
|
import multiprocessing
|
|
|
|
import os
|
|
|
|
import subprocess
|
|
|
|
import sys
|
2017-08-18 20:48:21 +08:00
|
|
|
import time
|
2017-08-02 16:48:33 +08:00
|
|
|
|
2017-08-18 20:48:21 +08:00
|
|
|
# dataset calibrated for running on Apollo (total 4min)
# bipartite.py runs for approx. 30s
# create_match.py runs for approx. 30s
# long_running runs for 1min
# long_running runs for 2min
SMALL_DATASET = [
    dict(test="bipartite.py",
         options=["--u-count", "100", "--v-count", "100"],
         timeout=5),
    dict(test="create_match.py",
         options=["--vertex-count", "40000", "--create-pack-size", "100"],
         timeout=5),
    dict(test="long_running.cpp",
         options=["--vertex-count", "1000", "--edge-count", "5000",
                  "--max-time", "1", "--verify", "20"],
         timeout=5),
    dict(test="long_running.cpp",
         options=["--vertex-count", "10000", "--edge-count", "50000",
                  "--max-time", "2", "--verify", "30"],
         timeout=5),
]
|
|
|
|
|
2017-08-18 20:48:21 +08:00
|
|
|
# dataset calibrated for running on daily stress instance (total 9h)
# bipartite.py and create_match.py run for approx. 15min
# long_running runs for 5min x 6 times = 30min
# long_running runs for 8h
LARGE_DATASET = [
    {
        "test": "bipartite.py",
        "options": ["--u-count", "300", "--v-count", "300"],
        "timeout": 30,
    },
    {
        "test": "create_match.py",
        "options": ["--vertex-count", "500000", "--create-pack-size", "500"],
        "timeout": 30,
    },
] + [
    # built with a comprehension instead of `[{...}] * 6`: list repetition
    # aliases ONE dict object six times, so an in-place mutation of a single
    # entry (e.g. extending its "options") would silently change all six;
    # the comprehension yields six independent dicts
    {
        "test": "long_running.cpp",
        "options": ["--vertex-count", "10000", "--edge-count", "40000",
                    "--max-time", "5", "--verify", "60"],
        "timeout": 8,
    }
    for _ in range(6)
] + [
    {
        "test": "long_running.cpp",
        "options": ["--vertex-count", "400000", "--edge-count", "1600000",
                    "--max-time", "480", "--verify", "300"],
        "timeout": 500,
    },
]
|
|
|
|
|
|
|
|
# paths
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
BASE_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, os.pardir, os.pardir))
BUILD_DIR = os.path.join(BASE_DIR, "build")
CONFIG_DIR = os.path.join(BASE_DIR, "config")
MEASUREMENTS_FILE = os.path.join(SCRIPT_DIR, ".apollo_measurements")

# long running stats file: the last entry of each dataset is the stats-writing
# long_running run, so point it at the shared stats file
STATS_FILE = os.path.join(SCRIPT_DIR, ".long_running_stats")
for _dataset in (SMALL_DATASET, LARGE_DATASET):
    _dataset[-1]["options"].extend(["--stats-file", STATS_FILE])
|
2017-08-02 16:48:33 +08:00
|
|
|
|
2017-08-18 20:48:21 +08:00
|
|
|
# get number of threads (the THREADS environment variable overrides the
# detected CPU count)
if "THREADS" in os.environ:
    # int() normalizes the type (environment variables are strings, while
    # cpu_count() yields an int) and fails early with a clear ValueError
    # on a non-numeric value instead of passing garbage to the tests
    THREADS = int(os.environ["THREADS"])
else:
    THREADS = multiprocessing.cpu_count()
|
2017-08-02 16:48:33 +08:00
|
|
|
|
|
|
|
|
2017-08-18 20:48:21 +08:00
|
|
|
# run test helper function
def run_test(args, test, options, timeout):
    """Run a single stress test and return its wall-clock runtime in seconds.

    args    -- parsed command line arguments (uses args.python, args.verbose)
    test    -- test file name; ".py" tests are run under args.python, ".cpp"
               tests are run as pre-built binaries from the build directory
    options -- extra command line options forwarded to the test
    timeout -- timeout in minutes

    Raises an Exception if the test type is unsupported, the test binary is
    missing, or the test exits with a non-zero status.
    """
    print("Running test '{}'".format(test))

    # find binary
    if test.endswith(".py"):
        # forwarded to the test's --logging flag; named log_level so the
        # local doesn't shadow the stdlib "logging" module name
        log_level = "DEBUG" if args.verbose else "WARNING"
        binary = [args.python, "-u", os.path.join(SCRIPT_DIR, test),
                  "--logging", log_level]
    elif test.endswith(".cpp"):
        # prefer the debug build; fall back to the release build
        exe = os.path.join(BUILD_DIR, "tests", "stress", test[:-4])
        if not os.path.exists(exe):
            exe = os.path.join(BASE_DIR, "build_release", "tests", "stress",
                               test[:-4])
        if not os.path.exists(exe):
            # fail early with a clear message instead of a confusing
            # FileNotFoundError from subprocess.run below
            raise Exception("Test '{}' binary not found!".format(test))
        binary = [exe]
    else:
        raise Exception("Test '{}' binary not supported!".format(test))

    # start test
    cmd = binary + ["--worker-count", str(THREADS)] + options
    start = time.time()
    ret_test = subprocess.run(cmd, cwd = SCRIPT_DIR, timeout = timeout * 60)

    if ret_test.returncode != 0:
        raise Exception("Test '{}' binary returned non-zero ({})!".format(
            test, ret_test.returncode))

    runtime = time.time() - start
    print("    Done after {:.3f} seconds".format(runtime))

    return runtime
|
|
|
|
|
2017-08-02 16:48:33 +08:00
|
|
|
|
|
|
|
# parse arguments
parser = argparse.ArgumentParser(description="Run stress tests on Memgraph.")
parser.add_argument("--memgraph", default=os.path.join(BUILD_DIR, "memgraph"))
parser.add_argument("--config", default=os.path.join(CONFIG_DIR, "stress.conf"))
parser.add_argument("--log-file", default="")
parser.add_argument("--durability-directory", default="")
parser.add_argument("--python", default=os.path.join(SCRIPT_DIR, "ve3", "bin",
                                                     "python3"), type=str)
# "store_true" is the idiomatic equivalent of
# action="store_const", const=True, default=False
parser.add_argument("--large-dataset", action="store_true")
parser.add_argument("--verbose", action="store_true")
args = parser.parse_args()
|
|
|
|
|
2017-09-13 20:49:26 +08:00
|
|
|
# find memgraph binary: fall back to the release build when the default
# (debug) build is missing
if not os.path.exists(args.memgraph):
    args.memgraph = os.path.join(BASE_DIR, "build_release", "memgraph")
|
|
|
|
|
2017-08-18 20:48:21 +08:00
|
|
|
# start memgraph
cwd = os.path.dirname(args.memgraph)
cmd = [args.memgraph, "--num-workers=" + str(THREADS)]
if not args.verbose:
    cmd.extend(["--min-log-level", "1"])
if args.log_file:
    cmd.extend(["--log-file", args.log_file])
if args.durability_directory:
    cmd.extend(["--durability-directory", args.durability_directory])
# NOTE(review): env= REPLACES the child environment entirely, so memgraph
# sees only MEMGRAPH_CONFIG (no PATH etc.) -- confirm this is intentional
proc_mg = subprocess.Popen(cmd, cwd = cwd,
                           env = {"MEMGRAPH_CONFIG": args.config})
# give the server a moment to come up before the tests connect
time.sleep(1.0)
|
|
|
|
|
|
|
|
# at exit cleanup
@atexit.register
def cleanup():
    """Ensure the Memgraph server doesn't outlive the test runner."""
    # poll() returns an exit code (not None) once the process has exited;
    # use the idiomatic "is not None" instead of "!= None"
    if proc_mg.poll() is not None:
        return
    proc_mg.kill()
    proc_mg.wait()
|
|
|
|
|
2017-08-02 16:48:33 +08:00
|
|
|
# run tests, recording each test's runtime under its extension-less name
dataset = LARGE_DATASET if args.large_dataset else SMALL_DATASET
runtimes = {}
for test in dataset:
    name, _ = os.path.splitext(test["test"])
    runtimes[name] = run_test(args, **test)
|
2017-08-18 20:48:21 +08:00
|
|
|
|
|
|
|
# stop memgraph and verify that it shut down cleanly
proc_mg.terminate()
exit_code = proc_mg.wait()
if exit_code != 0:
    raise Exception("Memgraph binary returned non-zero ({})!".format(exit_code))
|
|
|
|
|
2017-12-28 23:27:30 +08:00
|
|
|
# measurements: one "key value" line per test runtime plus the query counters
# written by the long_running test (first line: executed, second: failed)
lines = ["{}.runtime {}".format(key, value)
         for key, value in runtimes.items()]
with open(STATS_FILE) as f:
    stats = f.read().split("\n")
lines.append("long_running.queries.executed {}".format(stats[0]))
lines.append("long_running.queries.failed {}".format(stats[1]))
with open(MEASUREMENTS_FILE, "w") as f:
    f.write("".join(line + "\n" for line in lines))

print("Done!")
|