Fix some benchmarks

Summary: Fix bug in perf wrapper

Reviewers: mferencevic

Reviewed By: mferencevic

Subscribers: pullbot

Differential Revision: https://phabricator.memgraph.io/D611
Mislav Bradac 2017-07-30 16:22:25 +02:00
parent 8d482f5fcd
commit 5e3ff2438e
8 changed files with 15 additions and 18 deletions


@@ -19,7 +19,7 @@ def create_edges(edge_count, vertex_count):
     matches = []
     merges = []
     for edge in range(edge_count):
-        matches.append("MATCH (a%d {id: %d}), (b%d {id: %d})" %
+        matches.append("MATCH (a%d :Label {id: %d}), (b%d :Label {id: %d})" %
                        (edge, randint(0, vertex_count - 1),
                         edge, randint(0, vertex_count - 1)))
         merges.append("CREATE (a%d)-[:Type]->(b%d)" % (edge, edge))
@@ -29,7 +29,3 @@ def create_edges(edge_count, vertex_count):
         print(";")
         matches = []
         merges = []
-
-
-if __name__ == '__main__':
-    raise Exception("This file is just for utilities, not for actual setup")
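The :Label added to these MATCH patterns pairs with the CREATE INDEX ON :Label(id) statement introduced in the vertex setup below: with the label in the pattern, each endpoint lookup can use that index instead of scanning every vertex. A minimal self-contained sketch of the query text one loop iteration builds (identifiers as in the diff; the ids are random):

    from random import randint

    edge, vertex_count = 0, 1000
    match = ("MATCH (a%d :Label {id: %d}), (b%d :Label {id: %d})" %
             (edge, randint(0, vertex_count - 1),
              edge, randint(0, vertex_count - 1)))
    merge = "CREATE (a%d)-[:Type]->(b%d)" % (edge, edge)
    print(match + " " + merge + ";")
    # -> MATCH (a0 :Label {id: ...}), (b0 :Label {id: ...}) CREATE (a0)-[:Type]->(b0);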


@@ -1,3 +1,3 @@
-from setup import create_edges
-create_edges(10000, 500)
+from common import create_edges
+create_edges(10000, 1000)


@@ -1,3 +1,4 @@
-from setup import create_vertices
-create_vertices(500)
+from common import create_vertices
+print("CREATE INDEX ON :Label(id);")
+create_vertices(1000)
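The index is emitted before the vertices it will cover, and the vertex count is doubled to match create_edges(10000, 1000) above. A sketch of what this setup script prints, with a hypothetical stand-in for create_vertices (the real helper lives in common.py, which this diff does not show):

    # Hypothetical stand-in; the real create_vertices is in common.py
    # and its exact output may differ.
    def create_vertices(vertex_count):
        for vertex in range(vertex_count):
            print("CREATE (:Label {id: %d});" % vertex)

    print("CREATE INDEX ON :Label(id);")  # index first, as in the file
    create_vertices(1000)                 # then the 1000 vertices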


@@ -0,0 +1 @@
+MATCH (n) DETACH DELETE n


@@ -218,7 +218,7 @@ class _QuerySuite:
         # warmup phase
         for _ in range(min(scenario_config.get("iterations", 1),
-                           scenario_config.get("warmup", 3))):
+                           scenario_config.get("warmup", 2))):
             execute("itersetup")
             execute("run", scenario_config.get("num_client_workers", 1))
             execute("iterteardown")
@@ -238,13 +238,12 @@ class _QuerySuite:
                               scenario_config.get("num_client_workers", 1))
             add_measurement(run_result, iteration, WALL_TIME)
             add_measurement(run_result, iteration, CPU_TIME)
-            if len(run_result.get("metadatas", [])) == 1:
-                add_measurement(run_result["metadatas"][0], iteration,
-                                "query_parsing_time")
-                add_measurement(run_result["metadatas"][0], iteration,
-                                "query_plan_execution_time")
-                add_measurement(run_result["metadatas"][0], iteration,
-                                "query_planning_time")
+            for measurement in ["query_parsing_time",
+                                "query_plan_execution_time",
+                                "query_planning_time"]:
+                for i in range(len(run_result.get("metadatas", []))):
+                    add_measurement(run_result["metadatas"][i], iteration,
+                                    measurement)
             execute("iterteardown")
         if self.perf:
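The rewrite also changes behavior: the old code recorded the three query timings only when exactly one metadata dict was present, while the new loops record every timing from every metadata dict. A self-contained sketch of the new shape (add_measurement and the metadata layout here are assumptions standing in for the suite's real helpers):

    results = []

    def add_measurement(metadata, iteration, key):
        # Stand-in: record the value only if the backend reported it.
        if key in metadata:
            results.append((iteration, key, metadata[key]))

    run_result = {"metadatas": [{"query_parsing_time": 0.01},
                                {"query_planning_time": 0.02}]}
    for measurement in ["query_parsing_time",
                        "query_plan_execution_time",
                        "query_planning_time"]:
        for metadata in run_result.get("metadatas", []):
            add_measurement(metadata, 0, measurement)
    print(results)  # both metadata dicts contribute; missing keys are skipped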


@@ -28,5 +28,5 @@ class Perf():
     def stop(self):
-        self.perf_proc.send_signal(signal.SIGUSR1)
+        self.perf_proc.send_signal(signal.SIGINT)
         self.perf_proc.wait()
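This is the perf-wrapper bug from the summary: stopping the profiler with SIGUSR1 did not shut perf down the way Ctrl-C does, so the switch to SIGINT presumably lets perf record flush and close its output cleanly before wait() returns. A sketch of the fixed wrapper in isolation (the Popen arguments are assumptions; the real class carries more setup):

    import signal
    import subprocess

    class Perf:
        def start(self, pid):
            # Attach perf record to the benchmarked process (flags assumed).
            self.perf_proc = subprocess.Popen(
                ["perf", "record", "-p", str(pid)])

        def stop(self):
            # SIGINT mirrors Ctrl-C, which perf record handles by writing
            # out perf.data before exiting; this is the commit's fix.
            self.perf_proc.send_signal(signal.SIGINT)
            self.perf_proc.wait()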