Store json results in harness

Reviewers: buda

Reviewed By: buda

Subscribers: pullbot

Differential Revision: https://phabricator.memgraph.io/D556
Mislav Bradac 2017-07-16 17:35:02 +02:00
parent 1c6ef47994
commit 10ded31eac
3 changed files with 30 additions and 34 deletions


@@ -21,7 +21,7 @@ from perf import Perf
log = logging.getLogger(__name__)
class QuerySuite():
class QuerySuite:
"""
Executes a Query-based benchmark scenario. Query-based scenarios
consist of setup steps (Cypher queries) executed before the benchmark,
@@ -43,7 +43,7 @@ class QuerySuite():
args, _ = argp.parse_known_args(args)
self.perf = Perf() if args.perf else None
class Loader():
class Loader:
"""
Loads file contents. Supported types are:
.py - executable that prints out Cypher queries
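For illustration, a hypothetical .py scenario file of the kind the Loader docstring describes could simply print one Cypher query per line to stdout; the file contents and the query below are made up, not taken from the harness:
#!/usr/bin/env python3
# Hypothetical scenario script: the harness executes it and reads the
# Cypher queries it prints, one per line.
if __name__ == "__main__":
    for i in range(100):
        print("CREATE (:Node {id: %d});" % i)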
@@ -188,9 +188,11 @@ class QuerySuite():
measurement_sums = defaultdict(float)
def add_measurement(dictionary, iteration, key):
if key in dictionary:
measurement = {"target": key, "value": dictionary[key],
"unit": "s", "type": "time"}
measurement["iteration"] = iteration
measurement = {"target": key,
"value": float(dictionary[key]),
"unit": "s",
"type": "time",
"iteration": iteration}
measurements.append(measurement)
try:
measurement_sums[key] += float(dictionary[key])
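With this change every measurement is assembled as a single dictionary and the value is coerced to float up front. A record appended to the results list would look roughly like the sketch below; the target name and the numbers are illustrative, not real output:
# Illustrative measurement record in the shape built by the new add_measurement.
measurement = {
    "target": "wall_time",   # made-up key name
    "value": 0.042,          # always stored as float
    "unit": "s",
    "type": "time",
    "iteration": 3,
}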
@@ -340,30 +342,12 @@ class MemgraphRunner:
self.memgraph_bin.wait()
def send_data(storage_url, data_type, payload):
log.info("Sending %d elements of type '%s' to storage at '%s'",
len(payload), data_type, storage_url)
log.debug("Sending payload:\n%s", json.dumps(payload, indent=2))
r = requests.post("%s/store/%s" % (storage_url, data_type), json=payload)
if r.status_code != 200:
raise Exception("Unable to send %s data." % data_type)
log.debug("Storage server response:\n%s", r.json())
if len(r.json()) == 0:
raise Exception("Invalid storage server response")
return r.json()
def send_data_one(storage_url, data_type, payload):
return send_data(storage_url, data_type, [payload])[0]
def parse_known_args():
argp = ArgumentParser(description=__doc__)
# positional, mandatory args
argp.add_argument("suite", help="Suite to run.")
argp.add_argument("runner", help="Engine to use.")
# named, optional arguments
argp.add_argument("--storage-url", help="URL of the storage server")
argp.add_argument("--groups", nargs="+", help="Groups to run. If none are"
" provided, all available grups are run.")
argp.add_argument("--scenarios", nargs="+", help="Scenarios to run. If "
@@ -383,8 +367,7 @@ def main():
logging.getLogger("urllib3").setLevel(logging.WARNING)
logging.getLogger("neo4j.bolt").setLevel(logging.WARNING)
log.info("Memgraph benchmark suite harness")
log.info("Executing for suite '%s', runner '%s', storage '%s'",
args.suite, args.runner, args.storage_url)
log.info("Executing for suite '%s', runner '%s'", args.suite, args.runner)
# Create suite
suites = {"QuerySuite": QuerySuite}
@@ -440,19 +423,16 @@ def main():
log.info("Executing group.scenario '%s.%s' with elements %s",
group, scenario_name, list(scenario.keys()))
for iter_result in suite.run(scenario, scenario_name, runner):
iter_result['group'] = group
iter_result['scenario'] = scenario_name
iter_result["group"] = group
iter_result["scenario"] = scenario_name
results.append(iter_result)
run = dict()
run["suite"] = args.suite
run["runner"] = runner.__class__.__name__
run["runner_config"] = vars(runner.args)
run.update(args.additional_run_fields)
if args.storage_url is not None:
run_uuid = send_data_one(args.storage_url, "run", run)
for result in results:
result["run"] = run_uuid
send_data(args.storage_url, "measurement", results)
for result in results:
jail.store_data(json.dumps(result))
print("\n\n{}\n".format(suite.summary))


@@ -9,7 +9,8 @@ import threading
import time
import uuid
from signal import *
import datetime
import json
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
@@ -194,7 +195,19 @@ def get_process():
return None
# TODO: this needs to be implemented
def store_data(data):
def store_data(_data, self):
data = json.loads(_data)
assert "unit" in data, "unit is nonoptional field"
assert "type" in data, "type is nonoptional field"
assert "value" in data, "value is nonoptional field"
if not hasattr(self, "timestamp"):
self.timestamp = datetime.datetime.now().isoformat()
with open(os.path.join(os.path.dirname(__file__), "results",
self.timestamp), "a") as results_file:
json.dump(data, results_file)
results_file.write("\n")
# TODO: should assert that data contains certain keys
# TODO: those should be the keys value, type, etc.
@@ -218,6 +231,7 @@ def store_data(data):
# that means it should be a list of dictionaries
pass
store_data.__defaults__ = (store_data,)
if __name__ == "__main__":
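The new store_data asserts the mandatory fields, then appends each record as one JSON object per line to a file under results/ named after the timestamp of the first call. The store_data.__defaults__ = (store_data,) assignment binds the function object itself as the default value of the trailing self parameter, so the timestamp set on the first call persists across later calls. A minimal sketch of that idiom, with names that are illustrative rather than taken from the harness:
import datetime

def remember(self):
    # Attributes set on the function object survive between calls,
    # mirroring how store_data keeps its first-call timestamp.
    if not hasattr(self, "stamp"):
        self.stamp = datetime.datetime.now().isoformat()
    return self.stamp

# Bind the function itself as the default for "self".
remember.__defaults__ = (remember,)

assert remember() == remember()  # both calls see the same timestamp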


@@ -0,0 +1,2 @@
*
!.gitignore