#!/usr/bin/env python3
# -*- coding: utf-8 -*-
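"""Run Memgraph stress tests.

Starts a single memgraph instance, runs every test from the selected dataset
against it (SMALL_DATASET by default, LARGE_DATASET with --large-dataset) and
checks that memgraph itself shuts down cleanly at the end.
"""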

import argparse
import atexit
import multiprocessing
import os
import subprocess
import time

# dataset calibrated for running on Apollo (total approx. 4min)
# bipartite runs for approx. 30s
# create_match runs for approx. 30s
# the two long_running tests run for approx. 1min and 2min respectively
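# each dataset entry is passed to run_test() below as keyword arguments;
# "timeout" is the per-test limit in minutes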
SMALL_DATASET = [
    {
        "test": "bipartite.py",
        "options": ["--u-count", "100", "--v-count", "100"],
        "timeout": 5,
    },
    {
        "test": "create_match.py",
        "options": ["--vertex-count", "40000", "--create-pack-size", "100"],
        "timeout": 5,
    },
    {
        "test": "long_running.cpp",
        "options": ["--vertex-count", "1000", "--edge-count", "1000", "--max-time", "1", "--verify", "20"],
        "timeout": 5,
    },
    {
        "test": "long_running.cpp",
        "options": ["--vertex-count", "1000", "--edge-count", "1000", "--max-time", "2", "--verify", "30"],
        "timeout": 5,
    },
]

# dataset calibrated for running on the daily stress instance (total approx. 9h)
# bipartite and create_match run for approx. 15min each
# long_running runs for 5min x 6 times = 30min
# long_running runs for 8h
LARGE_DATASET = [
    {
        "test": "bipartite.py",
        "options": ["--u-count", "300", "--v-count", "300"],
        "timeout": 30,
    },
    {
        "test": "create_match.py",
        "options": ["--vertex-count", "500000", "--create-pack-size", "500"],
        "timeout": 30,
    },
] + [
    {
        "test": "long_running.cpp",
        "options": ["--vertex-count", "100000", "--edge-count", "100000", "--max-time", "5", "--verify", "60"],
        "timeout": 8,
    },
] * 6 + [  # repeat the 5min long_running entry six times (approx. 30min total)
    {
        "test": "long_running.cpp",
        "options": ["--vertex-count", "200000", "--edge-count", "2000000", "--max-time", "480", "--verify", "300"],
        "timeout": 500,
    },
]

# paths
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
BASE_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, "..", ".."))
BUILD_DIR = os.path.join(BASE_DIR, "build")
CONFIG_DIR = os.path.join(BASE_DIR, "config")

# number of threads: the THREADS environment variable (e.g. THREADS=8)
# overrides the detected CPU count
THREADS = int(os.environ.get("THREADS", multiprocessing.cpu_count()))


# run test helper function
def run_test(args, test, options, timeout):
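    """Run a single stress test and raise if it fails or times out.

    `test` is the test's source file name: .py tests run from this directory
    under the configured Python interpreter, .cpp tests run as prebuilt
    binaries from the build tree. `timeout` is given in minutes.
    """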
    print("Running test '{}'".format(test))

    # find binary
    if test.endswith(".py"):
        logging = "DEBUG" if args.verbose else "WARNING"
        binary = [args.python, "-u", os.path.join(SCRIPT_DIR, test),
                "--logging", logging]
    elif test.endswith(".cpp"):
        exe = os.path.join(BUILD_DIR, "tests", "stress", test[:-4])
        if not os.path.exists(exe):
            exe = os.path.join(BASE_DIR, "build_release", "tests", "stress",
                    test[:-4])
        binary = [exe]
    else:
        raise Exception("Test '{}' binary not supported!".format(test))

    # start test; the dataset timeout is given in minutes
    cmd = binary + ["--worker-count", str(THREADS)] + options
    start = time.time()
    try:
        ret_test = subprocess.run(cmd, cwd = SCRIPT_DIR, timeout = timeout * 60)
    except subprocess.TimeoutExpired:
        raise Exception("Test '{}' timed out after {} minutes!".format(
                test, timeout))

    if ret_test.returncode != 0:
        raise Exception("Test '{}' binary returned non-zero ({})!".format(
                test, ret_test.returncode))

    runtime = time.time() - start
    print("    Done after {:.3f} seconds".format(runtime))


# parse arguments
parser = argparse.ArgumentParser(description = "Run stress tests on Memgraph.")
parser.add_argument("--memgraph", default = os.path.join(BUILD_DIR,
        "memgraph"))
parser.add_argument("--config", default = os.path.join(CONFIG_DIR,
        "stress.conf"))
parser.add_argument("--log-file", default = "")
parser.add_argument("--snapshot-directory", default = "")
parser.add_argument("--python", default = os.path.join(SCRIPT_DIR,
        "ve3", "bin", "python3"), type = str)
parser.add_argument("--large-dataset", action = "store_const",
        const = True, default = False)
parser.add_argument("--verbose", action = "store_const",
        const = True, default = False)
args = parser.parse_args()

# find memgraph binary
if not os.path.exists(args.memgraph):
    args.memgraph = os.path.join(BASE_DIR, "build_release", "memgraph")

# start memgraph
cwd = os.path.dirname(args.memgraph)
cmd = [args.memgraph, "--num-workers=" + str(THREADS)]
if not args.verbose:
    cmd += ["--minloglevel", "1"]
if args.log_file:
    cmd += ["--log-file", args.log_file]
if args.snapshot_directory:
    cmd += ["--snapshot-directory", args.snapshot_directory]
# inherit the current environment (PATH etc.) instead of replacing it, and
# point memgraph at the stress configuration
env = dict(os.environ, MEMGRAPH_CONFIG = args.config)
proc_mg = subprocess.Popen(cmd, cwd = cwd, env = env)
time.sleep(1.0)
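# NOTE: the fixed one second sleep assumes memgraph is accepting connections
# by then; a more robust sketch (assuming the default Bolt port 7687) would
# poll the port instead:
#
#     import socket
#     for _ in range(30):
#         try:
#             socket.create_connection(("127.0.0.1", 7687), timeout = 1).close()
#             break
#         except OSError:
#             time.sleep(1.0)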

# at exit cleanup: make sure memgraph doesn't outlive the script, even when a
# test raises
@atexit.register
def cleanup():
    global proc_mg
    # nothing to do if memgraph already exited on its own
    if proc_mg.poll() is not None:
        return
    proc_mg.kill()
    proc_mg.wait()

# run tests
dataset = LARGE_DATASET if args.large_dataset else SMALL_DATASET
for test in dataset:
    run_test(args, **test)

# stop memgraph
proc_mg.terminate()
ret_mg = proc_mg.wait()
if ret_mg != 0:
    raise Exception("Memgraph binary returned non-zero ({})!".format(ret_mg))

print("Done!")