Remove leftover Apollo files (#1)

This commit is contained in:
Matej Ferencevic 2020-09-21 14:58:52 +02:00 committed by GitHub
parent 0dcfdb9b89
commit e0ffc533b9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
42 changed files with 14 additions and 764 deletions

View File

@ -1,29 +0,0 @@
#!/usr/bin/env python3
"""Collect built Memgraph Debian packages and print them as a JSON list."""
import json
import os
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
def find_packages(build_output_dir):
    """Return archive entries for every memgraph*.deb in build_output_dir.

    build_output_dir is resolved relative to this script's directory; the
    returned "archive" paths stay relative so downstream tooling can reuse
    them. A missing directory yields an empty list.
    """
    packages = []
    search_dir = os.path.join(SCRIPT_DIR, build_output_dir)
    if not os.path.exists(search_dir):
        return packages
    for entry in os.listdir(search_dir):
        if not (entry.startswith("memgraph") and entry.endswith(".deb")):
            continue
        # Debian package names look like memgraph_<version>_<arch>.deb,
        # so the second underscore-separated field is the version.
        version = entry.split("_")[1]
        packages.append({
            "name": "Release " + version + " (deb package)",
            "archive": os.path.join(build_output_dir, entry),
        })
    return packages
archives = []
# Find enterprise package(s).
archives.extend(find_packages(os.path.join("build_release", "output")))
# Find community package(s).
archives.extend(find_packages(os.path.join("build_community", "output")))
print(json.dumps(archives, indent=4, sort_keys=True))

View File

@ -1,17 +0,0 @@
- name: Binaries
archive:
- build_debug/memgraph
- build_debug/config/memgraph.conf
- build_release/memgraph
- build_release/config/memgraph.conf
- build_release/tools/src/mg_client
- build_community/memgraph
- build_community/config/memgraph.conf
filename: binaries.tar.gz
- name: Doxygen documentation
cd: docs/doxygen/html
archive:
- .
filename: documentation.tar.gz
host: true

View File

@ -1,110 +0,0 @@
- name: Diff build
project: ^mg-master-diff$
commands: |
# Activate toolchain
export PATH=/opt/toolchain-v1/bin:$PATH
export LD_LIBRARY_PATH=/opt/toolchain-v1/lib:/opt/toolchain-v1/lib64
# Copy untouched repository to parent folder.
cd ..
cp -r memgraph parent
cd memgraph
# Initialize and create documentation.
TIMEOUT=1200 ./init
doxygen Doxyfile
# Remove default build directory.
rm -r build
# Build debug binaries.
mkdir build_debug
cd build_debug
cmake ..
TIMEOUT=1200 make -j$THREADS
# Build coverage binaries.
cd ..
mkdir build_coverage
cd build_coverage
cmake -DTEST_COVERAGE=ON ..
TIMEOUT=1200 make -j$THREADS memgraph__unit
# Build release binaries.
cd ..
mkdir build_release
cd build_release
cmake -DCMAKE_BUILD_TYPE=release ..
TIMEOUT=1200 make -j$THREADS
# Build community binaries.
cd ..
mkdir build_community
cd build_community
cmake -DCMAKE_BUILD_TYPE=release -DMG_ENTERPRISE=OFF ..
TIMEOUT=1200 make -j$THREADS
cd ..
# Checkout to parent commit and initialize.
cd ../parent
git checkout HEAD~1
TIMEOUT=1200 ./init
# Build parent release binaries.
mkdir build_release
cd build_release
cmake -DCMAKE_BUILD_TYPE=release ..
TIMEOUT=1200 make -j$THREADS memgraph memgraph__macro_benchmark
# release build is the default one
- name: Release build
commands: |
# Activate toolchain
export PATH=/opt/toolchain-v1/bin:$PATH
export LD_LIBRARY_PATH=/opt/toolchain-v1/lib:/opt/toolchain-v1/lib64
# Initialize and create documentation.
TIMEOUT=1200 ./init
doxygen Doxyfile
# Remove default build directory.
rm -r build
# Build debug binaries.
mkdir build_debug
cd build_debug
cmake ..
TIMEOUT=1200 make -j$THREADS
# Build coverage binaries.
cd ..
mkdir build_coverage
cd build_coverage
cmake -DTEST_COVERAGE=ON ..
TIMEOUT=1200 make -j$THREADS memgraph__unit
# Build release binaries.
cd ..
mkdir build_release
cd build_release
cmake -DCMAKE_BUILD_TYPE=Release -DUSE_READLINE=OFF ..
TIMEOUT=1200 make -j$THREADS
# Create Debian package.
mkdir output
cd output
TIMEOUT=600 cpack -G DEB --config ../CPackConfig.cmake
cd ..
# Build community binaries.
cd ..
mkdir build_community
cd build_community
cmake -DCMAKE_BUILD_TYPE=Release -DUSE_READLINE=OFF -DMG_ENTERPRISE=OFF ..
TIMEOUT=1200 make -j$THREADS
# Create Debian package.
mkdir output
cd output
TIMEOUT=600 cpack -G DEB --config ../CPackConfig.cmake

View File

@ -1,121 +0,0 @@
#!/usr/bin/env python3
import json
import os
import re
import subprocess
# paths
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
WORKSPACE_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, "..", ".."))
TESTS_DIR_REL = os.path.join("..", "build_debug", "tests")
TESTS_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, TESTS_DIR_REL))
# test ordering: first unit, then concurrent, then everything else
CTEST_ORDER = {"unit": 0, "concurrent": 1}
CTEST_DELIMITER = "__"
def get_runs(build_dir, include=None, exclude=None, outfile=None,
             name_prefix=""):
    """Discover ctest tests under ``build_dir`` and describe how to run them.

    Runs ``ctest -N`` in ``<build_dir>/tests`` (resolved relative to this
    script's directory), keeps only tests whose ctest name contains
    ``memgraph``, orders them per CTEST_ORDER (unit first, then concurrent,
    then the rest) and returns a list of run dictionaries with the keys
    ``name``, ``cd``, ``commands``, ``infiles`` and ``outfile_paths``.

    include/exclude are lists of test-name prefixes used as allow/deny
    filters; outfile, when given, is added (with '.' regex-escaped) to each
    run's ``outfile_paths``; name_prefix is prepended to every run name.
    """
    tests_dir = os.path.join(build_dir, "tests")
    tests_dir_abs = os.path.normpath(os.path.join(SCRIPT_DIR, tests_dir))
    ctest_output = subprocess.run(
        ["ctest", "-N"], cwd=tests_dir_abs, check=True,
        stdout=subprocess.PIPE).stdout.decode("utf-8")
    tests = []
    for row in ctest_output.split("\n"):
        # Filter rows only containing tests. Raw string: "\s" in a plain
        # literal is an invalid escape sequence on modern Python.
        if not re.match(r"^\s*Test\s+#", row):
            continue
        if not row.count("memgraph"):
            continue
        test_name = row.split(":")[1].strip()
        name = test_name.replace("memgraph" + CTEST_DELIMITER, "")
        path = os.path.join(
            tests_dir, name.replace(CTEST_DELIMITER, "/", 1))
        order = CTEST_ORDER.get(
            name.split(CTEST_DELIMITER)[0], len(CTEST_ORDER))
        tests.append((order, name, path))
    tests.sort()
    runs = []
    for test in tests:
        order, name, path = test
        dirname, basename = os.path.split(path)
        files = [basename]
        # Check whether the test should be included.
        if include is not None:
            if not any(name.startswith(inc) for inc in include):
                continue
        # Check whether the test should be excluded.
        if exclude is not None:
            if any(name.startswith(exc) for exc in exclude):
                continue
        # Larger timeout for benchmark and concurrent tests.
        prefix = ""
        if name.startswith("benchmark") or name.startswith("concurrent"):
            prefix = "TIMEOUT=600 "
        # Larger timeout for storage_v2_durability unit test.
        if name.endswith("storage_v2_durability"):
            prefix = "TIMEOUT=300 "
        # py_module unit test requires user-facing 'mgp' module.
        if name.endswith("py_module"):
            mgp_path = os.path.join("..", "include", "mgp.py")
            files.append(os.path.relpath(mgp_path, dirname))
        # Get output files; '.' is escaped so the paths are valid regexes.
        outfile_paths = []
        if outfile:
            curdir_abs = os.path.normpath(os.path.join(SCRIPT_DIR, dirname))
            curdir_rel = os.path.relpath(curdir_abs, WORKSPACE_DIR)
            outfile_paths.append(r"\./" + curdir_rel.replace(".", r"\.") +
                                 "/" + outfile.replace(".", r"\."))
        runs.append({
            "name": name_prefix + name,
            "cd": dirname,
            "commands": prefix + "./" + basename,
            "infiles": files,
            "outfile_paths": outfile_paths,
        })
    return runs
# Generation mode: "diff" builds skip the benchmark runs below.
is_diff = os.environ.get("PROJECT", "") == "mg-master-diff"
mode = "diff" if is_diff else "release"
# Unit tests run from the coverage build so profiling data is produced.
runs = get_runs("../build_coverage", include=["unit"],
                outfile="default.profraw")
# Everything except unit and benchmark tests runs from the debug build.
runs.extend(get_runs("../build_debug", exclude=["unit", "benchmark"]))
# Benchmarks only run for full (non-diff) builds, against the release build.
if mode != "diff":
    runs.extend(get_runs("../build_release", include=["benchmark"]))
# Community unit tests, name-prefixed to avoid clashing with enterprise runs.
runs.extend(get_runs("../build_community", include=["unit"],
                     name_prefix="community__"))
print(json.dumps(runs, indent=4, sort_keys=True))

View File

@ -1,5 +0,0 @@
- name: test_lcp
cd: ../build_debug/tests/unit
commands: ./test_lcp
infiles:
- test_lcp

View File

@ -1,6 +0,0 @@
- name: drivers
commands: ./run.sh
infiles:
- . # all files present here
- ../../build_debug/memgraph # memgraph binary
enable_network: true

View File

@ -23,9 +23,6 @@ mkdir -p $tmpdir
# Find memgraph binaries.
binary_dir="$DIR/../../build"
if [ ! -d $binary_dir ]; then
binary_dir="$DIR/../../build_debug"
fi
# Start memgraph.
$binary_dir/memgraph \

View File

@ -1,21 +0,0 @@
- name: feature_benchmark__ha__read
cd: ha/read
commands: ./runner.sh
infiles:
- runner.sh # runner script
- raft.json # raft configuration file
- coordination.json # coordination configuration file
- ../../../../build_release/tests/feature_benchmark/ha/read/benchmark # benchmark binary
- ../../../../build_release/memgraph_ha # memgraph binary
enable_network: true
- name: feature_benchmark__ha__write
cd: ha/write
commands: ./runner.sh
infiles:
- runner.sh # runner script
- raft.json # raft configuration file
- coordination.json # coordination configuration file
- ../../../../build_release/tests/feature_benchmark/ha/write/benchmark # benchmark binary
- ../../../../build_release/memgraph_ha # memgraph binary
enable_network: true

View File

@ -22,9 +22,6 @@ cd "$DIR"
# Find memgraph binaries.
binary_dir="$DIR/../../../../build"
if [ ! -d $binary_dir ]; then
binary_dir="$DIR/../../../../build_release"
fi
# Results for apollo
RESULTS="$DIR/.apollo_measurements"

View File

@ -22,9 +22,6 @@ cd "$DIR"
# Find memgraph binaries.
binary_dir="$DIR/../../../../build"
if [ ! -d $binary_dir ]; then
binary_dir="$DIR/../../../../build_release"
fi
# Results for apollo
RESULTS="$DIR/.apollo_measurements"

View File

@ -1,145 +0,0 @@
- name: integration__telemetry
cd: telemetry
commands: TIMEOUT=200 ./runner.py
infiles:
- runner.py # runner script
- server.py # server script
- ../../../build_debug/tests/integration/telemetry/client # client binary
- ../../../build_debug/tests/manual/kvstore_console # kvstore console binary
- name: integration__ssl
cd: ssl
commands: ./runner.sh
infiles:
- runner.sh # runner script
- ../../../build_debug/tests/integration/ssl/tester # tester binary
enable_network: true
- name: integration__transactions
cd: transactions
commands: ./runner.sh
infiles:
- runner.sh # runner script
- ../../../build_debug/memgraph # memgraph binary
- ../../../build_debug/tests/integration/transactions/tester # tester binary
- name: integration__auth
cd: auth
commands: TIMEOUT=820 ./runner.py
infiles:
- runner.py # runner script
- ../../../build_debug/memgraph # memgraph binary
- ../../../build_debug/tests/integration/auth/checker # checker binary
- ../../../build_debug/tests/integration/auth/tester # tester binary
- name: integration__audit
cd: audit
commands: ./runner.py
infiles:
- runner.py # runner script
- ../../../build_debug/memgraph # memgraph binary
- ../../../build_debug/tests/integration/audit/tester # tester binary
- name: integration__ldap
cd: ldap
commands: |
./prepare.sh
./runner.py
infiles:
- prepare.sh # preparation script
- runner.py # runner script
- schema.ldif # schema file
- ve3 # Python virtual environment
- ../../../src/auth/reference_modules/ldap.py # LDAP auth module
- ../../../build_debug/memgraph # memgraph binary
- ../../../build_debug/tests/integration/ldap/tester # tester binary
enable_network: true
- name: integration__mg_import_csv
cd: mg_import_csv
commands: ./runner.py
infiles:
- runner.py # runner script
- tests # tests directory
- ../../../build_debug/memgraph # memgraph binary
- ../../../build_debug/src/mg_import_csv # mg_import_csv binary
- ../../../build_debug/tests/integration/mg_import_csv/tester # tester binary
- name: integration__durability
cd: durability
commands: ./runner.py
infiles:
- runner.py # runner script
- tests # tests directory
- ../../../build_debug/memgraph # memgraph binary
- ../../../build_debug/tools/src/mg_dump # memgraph dump binary
#- name: integration__ha_basic
# cd: ha/basic
# commands: TIMEOUT=480 ./runner.py
# infiles:
# - runner.py # runner script
# - raft.json # raft configuration
# - ../ha_test.py # raft test base module
# - ../../../../build_debug/memgraph_ha # memgraph ha binary
# - ../../../../build_debug/tests/integration/ha/basic/tester # tester binary
#
#- name: integration__ha_constraints
# cd: ha/constraints
# commands: ./runner.py
# infiles:
# - runner.py # runner script
# - raft.json # raft configuration
# - ../ha_test.py # raft test base module
# - ../../../../build_debug/memgraph_ha # memgraph ha binary
# - ../../../../build_debug/tests/integration/ha/constraints/tester # tester binary
#
#- name: integration__ha_index
# cd: ha/index
# commands: ./runner.py
# infiles:
# - runner.py # runner script
# - raft.json # raft configuration
# - ../ha_test.py # raft test base module
# - ../../../../build_debug/memgraph_ha # memgraph ha binary
# - ../../../../build_debug/tests/integration/ha/index/tester # tester binary
#
#- name: integration__ha_large_log_entries
# cd: ha/large_log_entries
# commands: TIMEOUT=600 ./runner.py
# infiles:
# - runner.py # runner script
# - raft.json # raft configuration
# - ../ha_test.py # raft test base module
# - ../../../../build_debug/memgraph_ha # memgraph ha binary
# - ../../../../build_debug/tests/integration/ha/large_log_entries/tester # tester binary
#
#- name: integration__ha_leader_election
# cd: ha/leader_election
# commands: TIMEOUT=300 ./runner.py
# infiles:
# - runner.py # runner script
# - raft.json # raft configuration
# - ../ha_test.py # raft test base module
# - ../../../../build_debug/memgraph_ha # memgraph ha binary
# - ../../../../build_debug/tests/integration/ha/leader_election/tester # tester binary
#
#- name: integration__ha_log_compaction
# cd: ha/log_compaction
# commands: ./runner.py
# infiles:
# - runner.py # runner script
# - raft.json # raft configuration
# - ../ha_test.py # raft test base module
# - ../../../../build_debug/memgraph_ha # memgraph ha binary
# - ../../../../build_debug/tests/manual/ha_client # tester binary
#
#- name: integration__ha_term_updates
# cd: ha/term_updates
# commands: ./runner.py
# infiles:
# - runner.py # runner script
# - raft.json # raft configuration
# - ../ha_test.py # raft test base module
# - ../../../../build_debug/memgraph_ha # memgraph ha binary
# - ../../../../build_debug/tests/integration/ha/term_updates/tester # tester binary

View File

@ -106,13 +106,8 @@ def execute_test(memgraph_binary, tester_binary):
if __name__ == "__main__":
memgraph_binary = os.path.join(PROJECT_DIR, "build", "memgraph")
if not os.path.exists(memgraph_binary):
memgraph_binary = os.path.join(PROJECT_DIR, "build_debug", "memgraph")
tester_binary = os.path.join(PROJECT_DIR, "build", "tests",
"integration", "audit", "tester")
if not os.path.exists(tester_binary):
tester_binary = os.path.join(PROJECT_DIR, "build_debug", "tests",
"integration", "audit", "tester")
parser = argparse.ArgumentParser()
parser.add_argument("--memgraph", default=memgraph_binary)

View File

@ -301,18 +301,10 @@ def execute_test(memgraph_binary, tester_binary, checker_binary):
if __name__ == "__main__":
memgraph_binary = os.path.join(PROJECT_DIR, "build", "memgraph")
if not os.path.exists(memgraph_binary):
memgraph_binary = os.path.join(PROJECT_DIR, "build_debug", "memgraph")
tester_binary = os.path.join(PROJECT_DIR, "build", "tests",
"integration", "auth", "tester")
if not os.path.exists(tester_binary):
tester_binary = os.path.join(PROJECT_DIR, "build_debug", "tests",
"integration", "auth", "tester")
checker_binary = os.path.join(PROJECT_DIR, "build", "tests",
"integration", "auth", "checker")
if not os.path.exists(checker_binary):
checker_binary = os.path.join(PROJECT_DIR, "build_debug", "tests",
"integration", "auth", "checker")
parser = argparse.ArgumentParser()
parser.add_argument("--memgraph", default=memgraph_binary)

View File

@ -124,12 +124,7 @@ def find_test_directories(directory):
if __name__ == "__main__":
memgraph_binary = os.path.join(PROJECT_DIR, "build", "memgraph")
if not os.path.exists(memgraph_binary):
memgraph_binary = os.path.join(PROJECT_DIR, "build_debug", "memgraph")
dump_binary = os.path.join(PROJECT_DIR, "build", "tools", "src", "mg_dump")
if not os.path.exists(dump_binary):
dump_binary = os.path.join(PROJECT_DIR, "build_debug", "tools", "src",
"mg_dump")
parser = argparse.ArgumentParser()
parser.add_argument("--memgraph", default=memgraph_binary)

View File

@ -110,10 +110,7 @@ class HaBasicTest(HaTestBase):
def find_correct_path(path):
f = os.path.join(PROJECT_DIR, "build", path)
if not os.path.exists(f):
f = os.path.join(PROJECT_DIR, "build_debug", path)
return f
return os.path.join(PROJECT_DIR, "build", path)
if __name__ == "__main__":

View File

@ -94,10 +94,7 @@ class HaIndexTest(HaTestBase):
def find_correct_path(path):
f = os.path.join(PROJECT_DIR, "build", path)
if not os.path.exists(f):
f = os.path.join(PROJECT_DIR, "build_debug", path)
return f
return os.path.join(PROJECT_DIR, "build", path)
if __name__ == "__main__":

View File

@ -61,10 +61,7 @@ class HaIndexTest(HaTestBase):
def find_correct_path(path):
f = os.path.join(PROJECT_DIR, "build", path)
if not os.path.exists(f):
f = os.path.join(PROJECT_DIR, "build_debug", path)
return f
return os.path.join(PROJECT_DIR, "build", path)
if __name__ == "__main__":

View File

@ -67,10 +67,7 @@ class HaLargeLogEntriesTest(HaTestBase):
def find_correct_path(path):
f = os.path.join(PROJECT_DIR, "build", path)
if not os.path.exists(f):
f = os.path.join(PROJECT_DIR, "build_debug", path)
return f
return os.path.join(PROJECT_DIR, "build", path)
if __name__ == "__main__":

View File

@ -72,10 +72,7 @@ class HaLeaderElectionTest(HaTestBase):
def find_correct_path(path):
f = os.path.join(PROJECT_DIR, "build", path)
if not os.path.exists(f):
f = os.path.join(PROJECT_DIR, "build_debug", path)
return f
return os.path.join(PROJECT_DIR, "build", path)
if __name__ == "__main__":

View File

@ -79,10 +79,7 @@ class HaLogCompactionTest(HaTestBase):
def find_correct_path(path):
f = os.path.join(PROJECT_DIR, "build", path)
if not os.path.exists(f):
f = os.path.join(PROJECT_DIR, "build_debug", path)
return f
return os.path.join(PROJECT_DIR, "build", path)
if __name__ == "__main__":

View File

@ -98,10 +98,7 @@ class HaTermUpdatesTest(HaTestBase):
def find_correct_path(path):
f = os.path.join(PROJECT_DIR, "build", path)
if not os.path.exists(f):
f = os.path.join(PROJECT_DIR, "build_debug", path)
return f
return os.path.join(PROJECT_DIR, "build", path)
if __name__ == "__main__":

View File

@ -362,13 +362,8 @@ def test_ssl_failure(memgraph, tester_binary):
if __name__ == "__main__":
memgraph_binary = os.path.join(PROJECT_DIR, "build", "memgraph")
if not os.path.exists(memgraph_binary):
memgraph_binary = os.path.join(PROJECT_DIR, "build_debug", "memgraph")
tester_binary = os.path.join(PROJECT_DIR, "build", "tests",
"integration", "ldap", "tester")
if not os.path.exists(tester_binary):
tester_binary = os.path.join(PROJECT_DIR, "build_debug", "tests",
"integration", "ldap", "tester")
parser = argparse.ArgumentParser()
parser.add_argument("--memgraph", default=memgraph_binary)

View File

@ -11,6 +11,7 @@ import yaml
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
BASE_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, "..", "..", ".."))
BUILD_DIR = os.path.join(BASE_DIR, "build")
def wait_for_server(port, delay=0.1):
@ -20,16 +21,6 @@ def wait_for_server(port, delay=0.1):
time.sleep(delay)
def get_build_dir():
if os.path.exists(os.path.join(BASE_DIR, "build_release")):
return os.path.join(BASE_DIR, "build_release")
if os.path.exists(os.path.join(BASE_DIR, "build_debug")):
return os.path.join(BASE_DIR, "build_debug")
if os.path.exists(os.path.join(BASE_DIR, "build_community")):
return os.path.join(BASE_DIR, "build_community")
return os.path.join(BASE_DIR, "build")
def extract_rows(data):
return list(map(lambda x: x.strip(), data.strip().split("\n")))
@ -173,11 +164,11 @@ def execute_test(name, test_path, test_config, memgraph_binary,
if __name__ == "__main__":
memgraph_binary = os.path.join(get_build_dir(), "memgraph")
memgraph_binary = os.path.join(BUILD_DIR, "memgraph")
mg_import_csv_binary = os.path.join(
get_build_dir(), "src", "mg_import_csv")
BUILD_DIR, "src", "mg_import_csv")
tester_binary = os.path.join(
get_build_dir(), "tests", "integration", "mg_import_csv", "tester")
BUILD_DIR, "tests", "integration", "mg_import_csv", "tester")
parser = argparse.ArgumentParser()
parser.add_argument("--memgraph", default=memgraph_binary)

View File

@ -32,9 +32,6 @@ for i in ca1 ca2; do
done
binary_dir="$DIR/../../../build"
if [ ! -d $binary_dir ]; then
binary_dir="$DIR/../../../build_debug"
fi
set +e

View File

@ -84,15 +84,8 @@ if __name__ == "__main__":
server_binary = os.path.join(SCRIPT_DIR, "server.py")
client_binary = os.path.join(PROJECT_DIR, "build", "tests",
"integration", "telemetry", "client")
if not os.path.exists(client_binary):
client_binary = os.path.join(PROJECT_DIR, "build_debug", "tests",
"integration", "telemetry", "client")
kvstore_console_binary = os.path.join(PROJECT_DIR, "build", "tests",
"manual", "kvstore_console")
if not os.path.exists(kvstore_console_binary):
kvstore_console_binary = os.path.join(PROJECT_DIR, "build_debug",
"tests", "manual",
"kvstore_console")
parser = argparse.ArgumentParser()
parser.add_argument("--client", default=client_binary)

View File

@ -4,9 +4,6 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "$DIR"
binary_dir="$DIR/../../../build"
if [ ! -d $binary_dir ]; then
binary_dir="$DIR/../../../build_debug"
fi
# Start the memgraph process.
$binary_dir/memgraph &

View File

@ -1,62 +0,0 @@
- name: macro_benchmark__query_suite
supervisor: ./harness
arguments: &QUERY_SUITE_ARGS >
QuerySuite MemgraphRunner
--groups aggregation 1000_create unwind_create dense_expand match
--no-strict --database-cpu-ids 1 --client-cpu-ids 2
infiles: &MACRO_BENCHMARK_INFILES
- . # current directory
- ../../build_release/memgraph # memgraph release binary
- ../../config # directory with config files
- ../../build_release/tests/macro_benchmark # macro benchmark client binaries
outfile_paths: &MACRO_BENCHMARK_OUTFILE_PATHS
- \./memgraph/tests/macro_benchmark/\.harness_summary
- name: macro_benchmark__query_parallel_suite
project: ^mg-master-release-daily$ # regex to match only 'mg-master-release-daily'
supervisor: ./harness
arguments: &QUERY_PARALLEL_SUITE_ARGS >
QueryParallelSuite MemgraphRunner
--groups aggregation_parallel create_parallel bfs_parallel
--database-cpu-ids 1 2 3 4 5 6 7 8 9
--client-cpu-ids 10 11 12 13 14 15 16 17 18 19
--num-database-workers 9 --num-clients-workers 30
--no-strict
infiles: *MACRO_BENCHMARK_INFILES
outfile_paths: *MACRO_BENCHMARK_OUTFILE_PATHS
- name: macro_benchmark_parent__query_suite
project: ^mg-master-diff$ # regex to match only 'mg-master-diff'
cd: ../../../parent/tests/macro_benchmark # all files are relative to the parent build
supervisor: ./harness
arguments: *QUERY_SUITE_ARGS
infiles: *MACRO_BENCHMARK_INFILES
outfile_paths: &MACRO_BENCHMARK_PARENT_OUTFILE_PATHS
- \./parent/tests/macro_benchmark/\.harness_summary
link_to_run: macro_benchmark__query_suite
- name: macro_benchmark_parent__query_parallel_suite
project: ^NEVER$ # regex that currently doesn't match any project
cd: ../../../parent/tests/macro_benchmark # all files are relative to the parent build
supervisor: ./harness
arguments: *QUERY_PARALLEL_SUITE_ARGS
infiles: *MACRO_BENCHMARK_INFILES
outfile_paths: *MACRO_BENCHMARK_PARENT_OUTFILE_PATHS
link_to_run: macro_benchmark__query_parallel_suite
- name: macro_benchmark_summary
project: ^mg-master-diff$ # regex to match only 'mg-master-diff'
type: data process
require_runs: ^macro_benchmark(_parent)?__query_suite$ # regex to match our runs
cd: ../../..
commands: >
./memgraph/tools/apollo/macro_benchmark_summary
--current macro_benchmark__query_suite/memgraph/tests/macro_benchmark/.harness_summary
--previous macro_benchmark_parent__query_suite/parent/tests/macro_benchmark/.harness_summary
--output .harness_summary
# --current macro_benchmark__query_suite/memgraph/tests/macro_benchmark/.harness_summary macro_benchmark__query_parallel_suite/memgraph/tests/macro_benchmark/.harness_summary
# --previous macro_benchmark_parent__query_suite/parent/tests/macro_benchmark/.harness_summary macro_benchmark_parent__query_parallel_suite/parent/tests/macro_benchmark/.harness_summary
infiles:
- memgraph/tools/apollo/macro_benchmark_summary
outfile_paths:
- \./.harness_summary

View File

@ -28,11 +28,6 @@ class QueryClient:
client_path = "tests/macro_benchmark/query_client"
client = get_absolute_path(client_path, "build")
if not os.path.exists(client):
# Apollo builds both debug and release binaries on diff
# so we need to use the release client if the debug one
# doesn't exist
client = get_absolute_path(client_path, "build_release")
queries_fd, queries_path = tempfile.mkstemp()
try:
@ -93,11 +88,6 @@ class LongRunningClient:
client_path = "tests/macro_benchmark/{}".format(client)
client = get_absolute_path(client_path, "build")
if not os.path.exists(client):
# Apollo builds both debug and release binaries on diff
# so we need to use the release client if the debug one
# doesn't exist
client = get_absolute_path(client_path, "build_release")
config_fd, config_path = tempfile.mkstemp()
try:

View File

@ -19,8 +19,6 @@ DIR_PATH = os.path.dirname(os.path.realpath(__file__))
def get_absolute_path(path, base=""):
if base == "build":
extra = "../../build"
elif base == "build_release":
extra = "../../build_release"
elif base == "libs":
extra = "../../libs"
elif base == "config":

View File

@ -57,11 +57,6 @@ class Memgraph:
# find executable path
runner_bin = self.args.runner_bin
if not os.path.exists(runner_bin):
# Apollo builds both debug and release binaries on diff
# so we need to use the release binary if the debug one
# doesn't exist
runner_bin = get_absolute_path("memgraph", "build_release")
# start memgraph
self.database_bin.run(runner_bin, database_args, timeout=600)

View File

@ -1,14 +0,0 @@
- name: public_benchmark__ldbc
project: ^NEVER$ # regex that currently doesn't match any project (was 'release')
commands: . continuous_integration
infiles:
- . # current directory
- ../../../build_release/memgraph # memgraph release binary
- ../../../build_release/tools/src/mg_import_csv # memgraph csv import tool
- ../../../config # directory with config files
- ../../../libs/neo4j # neo4j directory
- ../../../tools/plot/ldbc_latency # ldbc plot generation tool
outfile_paths: # TODO: maybe this should also accept relative paths?
- \./memgraph/tests/public_benchmark/ldbc/results/.+
- \./memgraph/tests/public_benchmark/ldbc/plots/.+
enable_network: true

View File

@ -13,9 +13,6 @@ script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
base_dir="${script_dir}/../../.."
neo4j_dir="${base_dir}/libs/neo4j"
build_dir="${base_dir}/build"
if [ ! -d "${build_dir}" ]; then
build_dir="${base_dir}/build_release"
fi
# Add Maven to PATH
export PATH=$PATH:${script_dir}/maven/bin

View File

@ -36,8 +36,6 @@ class Memgraph:
def start(self):
# find executable path
binary = os.path.join(BASE_DIR, "build", "memgraph")
if not os.path.exists(binary):
binary = os.path.join(BASE_DIR, "build_release", "memgraph")
# database args
database_args = [binary,

View File

@ -1,7 +0,0 @@
- name: quality_assurance
commands: TIMEOUT=300 ./continuous_integration
infiles:
- . # current directory
- ../../build_release/memgraph # memgraph release binary
outfile_paths: &OUTFILE_PATHS
- \./memgraph/tests/qa/\.quality_assurance_status

View File

@ -1,50 +0,0 @@
- name: stress
commands: TIMEOUT=600 ./continuous_integration
infiles: &STRESS_INFILES
- . # current directory
- ../../build_release/memgraph # memgraph release binary
- ../../build_release/tests/stress # stress client binaries
- name: stress_ssl
commands: TIMEOUT=600 ./continuous_integration --use-ssl
infiles: *STRESS_INFILES
- name: stress_large
project: release
commands: TIMEOUT=43200 ./continuous_integration --large-dataset
infiles: *STRESS_INFILES
- name: community__stress
commands: TIMEOUT=600 ./continuous_integration
infiles: &COMMUNITY_STRESS_INFILES
- . # current directory
- ../../build_community/memgraph # memgraph community binary
- ../../build_community/tests/stress # stress client binaries
- name: community__stress_large
project: release
commands: TIMEOUT=43200 ./continuous_integration --large-dataset
infiles: *COMMUNITY_STRESS_INFILES
#- name: stress_ha_normal_operation
# commands: TIMEOUT=200 ./continuous_integration_ha
# infiles: &STRESS_HA_INFILES
# - . # current directory
# - ../../build_release/memgraph_ha # memgraph release binary
# - ../../build_release/tests/stress/ # stress client binaries
#
#- name: stress_ha_normal_operation_large
# project: release
# commands: TIMEOUT=7500 ./continuous_integration_ha --large-dataset
# infiles: *STRESS_HA_INFILES
- name: durability
commands: TIMEOUT=300 ./ve3/bin/python3 durability --num-steps 5
infiles: &DURABILITY_INFILES
- . # current directory
- ../../build_release/memgraph # memgraph release binary
- name: durability_large
project: release
commands: TIMEOUT=5400 ./ve3/bin/python3 durability --num-steps 20
infiles: *DURABILITY_INFILES

View File

@ -70,6 +70,7 @@ LARGE_DATASET = [
# paths
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
BASE_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, "..", ".."))
BUILD_DIR = os.path.join(BASE_DIR, "build")
MEASUREMENTS_FILE = os.path.join(SCRIPT_DIR, ".apollo_measurements")
KEY_FILE = os.path.join(SCRIPT_DIR, ".key.pem")
CERT_FILE = os.path.join(SCRIPT_DIR, ".cert.pem")
@ -86,14 +87,6 @@ else:
THREADS = multiprocessing.cpu_count()
def get_build_dir():
if os.path.exists(os.path.join(BASE_DIR, "build_release")):
return os.path.join(BASE_DIR, "build_release")
if os.path.exists(os.path.join(BASE_DIR, "build_community")):
return os.path.join(BASE_DIR, "build_community")
return os.path.join(BASE_DIR, "build")
def wait_for_server(port, delay=0.1):
cmd = ["nc", "-z", "-w", "1", "127.0.0.1", str(port)]
while subprocess.call(cmd) != 0:
@ -111,7 +104,7 @@ def run_test(args, test, options, timeout):
binary = [args.python, "-u", os.path.join(SCRIPT_DIR, test),
"--logging", logging]
elif test.endswith(".cpp"):
exe = os.path.join(get_build_dir(), "tests", "stress", test[:-4])
exe = os.path.join(BUILD_DIR, "tests", "stress", test[:-4])
binary = [exe]
else:
raise Exception("Test '{}' binary not supported!".format(test))
@ -133,7 +126,7 @@ def run_test(args, test, options, timeout):
# parse arguments
parser = argparse.ArgumentParser(description = "Run stress tests on Memgraph.")
parser.add_argument("--memgraph", default = os.path.join(get_build_dir(),
parser.add_argument("--memgraph", default = os.path.join(BUILD_DIR,
"memgraph"))
parser.add_argument("--log-file", default = "")
parser.add_argument("--data-directory", default = "")

View File

@ -84,9 +84,6 @@ def run_test(args, test, options, timeout):
"--logging", logging]
elif test.endswith(".cpp"):
exe = os.path.join(BUILD_DIR, "tests", "stress", test[:-4])
if not os.path.exists(exe):
exe = os.path.join(BASE_DIR, "build_release", "tests", "stress",
test[:-4])
binary = [exe]
else:
raise Exception("Test '{}' binary not supported!".format(test))
@ -106,10 +103,6 @@ def run_test(args, test, options, timeout):
return runtime
# find HA memgraph binary
if not os.path.exists(args.memgraph):
args.memgraph = os.path.join(BASE_DIR, "build_release", "memgraph_ha")
# Generate database instance flags
workers = [None for worker in range(CLUSTER_SIZE)]
durability_directory = tempfile.TemporaryDirectory()

View File

@ -45,10 +45,6 @@ parser.add_argument("--num-clients", default=multiprocessing.cpu_count())
parser.add_argument("--num-steps", type=int, default=5)
args = parser.parse_args()
# Find Memgraph directory (alternative location)
if not os.path.exists(args.memgraph):
args.memgraph = os.path.join(BASE_DIR, "build_release", "memgraph")
# Memgraph run command construction
cwd = os.path.dirname(args.memgraph)
cmd = [args.memgraph, "--bolt-num-workers=" + str(DB_WORKERS),

View File

@ -1,7 +0,0 @@
- name: Code coverage
cd: generated/html
run_type: data process
archive:
- .
filename: code_coverage.tar.gz
host: true

View File

@ -1,29 +0,0 @@
- name: cppcheck
commands: |
export PATH=/opt/toolchain-v1/bin:$PATH
export LD_LIBRARY_PATH=/opt/toolchain-v1/lib:/opt/toolchain-v1/lib64
TIMEOUT=2400 ./cppcheck
infiles:
- cppcheck # cppcheck script
- ../../src # src source dir
- ../../tests # tests source dir
- ../../.git # git directory (used to find out changed files in commit)
- ../../.clang-format # clang-format config file
outfile_paths:
- \./memgraph/tools/apollo/\.cppcheck_errors
- name: code_coverage
type: data process
require_runs: ^unit__.+ # regex to match all unit runs
commands: |
export PATH=/opt/toolchain-v1/bin:$PATH
export LD_LIBRARY_PATH=/opt/toolchain-v1/lib:/opt/toolchain-v1/lib64
TIMEOUT=300 ./coverage_convert
infiles:
- coverage_convert # coverage_convert script
- coverage_parse_export # coverage_parse_export script
- apollo_archives.yaml # coverage archive export config
- ../../src # src source dir
outfile_paths:
- \./memgraph/tools/apollo/generated/\.coverage_summary
- \./memgraph/tools/apollo/generated/coverage\.json

View File

@ -1,9 +0,0 @@
- name: tools__test_mg_client
commands: |
./test_mg_client
./test_mg_client --use-ssl
infiles:
- test_mg_client # test_mg_client script
- client # test files
- ../../build_release/memgraph # memgraph binary
- ../../build_release/tools/src/mg_client # mg_client binary

View File

@ -29,9 +29,6 @@ cd $tmpdir
# Find memgraph binaries.
memgraph_dir="$DIR/../../build"
if [ ! -d $memgraph_dir ]; then
memgraph_dir="$DIR/../../build_release"
fi
# Find mg_client binaries.
client_dir="$memgraph_dir/tools/src"