Refactor Apollo build projects
Summary: Fix LDBC run

Reviewers: buda, teon.banek

Reviewed By: buda, teon.banek

Subscribers: pullbot

Differential Revision: https://phabricator.memgraph.io/D1317
parent a88c598822
commit b2f3bf9709
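This commit replaces the single in-tree build directory with per-configuration directories (build_debug, build_coverage, build_release) and updates every Apollo project, test script, and path that referenced plain build. A rough local equivalent of the new flow, pieced together from the Diff build commands below (a sketch only; $THREADS is the CI parallelism variable and is assumed to be set in your shell):

    ./init
    rm -r build    # the default directory is no longer used by CI
    mkdir build_debug && cd build_debug && cmake .. && make -j$THREADS
    cd .. && mkdir build_coverage && cd build_coverage && cmake -DTEST_COVERAGE=ON .. && make -j$THREADS memgraph__unit
    cd .. && mkdir build_release && cd build_release && cmake -DCMAKE_BUILD_TYPE=release .. && make -j$THREADS memgraph tools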
@@ -181,11 +181,11 @@ target_link_libraries(antlr_opencypher_parser_lib antlr4)
 # -----------------------------------------------------------------------------

 # Optional subproject configuration -------------------------------------------
-option(POC "Build proof of concept binaries" ON)
+option(POC "Build proof of concept binaries" OFF)
 option(EXPERIMENTAL "Build experimental binaries" OFF)
-option(CUSTOMERS "Build customer binaries" ON)
+option(CUSTOMERS "Build customer binaries" OFF)
 option(TEST_COVERAGE "Generate coverage reports from running memgraph" OFF)
-option(TOOLS "Build tools binaries" OFF)
+option(TOOLS "Build tools binaries" ON)

 if (TEST_COVERAGE)
   string(TOLOWER ${CMAKE_BUILD_TYPE} lower_build_type)
@@ -7,7 +7,7 @@ import sys

 # paths
 SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
-BUILD_OUTPUT_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, "build", "output"))
+BUILD_OUTPUT_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, "build_release", "output"))

 # helpers
 def run_cmd(cmd, cwd):
@@ -1,8 +1,11 @@
-- name: Binary
+- name: Binaries
   archive:
-    - build/memgraph
+    - build_debug/memgraph
+    - build_release/memgraph
+    - build_release/tools/src/mg_import_csv
+    - build_release/tools/src/mg_statsd
     - config
-  filename: binary.tar.gz
+  filename: binaries.tar.gz

 - name: Doxygen documentation
   cd: docs/doxygen/html
@@ -11,11 +14,7 @@
   filename: documentation.tar.gz
   host: true

-- name: Release (tarball)
-  project: release
-  archive: build/output/memgraph*.tar.gz
-
-# - name: Release (user docs)
-#   project: release
-#   archive: docs/user_technical/docs.tar.gz
-#   filename: release_user_docs.tar.gz
+- name: Release (user docs)
+  project: ^NEVER$ # TODO (mferencevic): replace with 'release' once user documentation creation is fixed (couscous -> pandoc)
+  archive: docs/user_technical/docs.tar.gz
+  filename: release_user_docs.tar.gz
@@ -1,95 +1,94 @@
 - name: Diff build
   project: ^mg-master-diff$
   commands: |
+    # Copy untouched repository to parent folder.
     cd ..
     cp -r memgraph parent
     cd memgraph

+    # Initialize and create documentation.
     TIMEOUT=600 ./init
     doxygen Doxyfile

-    cd build
-    cmake -DTOOLS=ON ..
+    # Remove default build directory.
+    rm -r build
+
+    # Build debug binaries.
+    mkdir build_debug
+    cd build_debug
+    cmake ..
     TIMEOUT=1000 make -j$THREADS

+    # Build coverage binaries.
     cd ..
     mkdir build_coverage
     cd build_coverage
     cmake -DTEST_COVERAGE=ON ..
     TIMEOUT=1000 make -j$THREADS memgraph__unit

+    # Build release binaries.
     cd ..
     mkdir build_release
     cd build_release
     cmake -DCMAKE_BUILD_TYPE=release ..
-    TIMEOUT=1000 make -j$THREADS memgraph memgraph__macro_benchmark memgraph__stress memgraph__manual__card_fraud_generate_snapshot
+    TIMEOUT=1000 make -j$THREADS memgraph tools memgraph__macro_benchmark memgraph__stress memgraph__manual__card_fraud_generate_snapshot

-    cd ../../parent
+    # Generate distributed card fraud dataset.
+    cd ../tests/distributed/card_fraud
+    ./generate_dataset.sh
+    cd ../../..
+
+    # Checkout to parent commit and initialize.
+    cd ../parent
     git checkout HEAD~1
     TIMEOUT=600 ./init

+    # Build parent release binaries.
     mkdir build_release
     cd build_release
     cmake -DCMAKE_BUILD_TYPE=release ..
     TIMEOUT=1000 make -j$THREADS memgraph memgraph__macro_benchmark
-
-    # Install tools, because they may be needed to run some benchmarks and tests.
-    cd ../../memgraph/tools
-    TIMEOUT=300 ./setup
-
-    # Generate distributed card fraud dataset
-    cd ../tests/distributed/card_fraud
-    ./generate_dataset.sh
-
-
-- name: Debug build
-  project: ^mg-master-debug$
-  commands: |
-    TIMEOUT=600 ./init
-    doxygen Doxyfile
-
-    cd build
-    cmake -DTOOLS=ON ..
-    TIMEOUT=1000 make -j$THREADS
-
-    cd ..
-    mkdir build_release
-    cd build_release
-    cmake -DCMAKE_BUILD_TYPE=release ..
-    TIMEOUT=1000 make -j$THREADS memgraph memgraph__macro_benchmark memgraph__stress
-
-    # Install tools, because they may be needed to run some benchmarks and tests.
-    cd ../tools
-    TIMEOUT=300 ./setup

 # release build is the default one
 - name: Release build
   commands: |
     TIMEOUT=600 ./init
     doxygen Doxyfile
-    cd build
-    cmake -DTOOLS=ON -DCMAKE_BUILD_TYPE=Release -DUSE_READLINE=OFF ..
+
+    # Remove default build directory.
+    rm -r build
+
+    # Build debug binaries.
+    mkdir build_debug
+    cd build_debug
+    cmake ..
     TIMEOUT=1000 make -j$THREADS

-    # Create a binary package (which can then be used for Docker image).
+    # Build coverage binaries.
+    cd ..
+    mkdir build_coverage
+    cd build_coverage
+    cmake -DTEST_COVERAGE=ON ..
+    TIMEOUT=1000 make -j$THREADS memgraph__unit
+
+    # Build release binaries.
+    cd ..
+    mkdir build_release
+    cd build_release
+    cmake -DCMAKE_BUILD_TYPE=Release -DUSE_READLINE=OFF ..
+    TIMEOUT=1000 make -j$THREADS
+
+    # Create Debian package.
     mkdir output
     cd output
-    cpack -D CPACK_SET_DESTDIR=ON -G TGZ --config ../CPackConfig.cmake
     cpack -G DEB --config ../CPackConfig.cmake

-    # Create user technical documentation for community site
+    # Create user technical documentation for community site.
     cd ../../docs/user_technical
+    # TODO (mferencevic): uncomment this once couscous is replaced with pandoc
     #./bundle_community

-    # Install tools, because they may be needed to run some benchmarks and tests.
-    cd ../../tools
-    TIMEOUT=300 ./setup
-
-    # Link build to build_release
-    cd ..
-    ln -s build build_release
+    # Generate distributed card fraud dataset.
+    cd ../../tests/distributed/card_fraud
+    ./generate_dataset.sh
@@ -26,7 +26,12 @@ do

     # run memgraph with durability_directory pointing
     # to examples snapshots_dir
-    cd ../../build/
+    cd ../..
+    if [ ! -d build ]; then
+        cd build_release
+    else
+        cd build
+    fi
     ./memgraph --durability-directory "$script_dir/$snapshots_dir/" \
         --snapshot-on-exit > /dev/null 2>&1 &
     memgraph_pid=$!
@@ -7,7 +7,7 @@ import subprocess
 # paths
 SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
 WORKSPACE_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, "..", ".."))
-TESTS_DIR_REL = os.path.join("..", "build", "tests")
+TESTS_DIR_REL = os.path.join("..", "build_debug", "tests")
 TESTS_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, TESTS_DIR_REL))

 # generation mode
@@ -50,11 +50,13 @@ for test in tests:
         prefix = "TIMEOUT=600 "

     outfile_paths = []
-    if name.startswith("unit") and mode == "diff":
-        dirname = dirname.replace("/build/", "/build_coverage/")
+    if name.startswith("unit"):
+        dirname = dirname.replace("/build_debug/", "/build_coverage/")
         curdir_abs = os.path.normpath(os.path.join(SCRIPT_DIR, dirname))
         curdir_rel = os.path.relpath(curdir_abs, WORKSPACE_DIR)
         outfile_paths.append("\./" + curdir_rel.replace(".", "\\.") + "/.+")
+    elif name.startswith("benchmark"):
+        dirname = dirname.replace("/build_debug/", "/build_release/")

     runs.append({
         "name": name,
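The remap above decides which build tree supplies each test: unit tests run against the coverage build, benchmarks against the release build, and everything else against the debug build. A worked example of the substitution (hypothetical path, shown with bash string replacement rather than the Python above):

    dirname="../../build_debug/tests/unit"
    echo "${dirname//build_debug/build_coverage}"   # prints ../../build_coverage/tests/unit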
@@ -11,4 +11,8 @@ fi

 NUM_MACHINES="$( cat card_fraud.py | grep -m1 "NUM_MACHINES" | tail -c 2 )"

-../../../build_release/tests/manual/card_fraud_generate_snapshot --config config.json --num-workers $NUM_MACHINES --dir $output_dir
+build_dir=../../../build_release
+if [ ! -d $build_dir ]; then
+    build_dir=../../../build
+fi
+$build_dir/tests/manual/card_fraud_generate_snapshot --config config.json --num-workers $NUM_MACHINES --dir $output_dir
@@ -4,9 +4,9 @@
   infiles:
     - . # current directory
     - ../../../build_release/memgraph # memgraph release binary
+    - ../../../build_release/tools/src/mg_import_csv # memgraph csv import tool
     - ../../../config # directory with config files
    - ../../../libs/neo4j # neo4j directory
-    - ../../../tools/mg_import_csv # memgraph csv import tool
     - ../../../tools/plot/ldbc_latency # ldbc plot generation tool
   outfile_paths: # TODO: maybe this should also accept relative paths?
     - \./memgraph/tests/public_benchmark/ldbc/results/.+
@@ -12,6 +12,10 @@ function print_help () {
 script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 base_dir="${script_dir}/../../.."
 neo4j_dir="${base_dir}/libs/neo4j"
+build_dir="${base_dir}/build"
+if [ ! -d "${build_dir}" ]; then
+    build_dir="${base_dir}/build_release"
+fi

 # Add Maven to PATH
 export PATH=$PATH:${script_dir}/maven/bin
@@ -131,6 +135,6 @@ memgraph_snapshot_dir=${dataset_dir}/memgraph/snapshots
 mkdir -p ${memgraph_snapshot_dir}
 cd ${memgraph_snapshot_dir}
 echo "Converting CSV dataset to '${memgraph_snapshot_dir}/snapshot'"
-${base_dir}/tools/mg_import_csv --out snapshot ${csv_dataset} --csv-delimiter "|" --array-delimiter ";"
+${build_dir}/tools/src/mg_import_csv --out snapshot ${csv_dataset} --csv-delimiter "|" --array-delimiter ";"

 echo "Done!"
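This hunk is the change behind the "Fix LDBC run" summary: the LDBC setup now imports the CSV dataset with the mg_import_csv binary from the build tree (under tools/src) instead of the old script in the source tools directory. With build_dir resolved to ${base_dir}/build_release by the fallback a few hunks above, the command expands to roughly:

    ${base_dir}/build_release/tools/src/mg_import_csv --out snapshot ${csv_dataset} --csv-delimiter "|" --array-delimiter ";"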
@@ -2,7 +2,7 @@
   commands: TIMEOUT=300 ./continuous_integration
   infiles:
     - . # current directory
-    - ../../build/memgraph # memgraph debug binary
+    - ../../build_debug/memgraph # memgraph debug binary
     - ../../config # directory with config files
   outfile_paths:
     - \./memgraph/tests/qa/\.quality_assurance_status
tests/qa/run
@@ -47,15 +47,16 @@ fi
 # save the path where this script is
 script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 memgraph_src_dir=${script_dir}/../..

 memgraph_build_dir=${memgraph_src_dir}/build
+if [ ! -d ${memgraph_build_dir} ]; then
+    memgraph_build_dir=${memgraph_src_dir}/build_debug
+fi
+memgraph_binary=${memgraph_build_dir}/memgraph

 # activate virtualenv
 source $script_dir/ve3/bin/activate

-cd ${memgraph_build_dir}
-# binary is available after the build
-binary_name=memgraph
-
 # get full path to memgraph config for interpreted queries
 config_path="${memgraph_src_dir}/config/testing.conf"
@@ -73,7 +74,7 @@ fi
 set +e

 # run memgraph
-MEMGRAPH_CONFIG="$config_path" "$memgraph_build_dir/$binary_name" --min-log-level=$loglevel 1>&2 &
+MEMGRAPH_CONFIG="$config_path" "$memgraph_binary" --min-log-level=$loglevel 1>&2 &
 background_pid=$!

 function cleanup_and_exit {
@@ -62,7 +62,7 @@ LARGE_DATASET = [
 ] * 6 + [
     {
         "test": "long_running.cpp",
-        "options": ["--vertex-count", "400000", "--edge-count", "1600000", "--max-time", "480", "--verify", "300"],
+        "options": ["--vertex-count", "200000", "--edge-count", "1000000", "--max-time", "480", "--verify", "300"],
         "timeout": 500,
     },
 ]
@@ -11,7 +11,6 @@
     - \./memgraph/tools/apollo/\.cppcheck_errors

 - name: code_coverage
-  project: ^mg-master-diff$ # regex to match only 'mg-master-diff'
   type: data process
   commands: ./coverage_convert
   infiles:
@@ -1 +0,0 @@
-
@@ -1 +0,0 @@
-
@@ -12,7 +12,7 @@ if echo "$PROJECT" | grep "release" >/dev/null 2>/dev/null; then
 fi

 if [ "$mode" == diff ]; then
-    files=$( git diff --name-only HEAD~1 HEAD | egrep '^(src|tests|poc)' | egrep '.(hpp|h|cpp)$' )
+    files=$( git diff --name-only HEAD~1 HEAD | egrep '^(src|tests|poc)' | egrep '\.(hpp|h|cpp)$' )
     flags=""
 else
     files=src/
@@ -1,327 +0,0 @@
-#!/usr/bin/python3
-import json
-import os
-import re
-import shutil
-import subprocess
-import sys
-
-# paths
-SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
-BASE_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, "..", ".."))
-WORKSPACE_DIR = os.path.normpath(os.path.join(BASE_DIR, ".."))
-BASE_DIR_NAME = os.path.basename(BASE_DIR)
-BUILD_DIR = os.path.join(BASE_DIR, "build")
-LIBS_DIR = os.path.join(BASE_DIR, "libs")
-TESTS_DIR = os.path.join(BUILD_DIR, "tests")
-TOOLS_BUILD_DIR = os.path.join(BUILD_DIR, "tools")
-OUTPUT_DIR = os.path.join(BUILD_DIR, "apollo")
-
-# output lists
-ARCHIVES = []
-RUNS = []
-DATA_PROCESS = []
-
-# generation mode
-if len(sys.argv) >= 2:
-    mode = sys.argv[1]
-else:
-    mode = "diff"
-
-# helper functions
-def run_cmd(cmd, cwd):
-    ret = subprocess.run(cmd, cwd = cwd, stdout = subprocess.PIPE, check = True)
-    return ret.stdout.decode("utf-8")
-
-def find_memgraph_binary(loc):
-    return run_cmd(["find", ".", "-maxdepth", "1", "-executable", "-type",
-            "f", "-name", "memgraph*"], loc).split("\n")[0][2:]
-
-def generate_run(name, typ = "run", supervisor = "", commands = "",
-        arguments = "", enable_network = False,
-        outfile_paths = "", infile = "", slave_group = "local",
-        link_to_run = ""):
-    if not commands.endswith("\n"): commands += "\n"
-    return dict(name = name, type = typ, supervisor = supervisor,
-            commands = commands, arguments = arguments,
-            enable_network = enable_network, outfile_paths = outfile_paths,
-            infile = infile, slave_group = slave_group,
-            link_to_run = link_to_run)
-
-def generate_archive(name, short_name, archive):
-    return dict(name = name, short_name = short_name, archive = archive)
-
-def create_archive(name, files, cwd):
-    oname = name + ".tar.gz"
-    ofile = os.path.join(OUTPUT_DIR, oname)
-    print("Creating archive:", name)
-    for i in range(len(files)):
-        files[i] = os.path.relpath(files[i], cwd)
-    subprocess.run(["tar", "-cpzf", ofile, "-C", cwd] + files, check = True)
-    return oname
-
-def store_metadata(cwd, name, data):
-    json.dump(data, open(os.path.join(cwd, name + ".json"), "w"))
-
-# create output directory
-if os.path.exists(OUTPUT_DIR):
-    shutil.rmtree(OUTPUT_DIR)
-os.makedirs(OUTPUT_DIR)
-
-# store memgraph binary to archive
-binary_name = find_memgraph_binary(BUILD_DIR)
-binary_path = os.path.join(BUILD_DIR, binary_name)
-binary_link_path = os.path.join(BUILD_DIR, "memgraph")
-config_path = os.path.join(BASE_DIR, "config")
-config_copy_path = os.path.join(BUILD_DIR, "config")
-if os.path.exists(config_copy_path):
-    shutil.rmtree(config_copy_path)
-shutil.copytree(config_path, config_copy_path)
-archive = create_archive("binary", [binary_path, config_copy_path], BUILD_DIR)
-ARCHIVES.append(generate_archive("Binary", "binary", archive))
-
-# store documentation to archive
-docs_path = os.path.join(BASE_DIR, "docs", "doxygen", "html")
-archive = create_archive("doxygen_documentation", [docs_path], docs_path)
-ARCHIVES.append(generate_archive("Doxygen documentation", "doxygen_documentation", archive))
-
-# store release deb and tarball to archive
-if mode == "release":
-    print("Copying release packages")
-    build_output_dir = os.path.join(BUILD_DIR, "output")
-    deb_name = run_cmd(["find", ".", "-maxdepth", "1", "-type", "f",
-            "-name", "memgraph*.deb"], build_output_dir).split("\n")[0][2:]
-    arch = run_cmd(["dpkg", "--print-architecture"], build_output_dir).split("\n")[0]
-    version = binary_name.split("-")[1]
-    # Generate Debian package file name as expected by Debian Policy.
-    standard_deb_name = "memgraph_{}-1_{}.deb".format(version, arch)
-    tarball_name = run_cmd(["find", ".", "-maxdepth", "1", "-type", "f",
-            "-name", "memgraph*.tar.gz"], build_output_dir).split("\n")[0][2:]
-    shutil.copyfile(os.path.join(build_output_dir, deb_name),
-            os.path.join(OUTPUT_DIR, standard_deb_name))
-    shutil.copyfile(os.path.join(build_output_dir, tarball_name),
-            os.path.join(OUTPUT_DIR, tarball_name))
-    ARCHIVES.append(generate_archive("Release (deb package)", standard_deb_name, standard_deb_name))
-    ARCHIVES.append(generate_archive("Release (tarball)", tarball_name, tarball_name))
-
-# store user documentation to archive
-if mode == "release":
-    print("Copying release documentation")
-    shutil.copyfile(os.path.join(BASE_DIR, "docs", "user_technical",
-            "docs.tar.gz"), os.path.join(OUTPUT_DIR, "release_user_docs.tar.gz"))
-    ARCHIVES.append(generate_archive("Release (user docs)", "release_user_docs",
-            "release_user_docs.tar.gz"))
-
-# cppcheck run
-cppcheck = os.path.join(BASE_DIR, "tools", "apollo", "cppcheck")
-check_dirs = list(map(lambda x: os.path.join(BASE_DIR, x), ["src", "tests",
-        "poc", ".git"])) + [cppcheck]
-archive = create_archive("cppcheck", check_dirs, WORKSPACE_DIR)
-cmd = os.path.relpath(cppcheck, WORKSPACE_DIR)
-outfile_paths = "\./" + cmd.replace("cppcheck", ".cppcheck_errors").replace(".", "\\.")
-RUNS.append(generate_run("cppcheck", commands = 'TIMEOUT=2400 ./{} {}'.format(cmd, mode),
-        infile = archive, outfile_paths = outfile_paths))
-
-# TODO: Refactor apollo/generate to be a config file which specifies how
-# each test is run and which files it depends on.
-
-# ctest tests
-ctest_output = run_cmd(["ctest", "-N"], TESTS_DIR)
-tests = []
-
-# test ordering: first unit, then concurrent, then everything else
-CTEST_ORDER = {"unit": 0, "concurrent": 1}
-CTEST_DELIMITER = "__"
-for row in ctest_output.split("\n"):
-    # Filter rows only containing tests.
-    if not re.match("^\s*Test\s+#", row): continue
-    test_name = row.split(":")[1].strip()
-    # We prefix all test names with BASE_DIR_NAME
-    name = test_name.replace(BASE_DIR_NAME + CTEST_DELIMITER, "")
-    path = os.path.join(TESTS_DIR, name.replace(CTEST_DELIMITER, "/", 1))
-    order = CTEST_ORDER.get(name.split(CTEST_DELIMITER)[0], len(CTEST_ORDER))
-    tests.append((order, name, path))
-
-tests.sort()
-
-for test in tests:
-    order, name, path = test
-    dirname = os.path.dirname(path)
-    cmakedir = os.path.join(dirname, "CMakeFiles",
-            BASE_DIR_NAME + CTEST_DELIMITER + name + ".dir")
-    files = [path, cmakedir]
-
-    # skip benchmark tests on diffs
-    if name.startswith("benchmark") and mode == "diff":
-        continue
-
-    # larger timeout for benchmark tests
-    prefix = ""
-    if name.startswith("benchmark"):
-        prefix = "TIMEOUT=600 "
-
-    cwd = os.path.dirname(BASE_DIR)
-    infile = create_archive(name, files, cwd = cwd)
-
-    exepath = os.path.relpath(path, cwd)
-    commands = "cd {}\n{}./{}\n".format(os.path.dirname(exepath),
-            prefix, os.path.basename(exepath))
-
-    outfile_paths = ""
-    if name.startswith("unit"):
-        cmakedir_rel = os.path.relpath(cmakedir, WORKSPACE_DIR)
-        outfile_paths = "\./" + cmakedir_rel.replace(".", "\\.") + ".+\n"
-    run = generate_run(name, commands = commands, infile = infile,
-            outfile_paths = outfile_paths)
-
-    RUNS.append(run)
-
-# quality assurance tests
-qa_path = os.path.join(BASE_DIR, "tests", "qa")
-infile = create_archive("quality_assurance", [qa_path, binary_path,
-        binary_link_path, config_path], cwd = WORKSPACE_DIR)
-commands = "cd memgraph/tests/qa\nTIMEOUT=300 ./continuous_integration\n"
-RUNS.append(generate_run("quality_assurance", commands = commands,
-        infile = infile, outfile_paths = "\./memgraph/tests/qa/"
-        "\.quality_assurance_status"))
-
-# build release paths
-if mode == "release":
-    BUILD_RELEASE_DIR = os.path.join(BASE_DIR, "build")
-else:
-    BUILD_RELEASE_DIR = os.path.join(BASE_DIR, "build_release")
-binary_release_name = find_memgraph_binary(BUILD_RELEASE_DIR)
-binary_release_path = os.path.join(BUILD_RELEASE_DIR, binary_release_name)
-binary_release_link_path = os.path.join(BUILD_RELEASE_DIR, "memgraph")
-
-# macro benchmark tests
-MACRO_BENCHMARK_ARGS = (
-    "QuerySuite MemgraphRunner "
-    "--groups aggregation 1000_create unwind_create dense_expand match "
-    "--no-strict --database-cpu-ids 1 --client-cpu-ids 2")
-MACRO_PARALLEL_BENCHMARK_ARGS = (
-    "QueryParallelSuite MemgraphRunner --groups aggregation_parallel "
-    "create_parallel bfs_parallel --database-cpu-ids 1 2 3 4 5 6 7 8 9 "
-    "--client-cpu-ids 10 11 12 13 14 15 16 17 18 19 "
-    "--num-database-workers 9 --num-clients-workers 30 --no-strict")
-
-macro_bench_path = os.path.join(BASE_DIR, "tests", "macro_benchmark")
-harness_client_binaries = os.path.join(BUILD_RELEASE_DIR, "tests",
-        "macro_benchmark")
-postgresql_lib_dir = os.path.join(LIBS_DIR, "postgresql", "lib")
-infile = create_archive("macro_benchmark", [binary_release_path,
-        binary_release_link_path, macro_bench_path, config_path,
-        harness_client_binaries, postgresql_lib_dir], cwd = WORKSPACE_DIR)
-supervisor = "./memgraph/tests/macro_benchmark/harness"
-outfile_paths = "\./memgraph/tests/macro_benchmark/\.harness_summary"
-RUNS.append(generate_run("macro_benchmark__query_suite",
-        supervisor = supervisor,
-        arguments = MACRO_BENCHMARK_ARGS,
-        infile = infile,
-        outfile_paths = outfile_paths))
-RUNS.append(generate_run("macro_benchmark__query_parallel_suite",
-        supervisor = supervisor,
-        arguments = MACRO_PARALLEL_BENCHMARK_ARGS,
-        infile = infile,
-        outfile_paths = outfile_paths,
-        slave_group = "remote_20c140g"))
-
-# macro benchmark parent tests
-if mode == "diff":
-    PARENT_DIR = os.path.join(WORKSPACE_DIR, "parent")
-    BUILD_PARENT_DIR = os.path.join(PARENT_DIR, "build")
-    LIBS_PARENT_DIR = os.path.join(PARENT_DIR, "libs")
-    binary_parent_name = find_memgraph_binary(BUILD_PARENT_DIR)
-    binary_parent_path = os.path.join(BUILD_PARENT_DIR, binary_parent_name)
-    binary_parent_link_path = os.path.join(BUILD_PARENT_DIR, "memgraph")
-    parent_config_path = os.path.join(PARENT_DIR, "config")
-    parent_macro_bench_path = os.path.join(PARENT_DIR, "tests", "macro_benchmark")
-    parent_harness_client_binaries = os.path.join(BUILD_PARENT_DIR, "tests",
-            "macro_benchmark")
-    parent_postgresql_lib_dir = os.path.join(LIBS_PARENT_DIR, "postgresql", "lib")
-    infile = create_archive("macro_benchmark_parent", [binary_parent_path,
-            binary_parent_link_path, parent_macro_bench_path, parent_config_path,
-            parent_harness_client_binaries, parent_postgresql_lib_dir],
-            cwd = WORKSPACE_DIR)
-    supervisor = "./parent/tests/macro_benchmark/harness"
-    args = MACRO_BENCHMARK_ARGS + " --RunnerBin " + binary_parent_path
-    outfile_paths = "\./parent/tests/macro_benchmark/\.harness_summary"
-    RUNS.append(generate_run("macro_benchmark_parent__query_suite",
-            supervisor = supervisor,
-            arguments = MACRO_BENCHMARK_ARGS + " --RunnerBin " + binary_parent_path,
-            infile = infile,
-            outfile_paths = outfile_paths,
-            link_to_run = "macro_benchmark__query_suite"))
-    RUNS.append(generate_run("macro_benchmark_parent__query_parallel_suite",
-            supervisor = supervisor,
-            arguments = MACRO_PARALLEL_BENCHMARK_ARGS + " --RunnerBin " + binary_parent_path,
-            infile = infile,
-            outfile_paths = outfile_paths,
-            slave_group = "remote_20c140g",
-            link_to_run = "macro_benchmark__query_parallel_suite"))
-
-# macro benchmark comparison data process
-script_path = os.path.join(BASE_DIR, "tools", "apollo",
-        "macro_benchmark_summary")
-infile = create_archive("macro_benchmark_summary", [script_path],
-        cwd = WORKSPACE_DIR)
-cmd = "./memgraph/tools/apollo/macro_benchmark_summary " \
-        "--current " \
-        "macro_benchmark__query_suite/memgraph/tests/macro_benchmark/.harness_summary " \
-        "macro_benchmark__query_parallel_suite/memgraph/tests/macro_benchmark/.harness_summary " \
-        "--previous " \
-        "macro_benchmark_parent__query_suite/parent/tests/macro_benchmark/.harness_summary " \
-        "macro_benchmark_parent__query_parallel_suite/parent/tests/macro_benchmark/.harness_summary " \
-        "--output .harness_summary"
-outfile_paths = "\./.harness_summary"
-DATA_PROCESS.append(generate_run("macro_benchmark_summary", typ = "data process",
-        commands = cmd, infile = infile, outfile_paths = outfile_paths))
-
-# stress tests
-stress_path = os.path.join(BASE_DIR, "tests", "stress")
-stress_binary_path = os.path.join(BUILD_RELEASE_DIR, "tests", "stress")
-infile = create_archive("stress", [binary_release_path,
-        binary_release_link_path, stress_path, stress_binary_path,
-        config_path],
-        cwd = WORKSPACE_DIR)
-cmd = "cd memgraph/tests/stress\nTIMEOUT=600 ./continuous_integration"
-RUNS.append(generate_run("stress", commands = cmd, infile = infile))
-# stress tests for daily release (large dataset)
-if mode == "release":
-    cmd = "cd memgraph/tests/stress\nTIMEOUT=43200 ./continuous_integration" \
-            " --large-dataset"
-    RUNS.append(generate_run("stress_large", commands = cmd, infile = infile,
-            slave_group = "remote_16c56g"))
-
-# public_benchmark/ldbc tests
-if mode == "release":
-    ldbc_path = os.path.join(BASE_DIR, "tests", "public_benchmark", "ldbc")
-    neo4j_path = os.path.join(BASE_DIR, "libs", "neo4j")
-    mg_import_csv_path = os.path.join(BASE_DIR, "tools", "mg_import_csv")
-    plot_ldbc_latency_path = os.path.join(BASE_DIR, "tools", "plot_ldbc_latency")
-    infile = create_archive("ldbc", [binary_release_path, ldbc_path,
-            binary_release_link_path, neo4j_path, config_path,
-            mg_import_csv_path, plot_ldbc_latency_path],
-            cwd = WORKSPACE_DIR)
-    cmd = "cd memgraph/tests/public_benchmark/ldbc\n. continuous_integration\n"
-    outfile_paths = "\./memgraph/tests/public_benchmark/ldbc/results/.+\n" \
-            "\./memgraph/tests/public_benchmark/ldbc/plots/.+\n"
-    RUNS.append(generate_run("public_benchmark__ldbc", commands = cmd,
-            infile = infile, outfile_paths = outfile_paths,
-            slave_group = "remote_20c140g", enable_network = True))
-
-# tools tests
-ctest_output = run_cmd(["ctest", "-N"], TOOLS_BUILD_DIR)
-tools_infile = create_archive("tools_test", [TOOLS_BUILD_DIR], cwd = WORKSPACE_DIR)
-for row in ctest_output.split("\n"):
-    # Filter rows only containing tests.
-    if not re.match("^\s*Test\s+#", row): continue
-    test_name = row.split(":")[1].strip()
-    test_dir = os.path.relpath(TOOLS_BUILD_DIR, WORKSPACE_DIR)
-    commands = "cd {}\nctest --output-on-failure -R \"^{}$\"".format(test_dir, test_name)
-    run = generate_run("tools_" + test_name, commands = commands, infile = tools_infile)
-    RUNS.append(run)
-
-# store ARCHIVES and RUNS
-store_metadata(OUTPUT_DIR, "archives", ARCHIVES)
-store_metadata(OUTPUT_DIR, "runs", RUNS + DATA_PROCESS)
@@ -17,4 +17,4 @@ endif()
 install(TARGETS mg_import_csv RUNTIME DESTINATION bin)

 # Target for building all the tool executables.
-add_custom_target(tools DEPENDS mg_import_csv)
+add_custom_target(tools DEPENDS mg_import_csv mg_statsd)
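With mg_statsd added to the aggregate target, one make invocation builds every tool binary, which is what the new make -j$THREADS memgraph tools line in the Diff build project relies on. For example (assuming a configured build_release directory):

    cd build_release
    make tools    # builds mg_import_csv and mg_statsd under tools/src/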
@@ -6,7 +6,7 @@ import subprocess

 # paths
 SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
-TESTS_DIR_REL = os.path.join("..", "..", "build", "tools", "tests")
+TESTS_DIR_REL = os.path.join("..", "..", "build_debug", "tools", "tests")
 TESTS_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, TESTS_DIR_REL))

 # ctest tests