Add initial version of Apollo config files

Summary:
Improve Apollo config files

Add name to apollo_build

Remove old generate script from build

Add build_release symlink to release build

Rename 'args' to 'arguments'

Add run definition for cppcheck

Host doxygen documentation

Reviewers: teon.banek

Reviewed By: teon.banek

Subscribers: pullbot

Differential Revision: https://phabricator.memgraph.io/D1095
Author: Matej Ferencevic, 2018-01-10 13:53:42 +01:00
Parent: 87f8c4e256
Commit: 499ad3ba15
15 changed files with 367 additions and 100 deletions


@ -705,7 +705,7 @@ CITE_BIB_FILES =
# messages are off.
# The default value is: NO.
QUIET = NO
QUIET = YES
# The WARNINGS tag can be used to turn on/off the warning messages that are
# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES
@ -721,7 +721,7 @@ WARNINGS = YES
# will automatically be disabled.
# The default value is: YES.
WARN_IF_UNDOCUMENTED = YES
WARN_IF_UNDOCUMENTED = NO
# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
# potential errors in the documentation, such as not documenting some parameters

apollo_archives.py Executable file

@ -0,0 +1,39 @@
#!/usr/bin/env python3
import json
import os
import re
import subprocess
import sys
# paths
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
BUILD_OUTPUT_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, "build", "output"))
# helpers
def run_cmd(cmd, cwd):
    return subprocess.run(cmd, cwd=cwd, check=True,
                          stdout=subprocess.PIPE).stdout.decode("utf-8")

# check project
if re.search(r"release", os.environ.get("PROJECT", "")) is None:
    print(json.dumps([]))
    sys.exit(0)

# generate archive
deb_name = run_cmd(["find", ".", "-maxdepth", "1", "-type", "f",
                    "-name", "memgraph*.deb"], BUILD_OUTPUT_DIR).split("\n")[0][2:]
arch = run_cmd(["dpkg", "--print-architecture"], BUILD_OUTPUT_DIR).split("\n")[0]
version = deb_name.split("-")[1]

# generate Debian package file name as expected by Debian Policy
standard_deb_name = "memgraph_{}-1_{}.deb".format(version, arch)

archive_path = os.path.relpath(os.path.join(BUILD_OUTPUT_DIR,
                                            deb_name), SCRIPT_DIR)

archives = [{
    "name": "Release (deb package)",
    "archive": archive_path,
    "filename": standard_deb_name,
}]
print(json.dumps(archives, indent=4, sort_keys=True))
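
For illustration, on a release build this script prints a single-entry JSON list shaped like the one below; the package name, version and architecture are hypothetical stand-ins for whatever the find and dpkg calls actually return:

[
    {
        "archive": "build/output/memgraph-0.9.0-Linux.deb",
        "filename": "memgraph_0.9.0-1_amd64.deb",
        "name": "Release (deb package)"
    }
]

On any project whose name does not contain "release", the PROJECT check above prints an empty list ([]) and exits early.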

apollo_archives.yaml Normal file

@ -0,0 +1,21 @@
- name: Binary
  archive:
    - build/memgraph
    - config
  filename: binary.tar.gz

- name: Doxygen documentation
  cd: docs/doxygen/html
  archive:
    - .
  filename: documentation.tar.gz
  host: true

- name: Release (tarball)
  project: release
  archive: build/output/memgraph*.tar.gz

# - name: Release (user docs)
#   project: release
#   archive: docs/user_technical/docs.tar.gz
#   filename: release_user_docs.tar.gz

apollo_build.yaml Normal file

@ -0,0 +1,84 @@
- name: Diff build
  project: ^mg-master-diff$
  commands: |
    cd ..
    cp -r memgraph parent
    cd memgraph

    TIMEOUT=600 ./init
    doxygen Doxyfile

    cd build
    cmake -DTOOLS=ON -DTEST_COVERAGE=ON ..
    TIMEOUT=1000 make -j$THREADS

    cd ..
    mkdir build_release
    cd build_release
    cmake -DCMAKE_BUILD_TYPE=release ..
    TIMEOUT=1000 make -j$THREADS memgraph memgraph__macro_benchmark memgraph__stress

    cd ../../parent
    git checkout HEAD~1
    TIMEOUT=600 ./init
    mkdir build_release
    cd build_release
    cmake -DCMAKE_BUILD_TYPE=release ..
    TIMEOUT=1000 make -j$THREADS memgraph memgraph__macro_benchmark

    # Install tools, because they may be needed to run some benchmarks and tests.
    cd ../../memgraph/tools
    TIMEOUT=300 ./setup

- name: Debug build
  project: ^mg-master-debug$
  commands: |
    TIMEOUT=600 ./init
    doxygen Doxyfile

    cd build
    cmake -DTOOLS=ON ..
    TIMEOUT=1000 make -j$THREADS

    cd ..
    mkdir build_release
    cd build_release
    cmake -DCMAKE_BUILD_TYPE=release ..
    TIMEOUT=1000 make -j$THREADS memgraph memgraph__macro_benchmark memgraph__stress

    # Install tools, because they may be needed to run some benchmarks and tests.
    cd ../tools
    TIMEOUT=300 ./setup

# release build is the default one
- name: Release build
  commands: |
    TIMEOUT=600 ./init
    doxygen Doxyfile

    cd build
    cmake -DTOOLS=ON -DCMAKE_BUILD_TYPE=Release -DUSE_READLINE=OFF ..
    TIMEOUT=1000 make -j$THREADS

    # Create a binary package (which can then be used for Docker image).
    mkdir output
    cd output
    cpack -D CPACK_SET_DESTDIR=ON -G TGZ --config ../CPackConfig.cmake
    cpack -G DEB --config ../CPackConfig.cmake

    # Create user technical documentation for community site
    cd ../../docs/user_technical
    #./bundle_community

    # Install tools, because they may be needed to run some benchmarks and tests.
    cd ../../tools
    TIMEOUT=300 ./setup

    # Link build to build_release
    cd ..
    ln -s build build_release

tests/apollo_runs.py Executable file

@ -0,0 +1,72 @@
#!/usr/bin/env python3
import json
import os
import re
import subprocess
# paths
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
WORKSPACE_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, "..", ".."))
TESTS_DIR_REL = os.path.join("..", "build", "tests")
TESTS_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, TESTS_DIR_REL))
# generation mode
mode = "release"
if os.environ.get("PROJECT", "") == "mg-master-diff":
    mode = "diff"

# ctest tests
ctest_output = subprocess.run(["ctest", "-N"], cwd=TESTS_DIR, check=True,
                              stdout=subprocess.PIPE).stdout.decode("utf-8")

tests = []

# test ordering: first unit, then concurrent, then everything else
CTEST_ORDER = {"unit": 0, "concurrent": 1}
CTEST_DELIMITER = "__"
for row in ctest_output.split("\n"):
    # Filter rows only containing tests.
    if not re.match(r"^\s*Test\s+#", row): continue
    test_name = row.split(":")[1].strip()
    name = test_name.replace("memgraph" + CTEST_DELIMITER, "")
    path = os.path.join(TESTS_DIR_REL, name.replace(CTEST_DELIMITER, "/", 1))
    order = CTEST_ORDER.get(name.split(CTEST_DELIMITER)[0], len(CTEST_ORDER))
    tests.append((order, name, path))

tests.sort()

runs = []
for test in tests:
    order, name, path = test
    dirname, basename = os.path.split(path)
    cmakedir = os.path.join("CMakeFiles",
                            "memgraph" + CTEST_DELIMITER + name + ".dir")
    files = [basename, cmakedir]

    # extra files for specific tests
    if name == "unit__fswatcher":
        files.append(os.path.join("..", "data"))

    # skip benchmark tests on diffs
    if name.startswith("benchmark") and mode == "diff":
        continue

    # larger timeout for benchmark tests
    prefix = ""
    if name.startswith("benchmark"):
        prefix = "TIMEOUT=600 "

    outfile_paths = []
    if name.startswith("unit"):
        cmakedir_abs = os.path.join(TESTS_DIR, "unit", cmakedir)
        cmakedir_rel = os.path.relpath(cmakedir_abs, WORKSPACE_DIR)
        outfile_paths.append(r"\./" + cmakedir_rel.replace(".", r"\.") + ".+")

    runs.append({
        "name": name,
        "cd": dirname,
        "commands": prefix + "./" + basename,
        "infiles": files,
        "outfile_paths": outfile_paths,
    })
print(json.dumps(runs, indent=4, sort_keys=True))
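
For illustration, a hypothetical ctest entry named memgraph__unit__skiplist (the test name is made up) would produce a run entry along these lines, assuming the repository is checked out into a directory called memgraph as the build config expects:

{
    "cd": "../build/tests/unit",
    "commands": "./skiplist",
    "infiles": [
        "skiplist",
        "CMakeFiles/memgraph__unit__skiplist.dir"
    ],
    "name": "unit__skiplist",
    "outfile_paths": [
        "\\./memgraph/build/tests/unit/CMakeFiles/memgraph__unit__skiplist\\.dir.+"
    ]
}

Benchmark tests instead get a TIMEOUT=600 prefix on their command and are skipped entirely when PROJECT is mg-master-diff.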


@ -0,0 +1,61 @@
- name: macro_benchmark__query_suite
  supervisor: ./harness
  arguments: &QUERY_SUITE_ARGS >
    QuerySuite MemgraphRunner
    --groups aggregation 1000_create unwind_create dense_expand match
    --no-strict --database-cpu-ids 1 --client-cpu-ids 2
  infiles: &MACRO_BENCHMARK_INFILES
    - . # current directory
    - ../../build_release/memgraph # memgraph release binary
    - ../../config # directory with config files
    - ../../build_release/tests/macro_benchmark # macro benchmark client binaries
    - ../../libs/postgresql/lib # postgresql libs dir (for client binaries)
  outfile_paths: &MACRO_BENCHMARK_OUTFILE_PATHS
    - \./memgraph/tests/macro_benchmark/\.harness_summary

- name: macro_benchmark__query_parallel_suite
  supervisor: ./harness
  arguments: &QUERY_PARALLEL_SUITE_ARGS >
    QueryParallelSuite MemgraphRunner
    --groups aggregation_parallel create_parallel bfs_parallel
    --database-cpu-ids 1 2 3 4 5 6 7 8 9
    --client-cpu-ids 10 11 12 13 14 15 16 17 18 19
    --num-database-workers 9 --num-clients-workers 30
    --no-strict
  infiles: *MACRO_BENCHMARK_INFILES
  outfile_paths: *MACRO_BENCHMARK_OUTFILE_PATHS
  slave_group: &QUERY_PARALLEL_SUITE_SLAVE_GROUP remote_20c140g

- name: macro_benchmark_parent__query_suite
  project: ^mg-master-diff$ # regex to match only 'mg-master-diff'
  cd: ../../../parent/tests/macro_benchmark # all files are relative to the parent build
  supervisor: ./harness
  arguments: *QUERY_SUITE_ARGS
  infiles: *MACRO_BENCHMARK_INFILES
  outfile_paths: &MACRO_BENCHMARK_PARENT_OUTFILE_PATHS
    - \./parent/tests/macro_benchmark/\.harness_summary
  link_to_run: macro_benchmark__query_suite

- name: macro_benchmark_parent__query_parallel_suite
  project: ^mg-master-diff$ # regex to match only 'mg-master-diff'
  cd: ../../../parent/tests/macro_benchmark # all files are relative to the parent build
  supervisor: ./harness
  arguments: *QUERY_PARALLEL_SUITE_ARGS
  infiles: *MACRO_BENCHMARK_INFILES
  outfile_paths: *MACRO_BENCHMARK_PARENT_OUTFILE_PATHS
  link_to_run: macro_benchmark__query_parallel_suite
  slave_group: *QUERY_PARALLEL_SUITE_SLAVE_GROUP

- name: macro_benchmark_summary
  project: ^mg-master-diff$ # regex to match only 'mg-master-diff'
  type: data process
  cd: ../../..
  commands: >
    ./memgraph/tools/apollo/macro_benchmark_summary
    --current macro_benchmark__query_suite/memgraph/tests/macro_benchmark/.harness_summary macro_benchmark__query_parallel_suite/memgraph/tests/macro_benchmark/.harness_summary
    --previous macro_benchmark_parent__query_suite/parent/tests/macro_benchmark/.harness_summary macro_benchmark_parent__query_parallel_suite/parent/tests/macro_benchmark/.harness_summary
    --output .harness_summary
  infiles:
    - memgraph/tools/apollo/macro_benchmark_summary
  outfile_paths:
    - \./.harness_summary
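
The &NAME/*NAME pairs in this file are ordinary YAML anchors and aliases: the harness arguments and infile lists are written once on the current-build runs and reused verbatim by the parent-build runs. A minimal sketch of how an alias resolves, using PyYAML purely for illustration (the commit does not say which YAML loader Apollo uses):

import yaml  # PyYAML, assumed here only to demonstrate anchor resolution

doc = yaml.safe_load("""
- name: macro_benchmark__query_suite
  arguments: &QUERY_SUITE_ARGS >
    QuerySuite MemgraphRunner --groups aggregation
- name: macro_benchmark_parent__query_suite
  arguments: *QUERY_SUITE_ARGS
""")

# The alias expands to the same folded string as the anchor, so the parent
# run is invoked with exactly the same harness arguments.
assert doc[0]["arguments"] == doc[1]["arguments"]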


@ -0,0 +1,15 @@
- name: public_benchmark__ldbc
  project: release
  commands: . continuous_integration
  infiles:
    - . # current directory
    - ../../../build_release/memgraph # memgraph release binary
    - ../../../config # directory with config files
    - ../../../libs/neo4j # neo4j directory
    - ../../../tools/mg_import_csv # memgraph csv import tool
    - ../../../tools/plot_ldbc_latency # ldbc plot generation tool
  outfile_paths: # TODO: maybe this should also accept relative paths?
    - \./memgraph/tests/public_benchmark/ldbc/results/.+
    - \./memgraph/tests/public_benchmark/ldbc/plots/.+
  slave_group: remote_20c140g
  enable_network: true


@ -0,0 +1,8 @@
- name: quality_assurance
  commands: TIMEOUT=300 ./continuous_integration
  infiles:
    - . # current directory
    - ../../build/memgraph # memgraph debug binary
    - ../../config # directory with config files
  outfile_paths:
    - \./memgraph/tests/qa/\.quality_assurance_status


@ -0,0 +1,13 @@
- name: stress
  commands: TIMEOUT=600 ./continuous_integration
  infiles: &STRESS_INFILES
    - . # current directory
    - ../../build_release/memgraph # memgraph release binary
    - ../../config # directory with config files
    - ../../build_release/tests/stress # stress client binaries

- name: stress_large
  project: release
  commands: TIMEOUT=43200 ./continuous_integration --large-dataset
  infiles: *STRESS_INFILES
  slave_group: remote_16c56g


@ -0,0 +1,10 @@
- name: cppcheck
  commands: TIMEOUT=2400 ./cppcheck
  infiles:
    - cppcheck # cppcheck script
    - ../../src # src source dir
    - ../../tests # tests source dir
    - ../../poc # poc source dir
    - ../../.git # git directory (used to determine which files changed in the commit)
  outfile_paths:
    - \./memgraph/tools/apollo/\.cppcheck_errors


@ -1,27 +1 @@
# WARNING: do not run this script without defining THREADS!
# If THREADS isn't defined then this script will call 'make -j'.
# From the manpage: "If the -j option is given without an argument, make will not limit the number of jobs that can run simultaneously."
# That means that the whole build will be started simultaneously and IT WILL CRASH YOUR COMPUTER!
cd ../..
TIMEOUT=600 ./init
bash -c "doxygen Doxyfile >/dev/null 2>/dev/null"
cd build
cmake -DTOOLS=ON ..
TIMEOUT=1000 make -j$THREADS
cd ..
mkdir build_release
cd build_release
cmake -DCMAKE_BUILD_TYPE=release ..
TIMEOUT=1000 make -j$THREADS memgraph memgraph__macro_benchmark memgraph__stress
# Install tools, because they may be needed to run some benchmarks and tests.
cd ../tools
TIMEOUT=300 ./setup
cd apollo
TIMEOUT=300 ./generate debug


@ -1,40 +1 @@
# WARNING: do not run this script without defining THREADS!
# If THREADS isn't defined then this script will call 'make -j'.
# From the manpage: "If the -j option is given without an argument, make will not limit the number of jobs that can run simultaneously."
# That means that the whole build will be started simultaneously and IT WILL CRASH YOUR COMPUTER!
cd ../../..
cp -r memgraph parent
cd memgraph
TIMEOUT=600 ./init
bash -c "doxygen Doxyfile >/dev/null 2>/dev/null"
cd build
cmake -DTOOLS=ON -DTEST_COVERAGE=ON ..
TIMEOUT=1000 make -j$THREADS
cd ..
mkdir build_release
cd build_release
cmake -DCMAKE_BUILD_TYPE=release ..
TIMEOUT=1000 make -j$THREADS memgraph memgraph__macro_benchmark memgraph__stress
cd ../../parent
git checkout HEAD~1
TIMEOUT=600 ./init
cd build
cmake -DCMAKE_BUILD_TYPE=release ..
TIMEOUT=1000 make -j$THREADS memgraph memgraph__macro_benchmark
# Install tools, because they may be needed to run some benchmarks and tests.
cd ../../memgraph/tools
TIMEOUT=300 ./setup
cd apollo
TIMEOUT=300 ./generate diff


@ -1,30 +1 @@
# WARNING: do not run this script without defining THREADS!
# If THREADS isn't defined then this script will call 'make -j'.
# From the manpage: "If the -j option is given without an argument, make will not limit the number of jobs that can run simultaneously."
# That means that the whole build will be started simultaneously and IT WILL CRASH YOUR COMPUTER!
cd ../..
TIMEOUT=600 ./init
bash -c "doxygen Doxyfile >/dev/null 2>/dev/null"
cd build
cmake -DTOOLS=ON -DCMAKE_BUILD_TYPE=Release -DUSE_READLINE=OFF ..
TIMEOUT=1000 make -j$THREADS
# Create a binary package (which can then be used for Docker image).
mkdir output
cd output
cpack -D CPACK_SET_DESTDIR=ON -G TGZ --config ../CPackConfig.cmake
cpack -G DEB --config ../CPackConfig.cmake
# Create user technical documentation for community site
cd ../../docs/user_technical
./bundle_community
# Install tools, because they may be needed to run some benchmarks and tests.
cd ../../tools
TIMEOUT=300 ./setup
cd apollo
TIMEOUT=300 ./generate release


@ -6,10 +6,9 @@ cd "$DIR/../../"
tmpfile="$DIR/.cppcheck_errors.tmp"
errfile="$DIR/.cppcheck_errors"
if [ "$1" == "" ]; then
mode=diff
else
mode=$1
mode=diff
if echo "$PROJECT" | grep "release" >/dev/null 2>/dev/null; then
mode=release
fi
if [ "$mode" == diff ]; then

tools/tests/apollo_runs.py Executable file

@ -0,0 +1,39 @@
#!/usr/bin/env python3
import json
import os
import re
import subprocess
# paths
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
TESTS_DIR_REL = os.path.join("..", "..", "build", "tools", "tests")
TESTS_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, TESTS_DIR_REL))
# ctest tests
ctest_output = subprocess.run(["ctest", "-N"], cwd=TESTS_DIR, check=True,
                              stdout=subprocess.PIPE).stdout.decode("utf-8")

runs = []
for row in ctest_output.split("\n"):
    # Filter rows only containing tests.
    if not re.match(r"^\s*Test\s+#", row): continue
    name = row.split(":")[1].strip()
    path = os.path.join(TESTS_DIR_REL, name)
    dirname, basename = os.path.split(path)
    files = [basename, "CTestTestfile.cmake"]

    # extra files for specific tests
    if name == "test_mg_import_csv":
        files.extend(["csv", "mg_recovery_check", "../src/mg_import_csv"])

    runs.append({
        "name": "tools__" + name,
        "cd": dirname,
        "commands": "ctest --output-on-failure -R \"^{}$\"".format(name),
        "infiles": files,
    })
print(json.dumps(runs, indent=4, sort_keys=True))