Remove extension from Python executables

Reviewers: mferencevic, buda

Reviewed By: buda

Subscribers: mislav.bradac, pullbot

Differential Revision: https://phabricator.memgraph.io/D988
commit 73c1206e81 (parent 26aea646c7)
Dominik Gleich, 2017-11-16 14:29:19 +01:00

14 changed files with 22 additions and 18 deletions


@@ -27,8 +27,8 @@ enable_testing()
 add_subdirectory(tests)
 # copy test scripts into the build/ directory (for distributed tests)
-configure_file(${PROJECT_SOURCE_DIR}/tests/start_distributed.py
-               ${PROJECT_BINARY_DIR}/tests/start_distributed.py COPYONLY)
+configure_file(${PROJECT_SOURCE_DIR}/tests/start_distributed
+               ${PROJECT_BINARY_DIR}/tests/start_distributed COPYONLY)
 configure_file(${PROJECT_SOURCE_DIR}/tests/config
                ${PROJECT_BINARY_DIR}/tests/config COPYONLY)
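For readers unfamiliar with configure_file(... COPYONLY): it copies the listed script verbatim (no variable substitution) from the source tree into the build tree at configure time. A rough Python illustration of the effect, with placeholder paths standing in for ${PROJECT_SOURCE_DIR} and ${PROJECT_BINARY_DIR}; this is not code from the commit:

    # Illustration only: approximate effect of configure_file(... COPYONLY).
    # The directories below are hypothetical placeholders.
    import shutil

    PROJECT_SOURCE_DIR = "/home/user/memgraph"        # placeholder
    PROJECT_BINARY_DIR = "/home/user/memgraph/build"  # placeholder

    # COPYONLY copies the file as-is, without substituting @VAR@ references.
    shutil.copy(f"{PROJECT_SOURCE_DIR}/tests/start_distributed",
                f"{PROJECT_BINARY_DIR}/tests/start_distributed")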


@@ -136,7 +136,7 @@ class Master : public Reactor {
       if (workers_seen == static_cast<int64_t>(worker_mnids_.size())) {
         subscription.Unsubscribe();
         // Sleep for a while so we can read output in the terminal.
-        // (start_distributed.py runs each process in a new tab which is
+        // (start_distributed runs each process in a new tab which is
         // closed immediately after process has finished)
         std::this_thread::sleep_for(std::chrono::seconds(4));
         CloseChannel("main");
@@ -146,12 +146,13 @@ class Master : public Reactor {
       // send a TextMessage to each worker
       for (auto wmnid : worker_mnids_) {
         auto stream = memgraph.FindChannel(wmnid, "worker", "main");
-        stream->OnEventOnce().ChainOnce<ChannelResolvedMessage>([this, stream](
-            const ChannelResolvedMessage &msg, const Subscription &) {
-          msg.channelWriter()->Send<TextMessage>("master", "main",
-                                                 "hi from master");
-          stream->Close();
-        });
+        stream->OnEventOnce().ChainOnce<ChannelResolvedMessage>(
+            [this, stream](const ChannelResolvedMessage &msg,
+                           const Subscription &) {
+              msg.channelWriter()->Send<TextMessage>("master", "main",
+                                                     "hi from master");
+              stream->Close();
+            });
       }
     }


@@ -1,3 +1,6 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
 # Automatically copied to the build/ directory during Makefile (configured by cmake)
 import os
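Dropping the .py suffix works because the script is run directly: the shebang added above selects the interpreter, and the file carries the executable bit (normally set with chmod +x or recorded as the git file mode). A minimal sketch of that mechanism, using a placeholder path and setting the bit from Python purely for illustration:

    # Sketch only: how an extension-less script stays directly runnable.
    import os
    import stat

    script = "tests/start_distributed"  # placeholder path

    # Add the executable bits on top of the existing permissions.
    mode = os.stat(script).st_mode
    os.chmod(script, mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

    # With the executable bit set, the kernel reads the shebang line
    # ("#!/usr/bin/env python3") and starts python3 on the file, so the
    # caller can run ./tests/start_distributed without any .py suffix.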


@@ -33,7 +33,7 @@
 cd memgraph/tests/public_benchmark/ldbc
 source ve3/bin/activate
-./index_creation.py ldbc-snb-impls/snb-interactive-neo4j/scripts/indexCreation.neo4j
+./index_creation ldbc-snb-impls/snb-interactive-neo4j/scripts/indexCreation.neo4j
 ## Where is and how to use LDBC plotting?


@@ -156,7 +156,7 @@ LDBC_DEFAULT_PROPERTIES = \
 def create_index(port, database):
     index_file = os.path.join(SCRIPT_DIR, 'ldbc-snb-impls',
                               'snb-interactive-neo4j', 'scripts', 'indexCreation.neo4j')
-    subprocess.check_call(('ve3/bin/python3', 'index_creation.py', '--port',
+    subprocess.check_call(('ve3/bin/python3', 'index_creation', '--port',
                            port, '--database', database, index_file),
                           cwd=SCRIPT_DIR)
     time.sleep(1.0)
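Here the script is run through the virtualenv's interpreter explicitly, so the file needs neither a .py suffix nor an executable bit for this call to work. A self-contained sketch of the same pattern; the port, database name, and working directory below are hypothetical placeholders, not values from the commit:

    # Sketch of invoking an extension-less script via an explicit interpreter.
    import subprocess

    SCRIPT_DIR = "/home/user/memgraph/tests/public_benchmark/ldbc"  # placeholder

    subprocess.check_call(
        ("ve3/bin/python3",         # interpreter from the local virtualenv
         "index_creation",          # script referenced without .py
         "--port", "7687",          # placeholder port
         "--database", "memgraph",  # placeholder database name
         "ldbc-snb-impls/snb-interactive-neo4j/scripts/indexCreation.neo4j"),
        cwd=SCRIPT_DIR)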

tests/stress/bipartite.py: Executable file → Normal file (mode change only)


@@ -11,8 +11,8 @@ import sys
 import time
 # dataset calibrated for running on Apollo (total 4min)
-# bipartite runs for approx. 30s
-# create_match runs for approx. 30s
+# bipartite.py runs for approx. 30s
+# create_match.py runs for approx. 30s
 # long_running runs for 1min
 # long_running runs for 2min
 SMALL_DATASET = [
@@ -39,7 +39,7 @@ SMALL_DATASET = [
 ]
 # dataset calibrated for running on daily stress instance (total 9h)
-# bipartite and create_match run for approx. 15min
+# bipartite.py and create_match.py run for approx. 15min
 # long_running runs for 5min x 6 times = 30min
 # long_running runs for 8h
 LARGE_DATASET = [

tests/stress/create_match.py: Executable file → Normal file (mode change only)


@@ -211,7 +211,7 @@ postgresql_lib_dir = os.path.join(LIBS_DIR, "postgresql", "lib")
 infile = create_archive("macro_benchmark", [binary_release_path,
     binary_release_link_path, macro_bench_path, config_path,
     harness_client_binaries, postgresql_lib_dir], cwd = WORKSPACE_DIR)
-supervisor = "./memgraph/tests/macro_benchmark/harness.py"
+supervisor = "./memgraph/tests/macro_benchmark/harness"
 outfile_paths = "\./memgraph/tests/macro_benchmark/\.harness_summary"
 RUNS.append(generate_run("macro_benchmark__query_suite",
     supervisor = supervisor,
@@ -242,7 +242,7 @@ if mode == "diff":
     binary_parent_link_path, parent_macro_bench_path, parent_config_path,
     parent_harness_client_binaries, parent_postgresql_lib_dir],
     cwd = WORKSPACE_DIR)
-supervisor = "./parent/tests/macro_benchmark/harness.py"
+supervisor = "./parent/tests/macro_benchmark/harness"
 args = MACRO_BENCHMARK_ARGS + " --RunnerBin " + binary_parent_path
 outfile_paths = "\./parent/tests/macro_benchmark/\.harness_summary"
 RUNS.append(generate_run("macro_benchmark_parent__query_suite",


@@ -1,13 +1,13 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 import time
 import sys
 import os
 # hackish way to resuse existing start code
-sys.path.append("../tests/macro_benchmark/")
+sys.path.append(os.path.dirname(os.path.realpath(__file__)) +
+                "/../tests/macro_benchmark/")
 from databases import *
 from clients import *
 from common import get_absolute_path
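The new sys.path entry is computed from the script's own location rather than the caller's working directory, so the macro benchmark modules import correctly no matter where the tool is launched from. A small self-contained sketch of the same pattern; the appended directory is a placeholder:

    # Sketch: anchor an import path to the script's location instead of the
    # current working directory. The target directory below is illustrative.
    import os
    import sys

    SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
    sys.path.append(os.path.join(SCRIPT_DIR, "..", "tests", "macro_benchmark"))

    # Imports such as `from common import get_absolute_path` now resolve even
    # when the script is invoked from an unrelated working directory.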