Add HA normal operation long running test to daily build

Summary: Currently set to run for 2 hours.

Reviewers: mferencevic

Reviewed By: mferencevic

Subscribers: pullbot

Differential Revision: https://phabricator.memgraph.io/D2206
This commit is contained in:
Ivan Paljak 2019-07-18 16:49:47 +02:00
parent 10136f43dc
commit fa4c303af6
3 changed files with 10 additions and 3 deletions

View File

@@ -19,13 +19,18 @@
commands: TIMEOUT=43200 ./continuous_integration --large-dataset commands: TIMEOUT=43200 ./continuous_integration --large-dataset
infiles: *STRESS_INFILES infiles: *STRESS_INFILES
- name: stress_ha - name: stress_ha_normal_operation
commands: TIMEOUT=200 ./continuous_integration_ha commands: TIMEOUT=200 ./continuous_integration_ha
infiles: &STRESS_HA_INFILES infiles: &STRESS_HA_INFILES
- . # current directory - . # current directory
- ../../build_release/memgraph_ha # memgraph release binary - ../../build_release/memgraph_ha # memgraph release binary
- ../../build_release/tests/stress/ # stress client binaries - ../../build_release/tests/stress/ # stress client binaries
- name: stress_ha_normal_operation_large
project: release
commands: TIMEOUT=7500 ./continuous_integration_ha --large-dataset
infiles: *STRESS_HA_INFILES
- name: durability - name: durability
commands: TIMEOUT=300 ./ve3/bin/python3 durability --num-steps 5 commands: TIMEOUT=300 ./ve3/bin/python3 durability --num-steps 5
infiles: &DURABILITY_INFILES infiles: &DURABILITY_INFILES

View File

@@ -32,7 +32,7 @@ SMALL_DATASET = [
LARGE_DATASET = [ LARGE_DATASET = [
{ {
"test": "long_running.cpp", "test": "long_running.cpp",
"options": ["--vertex-count", "200000", "--edge-count", "1000000", "--max-time", "120", "--verify", "300"], "options": ["--vertex-count", "10000", "--edge-count", "50000", "--max-time", "120", "--verify", "300"],
"timeout": 140, "timeout": 140,
}, },
] ]
@@ -169,6 +169,8 @@ def cleanup():
proc_mg.kill() proc_mg.kill()
proc_mg.wait() proc_mg.wait()
time.sleep(3)
# run tests # run tests
runtimes = {} runtimes = {}
dataset = LARGE_DATASET if args.large_dataset else SMALL_DATASET dataset = LARGE_DATASET if args.large_dataset else SMALL_DATASET

View File

@@ -3,5 +3,5 @@
"election_timeout_max": 1000, "election_timeout_max": 1000,
"heartbeat_interval": 100, "heartbeat_interval": 100,
"replication_timeout": 10000, "replication_timeout": 10000,
"log_size_snapshot_threshold": -1 "log_size_snapshot_threshold": 500
} }