Clean up some errors in LDBC setup scripts

Summary:
Don't require setup_system to run as root, nor to depend on apt-get
Implement command_fail for ldbc/setup_dependencies
ldbc.setup_dataset: Find Java on ArchLinux

Reviewers: buda, mferencevic

Reviewed By: buda

Subscribers: pullbot

Differential Revision: https://phabricator.memgraph.io/D729
This commit is contained in:
Teon Banek 2017-08-30 14:56:32 +02:00
parent 26297ca641
commit 77e574fcc5
4 changed files with 36 additions and 23 deletions

View File

@ -3,7 +3,7 @@
## How to run the benchmark against Neo4j OR Memgraph?
cd memgraph/tests/public_benchmark/ldbc
sudo ./setup_system
./setup_system
./setup_dependencies
./setup_dataset [--scale-factor 1]
./neo [--run] OR ./mg [--run]

View File

@ -32,6 +32,7 @@ do
shift # past argument or value
done
echo "Using scale_factor" $scale_factor
# Prepare the folder structure.
dataset_folder_prefix="neo4j_csv_dataset"
dataset_folder="${script_dir}/${dataset_folder_prefix}_scale_${scale_factor}"
@ -50,7 +51,15 @@ EOF
ldbc_snb_datagen_folder=${script_dir}/ldbc_snb_datagen
cd ${ldbc_snb_datagen_folder}
export HADOOP_OPTS="$HADOOP_OPTS -Xmx10240M"
export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/jre
# Locate a JRE for the datagen run: prefer the Debian/Ubuntu
# 'default-java' symlink, fall back to Arch Linux's 'default-runtime'.
# Aborts the script if neither is present.
if [[ -d "/usr/lib/jvm/default-java/jre" ]]; then
  export JAVA_HOME=/usr/lib/jvm/default-java/jre
elif [[ -d "/usr/lib/jvm/default-runtime/" ]]; then
  export JAVA_HOME=/usr/lib/jvm/default-runtime/
else
  # Diagnostics belong on stderr so they are not swallowed by pipelines.
  echo "Unable to find JRE under /usr/lib/jvm" 1>&2
  exit 1
fi
echo "Using JAVA_HOME" "$JAVA_HOME"
HADOOP_HOME=/usr/local/hadoop LDBC_SNB_DATAGEN_HOME=${ldbc_snb_datagen_folder} ./run.sh
# Transform the dataset into Neo4j CSV format.

View File

@ -2,7 +2,11 @@
# Setup all dependencies
set -e
# Print an error message to stderr and abort the whole script.
# Arguments:
#   $1 - message to display
command_fail() {
  # Quote "$1" so messages with spaces/globs survive intact;
  # send to stderr so the failure is visible even when stdout is redirected.
  echo "$1" 1>&2
  exit 1
}
script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Setup ldbc_snb_datagen
@ -13,19 +17,23 @@ git clone https://github.com/ldbc/ldbc_snb_datagen
cd ${script_dir}
git clone https://github.com/ldbc/ldbc_driver.git
cd ${script_dir}/ldbc_driver
mvn clean package -DskipTests
mvn install -DskipTests
mvn clean package -DskipTests || exit 1
mvn install -DskipTests || exit 1
# Setup ldbc-snb-impls
cd ${script_dir}
git clone https://phabricator.memgraph.io/source/ldbc-snb-impls.git
cp ${script_dir}/ldbc-snb-impls-pom.xml ${script_dir}/ldbc-snb-impls/pom.xml
cd ${script_dir}/ldbc-snb-impls
mvn install
mvn install || exit 1
# Use set -e after we have called git clone, to avoid exiting if we already
# cloned something.
set -e
# Setup python virtual environment & Install dependencies
cd ${script_dir}
if ! which virtualenv >/dev/null; then
if ! which virtualenv > /dev/null 2>&1; then
command_fail "Please install virtualenv!"
fi
if [ ! -d "ve3" ]; then

View File

@ -2,26 +2,22 @@
# System setup (root access is required)
# Make sure only root can run our script
# TODO: remove sudo requirement
if [ "$(id -u)" != "0" ]; then
echo "This script must be run as root" 1>&2
exit 1
fi
# Working directories
set -e
script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
mkdir -p ${script_dir}/tmp
# Install OS packages
# TODO: sudo is required here (remove)
cd ${script_dir}/tmp
wget -O - http://debian.neo4j.org/neotechnology.gpg.key >> key.pgp
apt-key add key.pgp
echo 'deb http://debian.neo4j.org/repo stable/' | tee -a /etc/apt/sources.list.d/neo4j.list > /dev/null
apt-get update
apt-get install -y maven default-jdk neo4j
# On Debian-based systems install maven, a JDK and Neo4j from the Neo4j
# apt repository; elsewhere assume the user installed them manually.
if command -v apt-get > /dev/null 2>&1; then
  wget -O - http://debian.neo4j.org/neotechnology.gpg.key >> key.pgp || exit 1
  sudo apt-key add key.pgp || exit 1
  # 'sudo' must apply to 'tee' (which writes under /etc), not to 'echo' —
  # 'sudo echo ... | tee ...' would run tee unprivileged and fail.
  echo 'deb http://debian.neo4j.org/repo stable/' | sudo tee -a /etc/apt/sources.list.d/neo4j.list > /dev/null || exit 1
  sudo apt-get update || exit 1
  sudo apt-get install -y maven default-jdk neo4j || exit 1
else
  echo "Assuming that 'maven', 'jdk' and 'neo4j' are installed"
fi
# Install Hadoop
cd ${script_dir}/tmp
@ -29,10 +25,10 @@ hadoop_version="hadoop-2.7.3"
hadoop_tar="${hadoop_version}.tar.gz"
hadoop_url="http://apache.mirrors.tds.net/hadoop/common/${hadoop_version}/${hadoop_tar}"
wget ${hadoop_url}
tar -xzvf ${hadoop_tar}
rm -rf /usr/local/hadoop/${hadoop_version}
tar -xzf ${hadoop_tar}
# TODO: root access is required here -> run hadoop under a current user
mv ${hadoop_version} /usr/local/hadoop
echo "Moving hadoop to /usr/local/hadoop"
sudo mv ${hadoop_version} /usr/local/hadoop
# Performance Setup
# echo performance | sudo tee /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor >/dev/null