Migrate to toolchain-v2 ()

* GCC_VERSION=10.2.0
* BINUTILS_VERSION=2.35.1
* GDB_VERSION=10.1 (except centos-7 8.3)
* CMAKE_VERSION=3.18.4
* CPPCHECK_VERSION=2.2
* LLVM_VERSION=11.0.0
* SWIG_VERSION=4.0.2
This commit is contained in:
Marko Budiselić 2020-11-12 20:18:11 +01:00 committed by GitHub
parent f0382c82cd
commit 958bc870b3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
56 changed files with 1514 additions and 358 deletions
.clang-tidy
.github/workflows
CMakeLists.txt
docs/dev
environment
init
libs
query_modules/louvain/src
src
tests
integration/mg_import_csv/tests
array_types
array_types_multi_char_array_delimiter
bad_relationships
csv_parser_test10
csv_parser_test12
csv_parser_test13
csv_parser_test15
csv_parser_test17
csv_parser_test18
csv_parser_test2
csv_parser_test20
csv_parser_test21
csv_parser_test23
csv_parser_test3
csv_parser_test5
csv_parser_test6
csv_parser_test8
data_split_into_multiple_files
extra_columns_relationships
field_types
ignored_columns
multi_char_quote_and_node_label
neo_example
node_id
relationship_properties
single_char_quote_and_relationship_type
unit
tools

View File

@ -36,6 +36,9 @@ Checks: '*,
-hicpp-use-equals-default,
-hicpp-vararg,
-llvm-header-guard,
-llvmlibc-callee-namespace,
-llvmlibc-implementation-in-namespace,
-llvmlibc-restrict-system-libc-headers,
-misc-non-private-member-variables-in-classes,
-misc-unused-parameters,
-modernize-avoid-c-arrays,

View File

@ -24,7 +24,7 @@ jobs:
- name: Build community binaries
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Initialize dependencies.
./init
@ -37,7 +37,7 @@ jobs:
- name: Run unit tests
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Run unit tests.
cd build
@ -65,7 +65,7 @@ jobs:
- name: Build coverage binaries
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Initialize dependencies.
./init
@ -78,7 +78,7 @@ jobs:
- name: Run unit tests
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Run unit tests.
cd build
@ -87,7 +87,7 @@ jobs:
- name: Compute code coverage
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Compute code coverage.
cd tools/github
@ -120,7 +120,7 @@ jobs:
- name: Build debug binaries
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Initialize dependencies.
./init
@ -133,7 +133,7 @@ jobs:
- name: Run leftover CTest tests
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Run leftover CTest tests (all except unit and benchmark tests).
cd build
@ -165,7 +165,7 @@ jobs:
- name: Run cppcheck and clang-format
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Run cppcheck and clang-format.
cd tools/github
@ -209,7 +209,7 @@ jobs:
- name: Build release binaries
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Initialize dependencies.
./init
@ -222,7 +222,7 @@ jobs:
- name: Build parent binaries
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Initialize dependencies.
cd ../parent

View File

@ -24,7 +24,7 @@ jobs:
- name: Build community binaries
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Initialize dependencies.
./init
@ -37,7 +37,7 @@ jobs:
- name: Create community DEB package
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Create community DEB package.
cd build
@ -53,7 +53,7 @@ jobs:
- name: Run unit tests
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Run unit tests.
cd build
@ -86,7 +86,7 @@ jobs:
- name: Build coverage binaries
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Initialize dependencies.
./init
@ -99,7 +99,7 @@ jobs:
- name: Run unit tests
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Run unit tests.
cd build
@ -108,7 +108,7 @@ jobs:
- name: Compute code coverage
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Compute code coverage.
cd tools/github
@ -141,7 +141,7 @@ jobs:
- name: Build debug binaries
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Initialize dependencies.
./init
@ -154,7 +154,7 @@ jobs:
- name: Run leftover CTest tests
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Run leftover CTest tests (all except unit and benchmark tests).
cd build
@ -186,7 +186,7 @@ jobs:
- name: Run cppcheck and clang-format
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Run cppcheck and clang-format.
cd tools/github
@ -216,7 +216,7 @@ jobs:
- name: Build release binaries
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Initialize dependencies.
./init
@ -229,7 +229,7 @@ jobs:
- name: Create enterprise DEB package
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Create enterprise DEB package.
cd build
@ -245,7 +245,7 @@ jobs:
- name: Run micro benchmark tests
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Run micro benchmark tests.
cd build

View File

@ -21,7 +21,7 @@ jobs:
- name: Build community binaries
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Initialize dependencies.
./init
@ -34,7 +34,7 @@ jobs:
- name: Create community RPM package
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Create community RPM package.
cd build
@ -51,7 +51,7 @@ jobs:
- name: Run unit tests
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Run unit tests.
cd build
@ -84,7 +84,7 @@ jobs:
- name: Build coverage binaries
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Initialize dependencies.
./init
@ -97,7 +97,7 @@ jobs:
- name: Run unit tests
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Run unit tests.
cd build
@ -106,7 +106,7 @@ jobs:
- name: Compute code coverage
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Compute code coverage.
cd tools/github
@ -139,7 +139,7 @@ jobs:
- name: Build debug binaries
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Initialize dependencies.
./init
@ -152,7 +152,7 @@ jobs:
- name: Run leftover CTest tests
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Run leftover CTest tests (all except unit and benchmark tests).
cd build
@ -184,7 +184,7 @@ jobs:
- name: Run cppcheck and clang-format
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Run cppcheck and clang-format.
cd tools/github
@ -214,7 +214,7 @@ jobs:
- name: Build release binaries
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Initialize dependencies.
./init
@ -227,7 +227,7 @@ jobs:
- name: Create enterprise RPM package
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Create enterprise RPM package.
cd build
@ -244,7 +244,7 @@ jobs:
- name: Run micro benchmark tests
run: |
# Activate toolchain.
source /opt/toolchain-v1/activate
source /opt/toolchain-v2/activate
# Run micro benchmark tests.
cd build

View File

@ -12,6 +12,7 @@ jobs:
uses: actions/checkout@v2
- name: Check the system
run: |
source /opt/toolchain-v2/activate
./tools/check-build-system
HP-DL360G6-2:
name: "HP-DL360G6-2"
@ -22,6 +23,7 @@ jobs:
uses: actions/checkout@v2
- name: Check the system
run: |
source /opt/toolchain-v2/activate
./tools/check-build-system
HP-DL360G6-3:
name: "HP-DL360G6-3"
@ -32,6 +34,7 @@ jobs:
uses: actions/checkout@v2
- name: Check the system
run: |
source /opt/toolchain-v2/activate
./tools/check-build-system
HP-DL360G6-v2-1:
@ -43,6 +46,7 @@ jobs:
uses: actions/checkout@v2
- name: Check the system
run: |
source /opt/toolchain-v2/activate
./tools/check-build-system
HP-DL360G6-v2-2:
name: "HP-DL360G6-v2-2"
@ -53,6 +57,7 @@ jobs:
uses: actions/checkout@v2
- name: Check the system
run: |
source /opt/toolchain-v2/activate
./tools/check-build-system
HP-DL360G6-v2-3:
name: "HP-DL360G6-v2-3"
@ -63,6 +68,7 @@ jobs:
uses: actions/checkout@v2
- name: Check the system
run: |
source /opt/toolchain-v2/activate
./tools/check-build-system
HP-DL360G6-v2-4:
name: "HP-DL360G6-v2-4"
@ -73,6 +79,7 @@ jobs:
uses: actions/checkout@v2
- name: Check the system
run: |
source /opt/toolchain-v2/activate
./tools/check-build-system
HP-DL360G6-v2-5:
name: "HP-DL360G6-v2-5"
@ -83,6 +90,7 @@ jobs:
uses: actions/checkout@v2
- name: Check the system
run: |
source /opt/toolchain-v2/activate
./tools/check-build-system
HP-DL360G6-v2-6:
name: "HP-DL360G6-v2-6"
@ -93,6 +101,7 @@ jobs:
uses: actions/checkout@v2
- name: Check the system
run: |
source /opt/toolchain-v2/activate
./tools/check-build-system
HP-DL360G6-v2-7:
name: "HP-DL360G6-v2-7"
@ -103,4 +112,5 @@ jobs:
uses: actions/checkout@v2
- name: Check the system
run: |
source /opt/toolchain-v2/activate
./tools/check-build-system

View File

@ -151,18 +151,21 @@ set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
# c99-designator is disabled because of required mixture of designated and
# non-designated initializers in Python Query Module code (`py_module.cpp`).
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall \
-Werror=switch -Werror=switch-bool -Werror=return-type \
-Werror=return-stack-address")
-Werror=return-stack-address \
-Wno-c99-designator")
# Don't omit frame pointer in RelWithDebInfo, for additional callchain debug.
set(CMAKE_CXX_FLAGS_RELWITHDEBINFO
"${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -fno-omit-frame-pointer")
# Statically link libgcc and libstdc++, the GCC allows this according to:
# https://gcc.gnu.org/onlinedocs/gcc-8.3.0/libstdc++/manual/manual/license.html
# https://gcc.gnu.org/onlinedocs/gcc-10.2.0/libstdc++/manual/manual/license.html
# https://www.gnu.org/licenses/gcc-exception-faq.html
# Last checked for gcc-8.3 which we are using on the build machines.
# Last checked for gcc-10.2 which we are using on the build machines.
# ** If we change versions, recheck this! **
# ** Static linking is allowed only for executables! **
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libgcc -static-libstdc++")

View File

@ -10,9 +10,7 @@ on your machine before you can download the source code.
On Debian systems, you can do it inside a terminal with the following
command:
sudo apt-get install git
On ArchLinux or Gentoo, you probably already know what to do.
apt install git
After installing `git`, you are now ready to fetch your own copy of Memgraph
source code. Run the following command:
@ -32,19 +30,21 @@ In your terminal, position yourself in the obtained memgraph directory.
### Installing Dependencies
On Debian systems, dependencies that are required by the codebase should be
setup by running the `init` script:
./init -s
Currently, other systems aren't supported in the `init` script. But you can
issue the needed steps manually. First run the `init` script.
Dependencies that are required by the codebase should be checked by running the
`init` script:
./init
The script will output the required packages, which you should be able to
install via your favorite package manager. For example, `pacman` on ArchLinux.
After installing the packages, issue the following commands:
If the script fails, dependency installation scripts can be found under
`environment/os/`. The directory contains a dependency management script for
each supported operating system. E.g., if your system is **Debian 10**, run the
following to install all required build packages:
./environment/os/debian-10.sh install MEMGRAPH_BUILD_DEPS
Once everything is installed, rerun the `init` script.
Once the `init` script is successfully finished, issue the following commands:
mkdir -p build
./libs/setup.sh
@ -52,19 +52,17 @@ After installing the packages, issue the following commands:
### Compiling
Memgraph is compiled using our own custom toolchain that can be obtained from
[Toolchain
repository](https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/toolchain). You
should read the `README.txt` file in the repository and install the appropriate
toolchain for your distribution. After you have installed the toolchain you
should read the instructions for the toolchain in the toolchain install
directory (`/opt/toolchain-vXYZ/README.md`) and install dependencies that are
necessary to run the toolchain.
the toolchain repository. You should read the `environment/README.txt` file
in the repository and install the appropriate toolchain for your distribution.
After you have installed the toolchain you should read the instructions for the
toolchain in the toolchain install directory (`/opt/toolchain-vXYZ/README.md`)
and install dependencies that are necessary to run the toolchain.
When you want to compile Memgraph you should activate the toolchain using the
prepared toolchain activation script that is also described in the toolchain
`README`.
NOTE: You *must* activate the toolchain every time you want to compile
NOTE: You **must** activate the toolchain every time you want to compile
Memgraph!
You should now activate the toolchain in your console.

44
environment/README.md Normal file
View File

@ -0,0 +1,44 @@
# Memgraph Build and Run Environments
## Toolchain Installation Procedure
1) Download the toolchain for your operating system from one of the following
links (current active toolchain is `toolchain-v2`):
* [CentOS 7](https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/toolchain-v2/toolchain-v2-binaries-centos-7.tar.gz)
* [CentOS 8](https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/toolchain-v2/toolchain-v2-binaries-centos-8.tar.gz)
* [Debian 9](https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/toolchain-v2/toolchain-v2-binaries-debian-9.tar.gz)
* [Debian 10](https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/toolchain-v2/toolchain-v2-binaries-debian-10.tar.gz)
* [Ubuntu 18.04](https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/toolchain-v2/toolchain-v2-binaries-ubuntu-18.04.tar.gz)
* [Ubuntu 20.04](https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/toolchain-v2/toolchain-v2-binaries-ubuntu-20.04.tar.gz)
2) Extract the toolchain with the following command:
```bash
tar xzvf {{toolchain-archive}}.tar.gz -C /opt
```
3) Check and install required toolchain runtime dependencies by executing
(e.g., on **Debian 10**):
```bash
./environment/os/debian-10.sh check TOOLCHAIN_RUN_DEPS
./environment/os/debian-10.sh install TOOLCHAIN_RUN_DEPS
```
4) Activate the toolchain:
```bash
source /opt/toolchain-v2/activate
```
## Toolchain Upgrade Procedure
1) Build a new toolchain for each supported OS (latest versions).
2) If the new toolchain doesn't compile on some supported OS, the last
compilable toolchain has to be used instead. In other words, the project has
to compile on the oldest active toolchain as well. Suppose some
changes/improvements were added when migrating to the latest toolchain; in
that case, the maintainer has to ensure that the project still compiles on
previous toolchains (everything from `init` script to the actual code has to
work on all supported operating systems).

3
environment/os/.gitignore vendored Normal file
View File

@ -0,0 +1,3 @@
*.deb
*.rpm
*.tar.gz

127
environment/os/centos-7.sh Executable file
View File

@ -0,0 +1,127 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
TOOLCHAIN_BUILD_DEPS=(
coreutils gcc gcc-c++ make # generic build tools
wget # used for archive download
gnupg2 # used for archive signature verification
tar gzip bzip2 xz unzip # used for archive unpacking
zlib-devel # zlib library used for all builds
expat-devel libipt-devel libbabeltrace-devel xz-devel python3-devel # gdb
texinfo # gdb
libcurl-devel # cmake
readline-devel # cmake and llvm
libffi-devel libxml2-devel perl-Digest-MD5 # llvm
libedit-devel pcre-devel automake bison # swig
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz # used for archive unpacking
zlib # zlib library used for all builds
expat libipt libbabeltrace xz-libs python3 # for gdb
readline # for cmake and llvm
libffi libxml2 # for llvm
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make pkgconfig # build system
curl wget # for downloading libs
libuuid-devel java-11-openjdk # required by antlr
readline-devel # for memgraph console
python3-devel # for query modules
openssl-devel
libseccomp-devel
python3 python-virtualenv python3-pip nmap-ncat # for qa, macro_benchmark and stress tests
#
# IMPORTANT: python3-yaml does NOT exist on CentOS
# Install it using `pip3 install PyYAML`
#
PyYAML # Package name here does not correspond to the yum package!
libcurl-devel # mg-requests
sbcl # for custom Lisp C++ preprocessing
rpm-build rpmlint # for RPM package building
doxygen graphviz # source documentation generators
which mono-complete dotnet-sdk-3.1 golang nodejs zip unzip java-11-openjdk-devel # for driver tests
)
list() {
echo "$1"
}
# Verify that every package in the space-separated list $1 is installed.
# Prints the missing packages and exits non-zero if any are absent.
check() {
local missing=""
for pkg in $1; do
# `git` may be installed from source rather than via yum, so probe for the
# binary instead of querying the package database.
if [ "$pkg" == git ]; then
if ! which "git" >/dev/null; then
missing="git $missing"
fi
continue
fi
# PyYAML is installed via pip (there is no yum package on CentOS), so probe
# the importable Python module instead.
if [ "$pkg" == "PyYAML" ]; then
if ! python3 -c "import yaml" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
continue
fi
# Everything else: ask yum whether the package is installed.
if ! yum list installed "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
}
# Install the requested packages (space-separated list in $1) on CentOS 7.
# Must be run as root via sudo. Packages that are unavailable in the standard
# CentOS 7 repositories (libipt, dotnet-sdk-3.1, PyYAML) get special handling.
install() {
    cd "$DIR"
    if [ "$EUID" -ne 0 ]; then
        echo "Please run as root."
        exit 1
    fi
    # ${SUDO_USER:-} avoids an unbound-variable error under `set -u` when the
    # script is executed directly as root (without sudo).
    if [ "${SUDO_USER:-}" == "" ]; then
        echo "Please run as sudo."
        exit 1
    fi
    # If a GitHub Actions runner is installed, append LANG to its environment
    # because Python related tests don't work without the LANG export.
    if [ -d "/home/gh/actions-runner" ]; then
        echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
    else
        echo "NOTE: export LANG=en_US.utf8"
    fi
    yum install -y epel-release
    yum update -y
    yum install -y wget git python3 python3-pip
    for pkg in $1; do
        # libipt/libipt-devel are not packaged for CentOS 7 -- pull the EL8
        # builds from a third-party repository.
        if [ "$pkg" == libipt ]; then
            if ! yum list installed libipt >/dev/null 2>/dev/null; then
                yum install -y http://repo.okay.com.mx/centos/8/x86_64/release/libipt-1.6.1-8.el8.x86_64.rpm
            fi
            continue
        fi
        if [ "$pkg" == libipt-devel ]; then
            if ! yum list installed libipt-devel >/dev/null 2>/dev/null; then
                yum install -y http://repo.okay.com.mx/centos/8/x86_64/release/libipt-devel-1.6.1-8.el8.x86_64.rpm
            fi
            continue
        fi
        # dotnet requires the Microsoft package repository to be registered.
        if [ "$pkg" == dotnet-sdk-3.1 ]; then
            if ! yum list installed dotnet-sdk-3.1 >/dev/null 2>/dev/null; then
                wget -nv https://packages.microsoft.com/config/centos/7/packages-microsoft-prod.rpm -O packages-microsoft-prod.rpm
                # Install the package that was just downloaded instead of
                # fetching the same archive a second time from the remote URL.
                rpm -Uvh packages-microsoft-prod.rpm
                yum update -y
                yum install -y dotnet-sdk-3.1
            fi
            continue
        fi
        # PyYAML has no yum package on CentOS -- install it via pip for the
        # invoking (non-root) user.
        if [ "$pkg" == PyYAML ]; then
            sudo -H -u "$SUDO_USER" bash -c "pip3 install --user PyYAML"
            continue
        fi
        yum install -y "$pkg"
    done
}
deps=$2"[*]"
"$1" "${!deps}"

144
environment/os/centos-8.sh Executable file
View File

@ -0,0 +1,144 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
TOOLCHAIN_BUILD_DEPS=(
coreutils gcc gcc-c++ make # generic build tools
wget # used for archive download
gnupg2 # used for archive signature verification
tar gzip bzip2 xz unzip # used for archive unpacking
zlib-devel # zlib library used for all builds
expat-devel libipt-devel libbabeltrace-devel xz-devel python36-devel texinfo # for gdb
libcurl-devel # for cmake
readline-devel # for cmake and llvm
libffi-devel libxml2-devel # for llvm
libedit-devel pcre-devel automake bison # for swig
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz # used for archive unpacking
zlib # zlib library used for all builds
expat libipt libbabeltrace xz-libs python36 # for gdb
readline # for cmake and llvm
libffi libxml2 # for llvm
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make pkgconf-pkg-config # build system
curl wget # for downloading libs
libuuid-devel java-11-openjdk # required by antlr
readline-devel # for memgraph console
python36-devel # for query modules
openssl-devel
libseccomp-devel
python36 python3-virtualenv python3-pip nmap-ncat # for qa, macro_benchmark and stress tests
#
# IMPORTANT: python3-yaml does NOT exist on CentOS
# Install it manually using `pip3 install PyYAML`
#
PyYAML # Package name here does not correspond to the yum package!
libcurl-devel # mg-requests
rpm-build rpmlint # for RPM package building
doxygen graphviz # source documentation generators
which mono-complete dotnet-sdk-3.1 nodejs golang zip unzip java-11-openjdk-devel # for driver tests
sbcl # for custom Lisp C++ preprocessing
)
list() {
echo "$1"
}
check() {
local missing=""
for pkg in $1; do
if [ "$pkg" == "PyYAML" ]; then
if ! python3 -c "import yaml" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
continue
fi
if ! yum list installed "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
}
# Install the requested packages (space-separated list in $1) on CentOS 8.
# Must be run as root via sudo. Packages missing from the standard repos
# (libipt, libbabeltrace-devel, sbcl, dotnet-sdk-3.1, PyYAML) get special
# handling below.
install() {
    cd "$DIR"
    if [ "$EUID" -ne 0 ]; then
        echo "Please run as root."
        exit 1
    fi
    # ${SUDO_USER:-} avoids an unbound-variable error under `set -u` when the
    # script is executed directly as root (without sudo).
    if [ "${SUDO_USER:-}" == "" ]; then
        echo "Please run as sudo."
        exit 1
    fi
    # If a GitHub Actions runner is installed, append LANG to its environment
    # because Python related tests don't work without the LANG export.
    if [ -d "/home/gh/actions-runner" ]; then
        echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
    else
        echo "NOTE: export LANG=en_US.utf8"
    fi
    dnf install -y epel-release
    dnf config-manager --set-enabled PowerTools # Required to install texinfo.
    dnf update -y
    dnf install -y wget git python36 python3-pip
    for pkg in $1; do
        # libipt/libipt-devel come from a third-party repository.
        if [ "$pkg" == libipt ]; then
            if ! dnf list installed libipt >/dev/null 2>/dev/null; then
                dnf install -y http://repo.okay.com.mx/centos/8/x86_64/release/libipt-1.6.1-8.el8.x86_64.rpm
            fi
            continue
        fi
        if [ "$pkg" == libipt-devel ]; then
            # Use dnf consistently (the original mixed a `yum list` in here).
            if ! dnf list installed libipt-devel >/dev/null 2>/dev/null; then
                dnf install -y http://repo.okay.com.mx/centos/8/x86_64/release/libipt-devel-1.6.1-8.el8.x86_64.rpm
            fi
            continue
        fi
        # Install GDB dependencies not present in the standard repos.
        # https://bugs.centos.org/view.php?id=17068
        # https://centos.pkgs.org
        # Since 2020, there is Babeltrace2 (https://babeltrace.org). Not used
        # within GDB yet (an assumption).
        if [ "$pkg" == libbabeltrace-devel ]; then
            if ! dnf list installed libbabeltrace-devel >/dev/null 2>/dev/null; then
                dnf install -y http://repo.okay.com.mx/centos/8/x86_64/release/libbabeltrace-devel-1.5.4-2.el8.x86_64.rpm
            fi
            continue
        fi
        # sbcl and its Common Lisp prerequisites come from a third-party repo.
        if [ "$pkg" == sbcl ]; then
            if ! dnf list installed cl-asdf >/dev/null 2>/dev/null; then
                dnf install -y https://pkgs.dyn.su/el8/base/x86_64/cl-asdf-20101028-18.el8.noarch.rpm
            fi
            if ! dnf list installed common-lisp-controller >/dev/null 2>/dev/null; then
                dnf install -y https://pkgs.dyn.su/el8/base/x86_64/common-lisp-controller-7.4-20.el8.noarch.rpm
            fi
            if ! dnf list installed sbcl >/dev/null 2>/dev/null; then
                dnf install -y https://pkgs.dyn.su/el8/base/x86_64/sbcl-2.0.1-4.el8.x86_64.rpm
            fi
            continue
        fi
        # dotnet requires the Microsoft package repository to be registered.
        if [ "$pkg" == dotnet-sdk-3.1 ]; then
            if ! dnf list installed dotnet-sdk-3.1 >/dev/null 2>/dev/null; then
                wget -nv https://packages.microsoft.com/config/centos/8/packages-microsoft-prod.rpm -O packages-microsoft-prod.rpm
                # Install the package that was just downloaded instead of
                # fetching the same archive a second time from the remote URL.
                rpm -Uvh packages-microsoft-prod.rpm
                dnf update -y
                dnf install -y dotnet-sdk-3.1
            fi
            continue
        fi
        # PyYAML has no dnf package on CentOS -- install it via pip for the
        # invoking (non-root) user.
        if [ "$pkg" == PyYAML ]; then
            sudo -H -u "$SUDO_USER" bash -c "pip3 install --user PyYAML"
            continue
        fi
        dnf install -y "$pkg"
    done
}
deps=$2"[*]"
"$1" "${!deps}"

86
environment/os/debian-10.sh Executable file
View File

@ -0,0 +1,86 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
TOOLCHAIN_BUILD_DEPS=(
coreutils gcc g++ build-essential make # generic build tools
wget # used for archive download
gnupg # used for archive signature verification
tar gzip bzip2 xz-utils unzip # used for archive unpacking
zlib1g-dev # zlib library used for all builds
libexpat1-dev libipt-dev libbabeltrace-dev liblzma-dev python3-dev texinfo # for gdb
libcurl4-openssl-dev # for cmake
libreadline-dev # for cmake and llvm
libffi-dev libxml2-dev # for llvm
libedit-dev libpcre3-dev automake bison # for swig
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz-utils # used for archive unpacking
zlib1g # zlib library used for all builds
libexpat1 libipt2 libbabeltrace1 liblzma5 python3 # for gdb
libcurl4 # for cmake
libreadline7 # for cmake and llvm
libffi6 libxml2 # for llvm
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make pkg-config # build system
curl wget # for downloading libs
uuid-dev default-jre-headless # required by antlr
libreadline-dev # for memgraph console
libpython3-dev python3-dev # for query modules
libssl-dev
libseccomp-dev
netcat # tests are using nc to wait for memgraph
python3 virtualenv python3-virtualenv python3-pip # for qa, macro_benchmark and stress tests
python3-yaml # for the configuration generator
libcurl4-openssl-dev # mg-requests
sbcl # for custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators
mono-runtime mono-mcs zip unzip default-jdk-headless # for driver tests
dotnet-sdk-3.1 golang nodejs npm
)
list() {
echo "$1"
}
check() {
check_all_dpkg "$1"
}
# Install the requested packages (space-separated list in $1) on Debian 10.
# Rewrites /etc/apt/sources.list to enable contrib/non-free (needed for some
# build dependencies), then installs each package, special-casing
# dotnet-sdk-3.1 which requires the Microsoft package repository.
install() {
cat >/etc/apt/sources.list <<EOF
deb http://deb.debian.org/debian/ buster main non-free contrib
deb-src http://deb.debian.org/debian/ buster main non-free contrib
deb http://deb.debian.org/debian/ buster-updates main contrib non-free
deb-src http://deb.debian.org/debian/ buster-updates main contrib non-free
deb http://security.debian.org/debian-security buster/updates main contrib non-free
deb-src http://security.debian.org/debian-security buster/updates main contrib non-free
EOF
cd "$DIR"
apt update
# If a GitHub Actions runner is installed, append LANG to its environment
# because Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
else
echo "NOTE: export LANG=en_US.utf8"
fi
apt install -y wget
for pkg in $1; do
# dotnet-sdk-3.1 is not in the Debian repos -- register the Microsoft
# package repository first, then install through apt.
if [ "$pkg" == dotnet-sdk-3.1 ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
wget -nv https://packages.microsoft.com/config/debian/10/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
dpkg -i packages-microsoft-prod.deb
apt-get update
apt-get install -y apt-transport-https dotnet-sdk-3.1
fi
continue
fi
apt install -y "$pkg"
done
}
deps=$2"[*]"
"$1" "${!deps}"

55
environment/os/debian-9.sh Executable file
View File

@ -0,0 +1,55 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
TOOLCHAIN_BUILD_DEPS=(
coreutils gcc g++ build-essential make # generic build tools
wget # used for archive download
gnupg # used for archive signature verification
tar gzip bzip2 xz-utils unzip # used for archive unpacking
zlib1g-dev # zlib library used for all builds
libexpat1-dev libipt-dev libbabeltrace-dev libbabeltrace-ctf-dev liblzma-dev python3-dev texinfo # for gdb
libcurl4-openssl-dev # for cmake
libreadline-dev # for cmake and llvm
libffi-dev libxml2-dev # for llvm
libedit-dev libpcre3-dev automake bison # for swig
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz-utils # used for archive unpacking
zlib1g # zlib library used for all builds
libexpat1 libipt1 libbabeltrace1 libbabeltrace-ctf1 liblzma5 python3 # for gdb
libcurl3 # for cmake
libreadline7 # for cmake and llvm
libffi6 libxml2 # for llvm
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make pkg-config # build system
curl wget # for downloading libs
uuid-dev default-jre-headless # required by antlr
libreadline-dev # for memgraph console
libpython3-dev python3-dev # for query modules
libssl-dev
libseccomp-dev
python3 python-virtualenv python3-pip # for qa, macro_benchmark and stress tests
python3-yaml # for the configuration generator
libcurl4-openssl-dev # mg-requests
sbcl # for custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators
mono-runtime mono-mcs nodejs zip unzip default-jdk-headless # for driver tests
)
list() {
echo "$1"
}
check() {
check_all_dpkg "$1"
}
install() {
install_all_apt "$1"
}
deps=$2"[*]"
"$1" "${!deps}"

30
environment/os/template.sh Executable file
View File

@ -0,0 +1,30 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
TOOLCHAIN_BUILD_DEPS=(
pkg
)
TOOLCHAIN_RUN_DEPS=(
pkg
)
MEMGRAPH_BUILD_DEPS=(
pkg
)
list() {
echo "$1"
}
check() {
echo "TODO: Implement ${FUNCNAME[0]}."
exit 1
}
install() {
echo "TODO: Implement ${FUNCNAME[0]}."
exit 1
}
# http://ahmed.amayem.com/bash-indirect-expansion-exploration
deps=$2"[*]"
"$1" "${!deps}"

56
environment/os/ubuntu-18.04.sh Executable file
View File

@ -0,0 +1,56 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
TOOLCHAIN_BUILD_DEPS=(
coreutils gcc g++ build-essential make # generic build tools
wget # archive download
gnupg # archive signature verification
tar gzip bzip2 xz-utils unzip # archive unpacking
zlib1g-dev # zlib library used for all builds
libexpat1-dev libipt-dev libbabeltrace-dev liblzma-dev python3-dev # gdb
texinfo # gdb
libcurl4-openssl-dev # cmake
libreadline-dev # cmake and llvm
libffi-dev libxml2-dev # llvm
libedit-dev libpcre3-dev automake bison # swig
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz-utils # used for archive unpacking
zlib1g # zlib library used for all builds
libexpat1 libipt1 libbabeltrace1 liblzma5 python3 # for gdb
libcurl4 # for cmake
libreadline7 # for cmake and llvm
libffi6 libxml2 # for llvm
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make pkg-config # build system
curl wget # downloading libs
uuid-dev default-jre-headless # required by antlr
libreadline-dev # memgraph console
libpython3-dev python3-dev # for query modules
libssl-dev
libseccomp-dev
python3 virtualenv python3-virtualenv python3-pip # qa, macro bench and stress tests
python3-yaml # the configuration generator
libcurl4-openssl-dev # mg-requests
sbcl # custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators
mono-runtime mono-mcs nodejs zip unzip default-jdk-headless # driver tests
)
list() {
echo "$1"
}
check() {
check_all_dpkg "$1"
}
install() {
apt install -y $1
}
deps=$2"[*]"
"$1" "${!deps}"

78
environment/os/ubuntu-20.04.sh Executable file
View File

@ -0,0 +1,78 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
TOOLCHAIN_BUILD_DEPS=(
coreutils gcc g++ build-essential make # generic build tools
wget # used for archive download
gnupg # used for archive signature verification
tar gzip bzip2 xz-utils unzip # used for archive unpacking
zlib1g-dev # zlib library used for all builds
libexpat1-dev libipt-dev libbabeltrace-dev liblzma-dev python3-dev texinfo # for gdb
libcurl4-openssl-dev # for cmake
libreadline-dev # for cmake and llvm
libffi-dev libxml2-dev # for llvm
libedit-dev libpcre3-dev automake bison # for swig
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz-utils # used for archive unpacking
zlib1g # zlib library used for all builds
libexpat1 libipt2 libbabeltrace1 liblzma5 python3 # for gdb
libcurl4 # for cmake
libreadline8 # for cmake and llvm
libffi7 libxml2 # for llvm
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make pkg-config # build system
curl wget # for downloading libs
uuid-dev default-jre-headless # required by antlr
libreadline-dev # for memgraph console
libpython3-dev python3-dev # for query modules
libssl-dev
libseccomp-dev
netcat # tests are using nc to wait for memgraph
python3 python3-virtualenv python3-pip # for qa, macro_benchmark and stress tests
python3-yaml # for the configuration generator
libcurl4-openssl-dev # mg-requests
sbcl # for custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators
mono-runtime mono-mcs zip unzip default-jdk-headless # for driver tests
dotnet-sdk-3.1 golang nodejs npm
)
list() {
    # Print the supplied package list verbatim (used by the deps dispatcher).
    local packages="$1"
    echo "$packages"
}
check() {
    # Delegate to the shared dpkg-based checker sourced from environment/util.sh.
    check_all_dpkg "$1"
}
install() {
    # Install the space-separated package list in $1 via apt. dotnet-sdk-3.1
    # is special-cased because it lives in Microsoft's package repository,
    # which must be registered first.
    cd "$DIR"
    apt update
    # If the GitHub Actions runner is installed, append LANG to its environment.
    # Python-related tests don't work without the LANG export.
    if [ -d "/home/gh/actions-runner" ]; then
        echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
    else
        echo "NOTE: export LANG=en_US.utf8"
    fi
    # wget is needed below to fetch the Microsoft repo configuration package.
    apt install -y wget
    for pkg in $1; do
        if [ "$pkg" == dotnet-sdk-3.1 ]; then
            # Register Microsoft's apt repository once, then install the SDK.
            if ! dpkg -s dotnet-sdk-3.1 2>/dev/null >/dev/null; then
                wget -nv https://packages.microsoft.com/config/ubuntu/20.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
                dpkg -i packages-microsoft-prod.deb
                apt-get update
                apt-get install -y apt-transport-https dotnet-sdk-3.1
            fi
            continue
        fi
        apt install -y "$pkg"
    done
}
# Dispatch: $1 is the action (list/check/install), $2 names one of the
# dependency arrays above; ${!deps} indirectly expands it to the package list.
deps=$2"[*]"
"$1" "${!deps}"

533
environment/toolchain/v2.sh Executable file
View File

@ -0,0 +1,533 @@
#!/bin/bash -e
# helpers: silence pushd/popd's directory-stack output to keep build logs clean
pushd () { command pushd "$@" > /dev/null; }
popd () { command popd "$@" > /dev/null; }
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
CPUS=$( grep -c processor < /proc/cpuinfo )
cd "$DIR"
source "$DIR/../util.sh"
DISTRO="$(operating_system)"
# toolchain version
TOOLCHAIN_VERSION=2
# package versions used
GCC_VERSION=10.2.0
BINUTILS_VERSION=2.35.1
case "$DISTRO" in
centos-7) # because GDB >= 9 does NOT compile with readline6.
GDB_VERSION=8.3
;;
*)
GDB_VERSION=10.1
;;
esac
CMAKE_VERSION=3.18.4
CPPCHECK_VERSION=2.2
LLVM_VERSION=11.0.0
SWIG_VERSION=4.0.2 # used only for LLVM compilation
# Check for the dependencies.
echo "ALL BUILD PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_BUILD_DEPS)"
$DIR/../os/$DISTRO.sh check TOOLCHAIN_BUILD_DEPS
echo "ALL RUN PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)"
$DIR/../os/$DISTRO.sh check TOOLCHAIN_RUN_DEPS
# check installation directory
NAME=toolchain-v$TOOLCHAIN_VERSION
PREFIX=/opt/$NAME
mkdir -p $PREFIX >/dev/null 2>/dev/null || true
if [ ! -d $PREFIX ] || [ ! -w $PREFIX ]; then
echo "Please make sure that the directory '$PREFIX' exists and is writable by the current user!"
echo
echo "If unsure, execute these commands as root:"
echo " mkdir $PREFIX && chown $USER:$USER $PREFIX"
echo
echo "Press <return> when you have created the directory and granted permissions."
# wait for the directory to be created
while true; do
read
if [ ! -d $PREFIX ] || [ ! -w $PREFIX ]; then
echo
echo "You can't continue before you have created the directory and granted permissions!"
echo
echo "Press <return> when you have created the directory and granted permissions."
else
break
fi
done
fi
# create archives directory
mkdir -p archives
# download all archives
pushd archives
if [ ! -f gcc-$GCC_VERSION.tar.gz ]; then
wget https://ftp.gnu.org/gnu/gcc/gcc-$GCC_VERSION/gcc-$GCC_VERSION.tar.gz
fi
if [ ! -f binutils-$BINUTILS_VERSION.tar.gz ]; then
wget https://ftp.gnu.org/gnu/binutils/binutils-$BINUTILS_VERSION.tar.gz
fi
if [ ! -f gdb-$GDB_VERSION.tar.gz ]; then
wget https://ftp.gnu.org/gnu/gdb/gdb-$GDB_VERSION.tar.gz
fi
if [ ! -f cmake-$CMAKE_VERSION.tar.gz ]; then
wget https://github.com/Kitware/CMake/releases/download/v$CMAKE_VERSION/cmake-$CMAKE_VERSION.tar.gz
fi
if [ ! -f swig-$SWIG_VERSION.tar.gz ]; then
wget https://github.com/swig/swig/archive/rel-$SWIG_VERSION.tar.gz -O swig-$SWIG_VERSION.tar.gz
fi
if [ ! -f cppcheck-$CPPCHECK_VERSION.tar.gz ]; then
wget https://github.com/danmar/cppcheck/archive/$CPPCHECK_VERSION.tar.gz -O cppcheck-$CPPCHECK_VERSION.tar.gz
fi
if [ ! -f llvm-$LLVM_VERSION.src.tar.xz ]; then
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/llvm-$LLVM_VERSION.src.tar.xz
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/clang-$LLVM_VERSION.src.tar.xz
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/lld-$LLVM_VERSION.src.tar.xz
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/clang-tools-extra-$LLVM_VERSION.src.tar.xz
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/compiler-rt-$LLVM_VERSION.src.tar.xz
fi
if [ ! -f pahole-gdb-master.zip ]; then
wget https://github.com/PhilArmstrong/pahole-gdb/archive/master.zip -O pahole-gdb-master.zip
fi
# verify all archives
# NOTE: Verification can fail if the archive is signed by another developer. I
# haven't added commands to download all developer GnuPG keys because the
# download is very slow. If the verification fails for you, figure out who has
# signed the archive and download their public key instead.
GPG="gpg --homedir .gnupg"
KEYSERVER="hkp://keyserver.ubuntu.com"
mkdir -p .gnupg
chmod 700 .gnupg
# verify gcc
if [ ! -f gcc-$GCC_VERSION.tar.gz.sig ]; then
wget https://ftp.gnu.org/gnu/gcc/gcc-$GCC_VERSION/gcc-$GCC_VERSION.tar.gz.sig
fi
# list of valid gcc gnupg keys: https://gcc.gnu.org/mirrors.html
$GPG --keyserver $KEYSERVER --recv-keys 0x3AB00996FC26A641
$GPG --verify gcc-$GCC_VERSION.tar.gz.sig gcc-$GCC_VERSION.tar.gz
# verify binutils
if [ ! -f binutils-$BINUTILS_VERSION.tar.gz.sig ]; then
wget https://ftp.gnu.org/gnu/binutils/binutils-$BINUTILS_VERSION.tar.gz.sig
fi
$GPG --keyserver $KEYSERVER --recv-keys 0xDD9E3C4F
$GPG --verify binutils-$BINUTILS_VERSION.tar.gz.sig binutils-$BINUTILS_VERSION.tar.gz
# verify gdb
if [ ! -f gdb-$GDB_VERSION.tar.gz.sig ]; then
wget https://ftp.gnu.org/gnu/gdb/gdb-$GDB_VERSION.tar.gz.sig
fi
$GPG --keyserver $KEYSERVER --recv-keys 0xFF325CF3
$GPG --verify gdb-$GDB_VERSION.tar.gz.sig gdb-$GDB_VERSION.tar.gz
# verify cmake
if [ ! -f cmake-$CMAKE_VERSION-SHA-256.txt ] || [ ! -f cmake-$CMAKE_VERSION-SHA-256.txt.asc ]; then
wget https://github.com/Kitware/CMake/releases/download/v$CMAKE_VERSION/cmake-$CMAKE_VERSION-SHA-256.txt
wget https://github.com/Kitware/CMake/releases/download/v$CMAKE_VERSION/cmake-$CMAKE_VERSION-SHA-256.txt.asc
# Because CentOS 7 doesn't have the `--ignore-missing` flag for `sha256sum`
# we filter out the missing files from the sums here manually.
cat cmake-$CMAKE_VERSION-SHA-256.txt | grep "cmake-$CMAKE_VERSION.tar.gz" > cmake-$CMAKE_VERSION-SHA-256-filtered.txt
fi
$GPG --keyserver $KEYSERVER --recv-keys 0xC6C265324BBEBDC350B513D02D2CEF1034921684
sha256sum -c cmake-$CMAKE_VERSION-SHA-256-filtered.txt
$GPG --verify cmake-$CMAKE_VERSION-SHA-256.txt.asc cmake-$CMAKE_VERSION-SHA-256.txt
# verify llvm, cfe, lld, clang-tools-extra
if [ ! -f llvm-$LLVM_VERSION.src.tar.xz.sig ]; then
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/llvm-$LLVM_VERSION.src.tar.xz.sig
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/clang-$LLVM_VERSION.src.tar.xz.sig
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/lld-$LLVM_VERSION.src.tar.xz.sig
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/clang-tools-extra-$LLVM_VERSION.src.tar.xz.sig
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/compiler-rt-$LLVM_VERSION.src.tar.xz.sig
fi
# list of valid llvm gnupg keys: https://releases.llvm.org/download.html
$GPG --keyserver $KEYSERVER --recv-keys 0x345AD05D
$GPG --verify llvm-$LLVM_VERSION.src.tar.xz.sig llvm-$LLVM_VERSION.src.tar.xz
$GPG --verify clang-$LLVM_VERSION.src.tar.xz.sig clang-$LLVM_VERSION.src.tar.xz
$GPG --verify lld-$LLVM_VERSION.src.tar.xz.sig lld-$LLVM_VERSION.src.tar.xz
$GPG --verify clang-tools-extra-$LLVM_VERSION.src.tar.xz.sig clang-tools-extra-$LLVM_VERSION.src.tar.xz
$GPG --verify compiler-rt-$LLVM_VERSION.src.tar.xz.sig compiler-rt-$LLVM_VERSION.src.tar.xz
popd
# create build directory
mkdir -p build
pushd build
# compile gcc
if [ ! -f $PREFIX/bin/gcc ]; then
if [ -d gcc-$GCC_VERSION ]; then
rm -rf gcc-$GCC_VERSION
fi
tar -xvf ../archives/gcc-$GCC_VERSION.tar.gz
pushd gcc-$GCC_VERSION
./contrib/download_prerequisites
mkdir build && pushd build
# influenced by: https://buildd.debian.org/status/fetch.php?pkg=gcc-8&arch=amd64&ver=8.3.0-6&stamp=1554588545
../configure -v \
--build=x86_64-linux-gnu \
--host=x86_64-linux-gnu \
--target=x86_64-linux-gnu \
--prefix=$PREFIX \
--disable-multilib \
--with-system-zlib \
--enable-checking=release \
--enable-languages=c,c++,fortran \
--enable-gold=yes \
--enable-ld=yes \
--enable-lto \
--enable-bootstrap \
--disable-vtable-verify \
--disable-werror \
--without-included-gettext \
--enable-threads=posix \
--enable-nls \
--enable-clocale=gnu \
--enable-libstdcxx-debug \
--enable-libstdcxx-time=yes \
--enable-gnu-unique-object \
--enable-libmpx \
--enable-plugin \
--enable-default-pie \
--with-target-system-zlib \
--with-tune=generic \
--without-cuda-driver
#--program-suffix=$( printf "$GCC_VERSION" | cut -d '.' -f 1,2 ) \
make -j$CPUS
# make -k check # run test suite
make install
popd && popd
fi
# activate toolchain
export PATH=$PREFIX/bin:$PATH
export LD_LIBRARY_PATH=$PREFIX/lib64
# compile binutils
if [ ! -f $PREFIX/bin/ld.gold ]; then
if [ -d binutils-$BINUTILS_VERSION ]; then
rm -rf binutils-$BINUTILS_VERSION
fi
tar -xvf ../archives/binutils-$BINUTILS_VERSION.tar.gz
pushd binutils-$BINUTILS_VERSION
mkdir build && pushd build
# influenced by: https://buildd.debian.org/status/fetch.php?pkg=binutils&arch=amd64&ver=2.32-7&stamp=1553247092
env \
CC=gcc \
CXX=g++ \
CFLAGS="-g -O2" \
CXXFLAGS="-g -O2" \
LDFLAGS="" \
../configure \
--build=x86_64-linux-gnu \
--host=x86_64-linux-gnu \
--prefix=$PREFIX \
--enable-ld=default \
--enable-gold \
--enable-lto \
--enable-plugins \
--enable-shared \
--enable-threads \
--with-system-zlib \
--enable-deterministic-archives \
--disable-compressed-debug-sections \
--enable-new-dtags \
--disable-werror
make -j$CPUS
# make -k check # run test suite
make install
popd && popd
fi
# compile gdb
if [ ! -f $PREFIX/bin/gdb ]; then
if [ -d gdb-$GDB_VERSION ]; then
rm -rf gdb-$GDB_VERSION
fi
tar -xvf ../archives/gdb-$GDB_VERSION.tar.gz
pushd gdb-$GDB_VERSION
mkdir build && pushd build
# https://buildd.debian.org/status/fetch.php?pkg=gdb&arch=amd64&ver=8.2.1-2&stamp=1550831554&raw=0
env \
CC=gcc \
CXX=g++ \
CFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security" \
CXXFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security" \
CPPFLAGS="-Wdate-time -D_FORTIFY_SOURCE=2 -fPIC" \
LDFLAGS="-Wl,-z,relro" \
PYTHON="" \
../configure \
--build=x86_64-linux-gnu \
--host=x86_64-linux-gnu \
--prefix=$PREFIX \
--disable-maintainer-mode \
--disable-dependency-tracking \
--disable-silent-rules \
--disable-gdbtk \
--disable-shared \
--without-guile \
--with-system-gdbinit=$PREFIX/etc/gdb/gdbinit \
--with-system-readline \
--with-expat \
--with-system-zlib \
--with-lzma \
--with-babeltrace \
--with-intel-pt \
--enable-tui \
--with-python=python3
make -j$CPUS
make install
popd && popd
fi
# install pahole
if [ ! -d $PREFIX/share/pahole-gdb ]; then
unzip ../archives/pahole-gdb-master.zip
mv pahole-gdb-master $PREFIX/share/pahole-gdb
fi
# setup system gdbinit
if [ ! -f $PREFIX/etc/gdb/gdbinit ]; then
mkdir -p $PREFIX/etc/gdb
cat >$PREFIX/etc/gdb/gdbinit <<EOF
# improve formatting
set print pretty on
set print object on
set print static-members on
set print vtbl on
set print demangle on
set demangle-style gnu-v3
set print sevenbit-strings off
# load libstdc++ pretty printers
add-auto-load-scripts-directory $PREFIX/lib64
add-auto-load-safe-path $PREFIX
# load pahole
python
sys.path.insert(0, "$PREFIX/share/pahole-gdb")
import offsets
import pahole
end
EOF
fi
# compile cmake
if [ ! -f $PREFIX/bin/cmake ]; then
if [ -d cmake-$CMAKE_VERSION ]; then
rm -rf cmake-$CMAKE_VERSION
fi
tar -xvf ../archives/cmake-$CMAKE_VERSION.tar.gz
pushd cmake-$CMAKE_VERSION
# influenced by: https://buildd.debian.org/status/fetch.php?pkg=cmake&arch=amd64&ver=3.13.4-1&stamp=1549799837
echo 'set(CMAKE_SKIP_RPATH ON CACHE BOOL "Skip rpath" FORCE)' >> build-flags.cmake
echo 'set(CMAKE_USE_RELATIVE_PATHS ON CACHE BOOL "Use relative paths" FORCE)' >> build-flags.cmake
echo 'set(CMAKE_C_FLAGS "-g -O2 -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2" CACHE STRING "C flags" FORCE)' >> build-flags.cmake
echo 'set(CMAKE_CXX_FLAGS "-g -O2 -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2" CACHE STRING "C++ flags" FORCE)' >> build-flags.cmake
echo 'set(CMAKE_SKIP_BOOTSTRAP_TEST ON CACHE BOOL "Skip BootstrapTest" FORCE)' >> build-flags.cmake
echo 'set(BUILD_CursesDialog ON CACHE BOOL "Build curses GUI" FORCE)' >> build-flags.cmake
mkdir build && pushd build
../bootstrap \
--prefix=$PREFIX \
--init=../build-flags.cmake \
--parallel=$CPUS \
--system-curl
make -j$CPUS
# make test # run test suite
make install
popd && popd
fi
# compile cppcheck
if [ ! -f $PREFIX/bin/cppcheck ]; then
if [ -d cppcheck-$CPPCHECK_VERSION ]; then
rm -rf cppcheck-$CPPCHECK_VERSION
fi
tar -xvf ../archives/cppcheck-$CPPCHECK_VERSION.tar.gz
pushd cppcheck-$CPPCHECK_VERSION
env \
CC=gcc \
CXX=g++ \
PREFIX=$PREFIX \
FILESDIR=$PREFIX/share/cppcheck \
CFGDIR=$PREFIX/share/cppcheck/cfg \
make -j$CPUS
env \
CC=gcc \
CXX=g++ \
PREFIX=$PREFIX \
FILESDIR=$PREFIX/share/cppcheck \
CFGDIR=$PREFIX/share/cppcheck/cfg \
make install
popd
fi
# compile swig
if [ ! -d swig-$SWIG_VERSION/install ]; then
if [ -d swig-$SWIG_VERSION ]; then
rm -rf swig-$SWIG_VERSION
fi
tar -xvf ../archives/swig-$SWIG_VERSION.tar.gz
mv swig-rel-$SWIG_VERSION swig-$SWIG_VERSION
pushd swig-$SWIG_VERSION
./autogen.sh
mkdir build && pushd build
../configure --prefix=$DIR/build/swig-$SWIG_VERSION/install
make -j$CPUS
make install
popd && popd
fi
# compile llvm
if [ ! -f $PREFIX/bin/clang ]; then
if [ -d llvm-$LLVM_VERSION ]; then
rm -rf llvm-$LLVM_VERSION
fi
tar -xvf ../archives/llvm-$LLVM_VERSION.src.tar.xz
mv llvm-$LLVM_VERSION.src llvm-$LLVM_VERSION
tar -xvf ../archives/clang-$LLVM_VERSION.src.tar.xz
mv clang-$LLVM_VERSION.src llvm-$LLVM_VERSION/tools/clang
tar -xvf ../archives/lld-$LLVM_VERSION.src.tar.xz
mv lld-$LLVM_VERSION.src/ llvm-$LLVM_VERSION/tools/lld
tar -xvf ../archives/clang-tools-extra-$LLVM_VERSION.src.tar.xz
mv clang-tools-extra-$LLVM_VERSION.src/ llvm-$LLVM_VERSION/tools/clang/tools/extra
tar -xvf ../archives/compiler-rt-$LLVM_VERSION.src.tar.xz
mv compiler-rt-$LLVM_VERSION.src/ llvm-$LLVM_VERSION/projects/compiler-rt
pushd llvm-$LLVM_VERSION
mkdir build && pushd build
# activate swig
export PATH=$DIR/build/swig-$SWIG_VERSION/install/bin:$PATH
# influenced by: https://buildd.debian.org/status/fetch.php?pkg=llvm-toolchain-7&arch=amd64&ver=1%3A7.0.1%7E%2Brc2-1%7Eexp1&stamp=1541506173&raw=0
cmake .. \
-DCMAKE_C_COMPILER=$PREFIX/bin/gcc \
-DCMAKE_CXX_COMPILER=$PREFIX/bin/g++ \
-DCMAKE_CXX_LINK_FLAGS="-L$PREFIX/lib64 -Wl,-rpath,$PREFIX/lib64" \
-DCMAKE_INSTALL_PREFIX=$PREFIX \
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
-DCMAKE_CXX_FLAGS_RELWITHDEBINFO="-O2 -DNDEBUG" \
-DCMAKE_CXX_FLAGS=' -fuse-ld=gold -fPIC -Wno-unused-command-line-argument -Wno-unknown-warning-option' \
-DCMAKE_C_FLAGS=' -fuse-ld=gold -fPIC -Wno-unused-command-line-argument -Wno-unknown-warning-option' \
-DLLVM_LINK_LLVM_DYLIB=ON \
-DLLVM_INSTALL_UTILS=ON \
-DLLVM_VERSION_SUFFIX= \
-DLLVM_BUILD_LLVM_DYLIB=ON \
-DLLVM_ENABLE_RTTI=ON \
-DLLVM_ENABLE_FFI=ON \
-DLLVM_BINUTILS_INCDIR=$PREFIX/include/ \
-DLLVM_USE_PERF=yes
make -j$CPUS
make -j$CPUS check-clang # run clang test suite
make -j$CPUS check-lld # run lld test suite
make install
popd && popd
fi
# create README
if [ ! -f $PREFIX/README.md ]; then
cat >$PREFIX/README.md <<EOF
# Memgraph Toolchain v$TOOLCHAIN_VERSION
## Included tools
- GCC $GCC_VERSION
- Binutils $BINUTILS_VERSION
- GDB $GDB_VERSION
- CMake $CMAKE_VERSION
- Cppcheck $CPPCHECK_VERSION
- LLVM (Clang, LLD, compiler-rt, Clang tools extra) $LLVM_VERSION
## Required libraries
In order to be able to run all of these tools you should install the following
packages:
\`\`\`
$($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)
\`\`\`
## Usage
In order to use the toolchain you just have to source the activation script:
\`\`\`
source $PREFIX/activate
\`\`\`
EOF
fi
# create activation script
# NOTE: inside the heredoc, \$-escaped variables are evaluated when the
# generated script is *sourced*; unescaped ones ($PREFIX, $NAME) are baked
# in now, at toolchain build time.
if [ ! -f $PREFIX/activate ]; then
    cat >$PREFIX/activate <<EOF
# This file must be used with "source $PREFIX/activate" *from bash*
# You can't run it directly!
# check for active virtual environments
if [ "\$( type -t deactivate )" != "" ]; then
    echo "You already have an active virtual environment!"
    return 0
fi
# check that we aren't root
if [ "\$USER" == "root" ]; then
    echo "You shouldn't use the toolchain as root!"
    return 0
fi
# save original environment
export ORIG_PATH=\$PATH
export ORIG_PS1=\$PS1
export ORIG_LD_LIBRARY_PATH=\$LD_LIBRARY_PATH
# activate new environment
export PATH=$PREFIX/bin:\$PATH
export PS1="($NAME) \$PS1"
export LD_LIBRARY_PATH=$PREFIX/lib:$PREFIX/lib64
# disable root
function su () {
    echo "You don't want to use root functions while using the toolchain!"
    return 1
}
function sudo () {
    echo "You don't want to use root functions while using the toolchain!"
    return 1
}
# create deactivation function
function deactivate() {
    export PATH=\$ORIG_PATH
    export PS1=\$ORIG_PS1
    export LD_LIBRARY_PATH=\$ORIG_LD_LIBRARY_PATH
    unset ORIG_PATH ORIG_PS1 ORIG_LD_LIBRARY_PATH
    unset -f su sudo deactivate
}
EOF
fi
# create toolchain archive
# The archive name embeds the distro because the binaries are only
# guaranteed to run on the distro they were built on.
if [ ! -f $NAME-binaries-$DISTRO.tar.gz ]; then
    tar --owner=root --group=root -cpvzf $NAME-binaries-$DISTRO.tar.gz -C /opt $NAME
fi

# output final instructions
echo -e "\n\n"
echo "All tools have been built. They are installed in '$PREFIX'."
echo "In order to distribute the tools to someone else, an archive with the toolchain was created in the 'build' directory."
echo "If you want to install the packed tools you should execute the following command:"
echo
# FIX: the printed command previously omitted the -$DISTRO suffix and
# referenced a file that is never created (see the tar invocation above).
echo "    tar -xvzf build/$NAME-binaries-$DISTRO.tar.gz -C /opt"
echo
echo "Because the tools were built on this machine, you should probably change the permissions of the installation directory using:"
echo
echo "    OPTIONAL: chown -R root:root $PREFIX"
echo
echo "In order to use all of the newly compiled tools you should use the prepared activation script:"
echo
echo "    source $PREFIX/activate"
echo
echo "Or, for more advanced uses, you can add the following lines to your script:"
echo
echo "    export PATH=$PREFIX/bin:\$PATH"
echo "    export LD_LIBRARY_PATH=$PREFIX/lib:$PREFIX/lib64"
echo
echo "Enjoy!"

45
environment/util.sh Normal file
View File

@ -0,0 +1,45 @@
#!/bin/bash
operating_system() {
    # Produce a distro identifier such as "ubuntu-20.04" or "centos-7" by
    # joining the ID and VERSION_ID values from /etc/os-release with a dash
    # (sort puts ID before VERSION_ID; sed strips surrounding quotes).
    grep -E '^(VERSION_)?ID=' /etc/os-release | \
        sort | cut -d '=' -f 2- | sed 's/"//g' | paste -s -d '-'
}
check_all_yum() {
    # Exit with status 1, listing the offenders, if any of the
    # space-separated packages in $1 is not installed according to yum.
    local not_installed=""
    for pkg in $1; do
        yum list installed "$pkg" >/dev/null 2>/dev/null || not_installed="$pkg $not_installed"
    done
    if [ -n "$not_installed" ]; then
        echo "MISSING PACKAGES: $not_installed"
        exit 1
    fi
}
check_all_dpkg() {
    # Exit with status 1, listing the offenders, if any of the
    # space-separated packages in $1 is not installed according to dpkg.
    local not_installed=""
    for pkg in $1; do
        dpkg -s "$pkg" >/dev/null 2>/dev/null || not_installed="$pkg $not_installed"
    done
    if [ -n "$not_installed" ]; then
        echo "MISSING PACKAGES: $not_installed"
        exit 1
    fi
}
check_all_dnf() {
    # Exit with status 1, listing the offenders, if any of the
    # space-separated packages in $1 is not installed according to dnf.
    local not_installed=""
    for pkg in $1; do
        dnf list installed "$pkg" >/dev/null 2>/dev/null || not_installed="$pkg $not_installed"
    done
    if [ -n "$not_installed" ]; then
        echo "MISSING PACKAGES: $not_installed"
        exit 1
    fi
}

113
init
View File

@ -1,55 +1,14 @@
#!/bin/bash -e
required_pkgs_apt=(git # source code control
make pkg-config # build system
curl wget # for downloading libs
uuid-dev default-jre-headless # required by antlr
libreadline-dev # for memgraph console
libpython3-dev python3-dev # for query modules
libssl-dev
libseccomp-dev
python3 python-virtualenv python3-pip # for qa, macro_benchmark and stress tests
python3-yaml # for the configuration generator
libcurl4-openssl-dev # mg-requests
sbcl # for custom Lisp C++ preprocessing
)
optional_pkgs_apt=(doxygen graphviz # source documentation generators
php-cli # for user technical documentation generators
mono-runtime mono-mcs nodejs zip unzip default-jdk-headless # for driver tests
)
required_pkgs_yum=(# NOTE: git is too old on CentOS, install it manually from IUS
make pkgconfig # build system
curl wget # for downloading libs
libuuid-devel java-1.8.0-openjdk # required by antlr
readline-devel # for memgraph console
python3-devel # for query modules
openssl-devel
libseccomp-devel
python3 python-virtualenv python3-pip nmap-ncat # for qa, macro_benchmark and stress tests
# NOTE: python3-yaml doesn't exist on CentOS, install it manually using `pip3 install PyYAML`
libcurl-devel # mg-requests
sbcl # for custom Lisp C++ preprocessing
rpm-build rpmlint # for RPM package building
)
optional_pkgs_yum=(doxygen graphviz # source documentation generators
php-cli # for user technical documentation generators
mono-complete nodejs zip unzip java-1.8.0-openjdk-devel # for driver tests
)
use_sudo=0
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "$DIR"
source "$DIR/environment/util.sh"
function print_help () {
echo "Usage: $0 [OPTION]"
echo -e "Check for missing packages and install them if possible.\n"
echo -e "Check for missing packages and setup the project.\n"
echo "Optional arguments:"
echo -e " -s\tuse sudo apt-get for installing packages"
echo -e " -h\tdisplay this help and exit"
}
@ -76,9 +35,6 @@ if [[ $# -gt 1 ]]; then
exit 1
elif [[ $# -eq 1 ]]; then
case "$1" in
-s)
use_sudo=1
;;
-h)
print_help
exit 0
@ -91,65 +47,10 @@ elif [[ $# -eq 1 ]]; then
esac
fi
echo "Started installing dependencies for Memgraph"
required_missing=0
# install all dependencies on supported operating systems
if command -v apt-get >/dev/null; then
for pkg in ${required_pkgs_apt[@]}; do
if dpkg -s $pkg 2>/dev/null >/dev/null; then
echo "Found $pkg"
elif (( $use_sudo )); then
echo "Installing $pkg"
if [[ ! `sudo apt-get -y install $pkg` ]]; then
echo "Didn't install $pkg [required]"
required_missing=1
fi
else
echo "Missing $pkg [required]"
required_missing=1
fi
done
for pkg in ${optional_pkgs_apt[@]}; do
if dpkg -s $pkg 2>/dev/null >/dev/null; then
echo "Found $pkg [optional]"
else
echo "Missing $pkg [optional]"
fi
done
elif command -v yum >/dev/null; then
for pkg in ${required_pkgs_yum[@]}; do
if yum list installed $pkg 2>/dev/null >/dev/null; then
echo "Found $pkg"
elif (( $use_sudo )); then
echo "Installing $pkg"
if [[ ! `sudo yum install -y $pkg` ]]; then
echo "Didn't install $pkg [required]"
required_missing=1
fi
else
echo "Missing $pkg [required]"
required_missing=1
fi
done
for pkg in ${optional_pkgs_yum[@]}; do
if yum list installed $pkg 2>/dev/null >/dev/null; then
echo "Found $pkg [optional]"
else
echo "Missing $pkg [optional]"
fi
done
else
echo "Unsupported distribution!"
exit 1
fi
if (( $required_missing )); then
echo "Missing required packages. EXITING!"
echo "Please, install required packages and rerun $0 again."
exit 2
fi
DISTRO=$(operating_system)
echo "ALL BUILD PACKAGES: $($DIR/environment/os/$DISTRO.sh list MEMGRAPH_BUILD_DEPS)"
$DIR/environment/os/$DISTRO.sh check MEMGRAPH_BUILD_DEPS
echo "All packages are in-place..."
# create a default build directory
mkdir -p ./build

View File

@ -195,7 +195,9 @@ import_external_library(zlib STATIC
import_external_library(rocksdb STATIC
${CMAKE_CURRENT_SOURCE_DIR}/rocksdb/lib/librocksdb.a
${CMAKE_CURRENT_SOURCE_DIR}/rocksdb/include
CMAKE_ARGS -DUSE_RTTI=ON
DEPENDS gflags-proj
CMAKE_ARGS -Dgflags_DIR=${CMAKE_CURRENT_SOURCE_DIR}/gflags/lib/cmake/gflags
-DUSE_RTTI=ON
-DWITH_TESTS=OFF
-DCMAKE_INSTALL_LIBDIR=lib
-DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=true

View File

@ -65,7 +65,7 @@ fmt_cxx14_fix="b9aaa507fc49680d037fd84c043f747a395bce04"
clone https://github.com/fmtlib/fmt.git fmt $fmt_tag $fmt_cxx14_fix
# rapidcheck
rapidcheck_tag="853e14f0f4313a9eb3c71e24848373e7b843dfd1" # Jun 23, 2017
rapidcheck_tag="7bc7d302191a4f3d0bf005692677126136e02f60" # (2020-05-04)
clone https://github.com/emil-e/rapidcheck.git rapidcheck $rapidcheck_tag
# google benchmark
@ -111,10 +111,8 @@ clone https://github.com/madler/zlib.git zlib $zlib_tag
# remove shared library from install dependencies
sed -i 's/install(TARGETS zlib zlibstatic/install(TARGETS zlibstatic/g' zlib/CMakeLists.txt
rocksdb_tag="641fae60f63619ed5d0c9d9e4c4ea5a0ffa3e253" # v5.18.3 Feb 11, 2019
rocksdb_tag="f3e33549c151f30ac4eb7c22356c6d0331f37652" # (2020-10-14)
clone https://github.com/facebook/rocksdb.git rocksdb $rocksdb_tag
# fix compilation flags to work with clang 8
sed -i 's/-Wshadow/-Wno-defaulted-function-deleted/' rocksdb/CMakeLists.txt
# remove shared library from install dependencies
sed -i 's/TARGETS ${ROCKSDB_SHARED_LIB}/TARGETS ${ROCKSDB_SHARED_LIB} OPTIONAL/' rocksdb/CMakeLists.txt

View File

@ -2,6 +2,7 @@
#include <exception>
#include <numeric>
#include <stdexcept>
#include <unordered_map>
#include <unordered_set>
#include <vector>
@ -17,9 +18,7 @@ Graph::Graph(uint32_t n_nodes) : n_nodes_(n_nodes), total_w_(0) {
std::iota(community_.begin(), community_.end(), 0);
}
uint32_t Graph::Size() const {
return n_nodes_;
}
uint32_t Graph::Size() const { return n_nodes_; }
uint32_t Graph::Community(uint32_t node) const { return community_.at(node); }
@ -63,20 +62,15 @@ uint32_t Graph::Degree(uint32_t node) const {
return static_cast<uint32_t>(adj_list_.at(node).size());
}
double Graph::IncidentWeight(uint32_t node) const {
return inc_w_.at(node);
}
double Graph::IncidentWeight(uint32_t node) const { return inc_w_.at(node); }
double Graph::TotalWeight() const {
return total_w_;
}
double Graph::TotalWeight() const { return total_w_; }
double Graph::Modularity() const {
double ret = 0;
// Since all weights should be positive, this implies that our graph has
// no edges.
if (total_w_ == 0)
return 0;
if (total_w_ == 0) return 0;
std::unordered_map<uint32_t, double> weight_c;
std::unordered_map<uint32_t, double> degree_c;
@ -102,4 +96,4 @@ const std::vector<Neighbour> &Graph::Neighbours(uint32_t node) const {
return adj_list_.at(node);
}
} // namespace comdata
} // namespace comdata

View File

@ -1,6 +1,7 @@
#include "mg_procedure.h"
#include <exception>
#include <string>
#include <unordered_map>
#include "algorithms/algorithms.hpp"
@ -109,7 +110,7 @@ std::optional<comdata::Graph> RunLouvain(
}
void communities(const mgp_list *args, const mgp_graph *graph,
mgp_result *result, mgp_memory *memory) {
mgp_result *result, mgp_memory *memory) {
try {
// Normalize vertex ids
auto mem_to_louv_id = NormalizeVertexIds(graph, result, memory);
@ -161,7 +162,7 @@ void communities(const mgp_list *args, const mgp_graph *graph,
}
void modularity(const mgp_list *args, const mgp_graph *graph,
mgp_result *result, mgp_memory *memory) {
mgp_result *result, mgp_memory *memory) {
try {
// Normalize vertex ids
auto mem_to_louv_id = NormalizeVertexIds(graph, result, memory);
@ -172,7 +173,8 @@ void modularity(const mgp_list *args, const mgp_graph *graph,
if (!louvain_graph) return;
// Return graph modularity after Louvain
// TODO(ipaljak) - consider allowing the user to specify seed communities and
// TODO(ipaljak) - consider allowing the user to specify seed communities
// and
// yield modularity values both before and after running
// louvain.
mgp_result_record *record = mgp_result_new_record(result);
@ -203,7 +205,7 @@ void modularity(const mgp_list *args, const mgp_graph *graph,
}
}
} // namespace
} // namespace
extern "C" int mgp_init_module(struct mgp_module *module,
struct mgp_memory *memory) {
@ -215,7 +217,7 @@ extern "C" int mgp_init_module(struct mgp_module *module,
return 1;
struct mgp_proc *modularity_proc =
mgp_module_add_read_procedure(module, "modularity", modularity);
mgp_module_add_read_procedure(module, "modularity", modularity);
if (!modularity_proc) return 1;
if (!mgp_proc_add_result(modularity_proc, "modularity", mgp_type_float()))
return 1;
@ -223,6 +225,4 @@ extern "C" int mgp_init_module(struct mgp_module *module,
return 0;
}
extern "C" int mgp_shutdown_module() {
return 0;
}
extern "C" int mgp_shutdown_module() { return 0; }

View File

@ -1,3 +1,6 @@
#include <gflags/gflags.h>
#include <glog/logging.h>
#include <algorithm>
#include <cstdio>
#include <filesystem>
@ -6,9 +9,6 @@
#include <regex>
#include <unordered_map>
#include <gflags/gflags.h>
#include <glog/logging.h>
#include "helpers.hpp"
#include "storage/v2/storage.hpp"
#include "utils/exceptions.hpp"
@ -326,10 +326,8 @@ std::pair<std::vector<std::string>, uint64_t> ReadRow(std::istream &stream) {
}
if (FLAGS_trim_strings) {
for (size_t i = 0; i < row.size(); ++i) {
std::string trimmed(utils::Trim(row[i]));
row[i] = std::move(trimmed);
}
std::transform(std::begin(row), std::end(row), std::begin(row),
[](const auto &item) { return utils::Trim(item); });
}
return {std::move(row), lines_count};
@ -736,16 +734,14 @@ int main(int argc, char *argv[]) {
}
std::unordered_map<NodeId, storage::Gid> node_id_map;
storage::Storage store{
{.durability =
{.storage_directory = FLAGS_data_directory,
.recover_on_startup = false,
.snapshot_wal_mode =
storage::Config::Durability::SnapshotWalMode::DISABLED,
.snapshot_on_exit = true},
.items = {
.properties_on_edges = FLAGS_storage_properties_on_edges,
}}};
storage::Storage store{{
.items = {.properties_on_edges = FLAGS_storage_properties_on_edges},
.durability = {.storage_directory = FLAGS_data_directory,
.recover_on_startup = false,
.snapshot_wal_mode =
storage::Config::Durability::SnapshotWalMode::DISABLED,
.snapshot_on_exit = true},
}};
utils::Timer load_timer;

View File

@ -35,17 +35,22 @@ PyObject *DisallowPickleAndCopy(PyObject *self, PyObject *Py_UNUSED(ignored)) {
// method for checking this by our higher level API in `mgp` module. Python only
// does shallow copies by default, and we do not provide deep copy of
// `_mgp.Graph`, so this validity concept should work fine.
//
// clang-format off
struct PyGraph {
PyObject_HEAD
const mgp_graph *graph;
mgp_memory *memory;
};
// clang-format on
// clang-format off
struct PyVerticesIterator {
PyObject_HEAD
mgp_vertices_iterator *it;
PyGraph *py_graph;
};
// clang-format on
PyObject *MakePyVertex(const mgp_vertex &vertex, PyGraph *py_graph);
@ -90,21 +95,25 @@ static PyMethodDef PyVerticesIteratorMethods[] = {
{nullptr},
};
// clang-format off
static PyTypeObject PyVerticesIteratorType = {
PyVarObject_HEAD_INIT(nullptr, 0)
.tp_name = "_mgp.VerticesIterator",
.tp_doc = "Wraps struct mgp_vertices_iterator.",
.tp_basicsize = sizeof(PyVerticesIterator),
.tp_flags = Py_TPFLAGS_DEFAULT,
.tp_methods = PyVerticesIteratorMethods,
.tp_dealloc = reinterpret_cast<destructor>(PyVerticesIteratorDealloc),
.tp_flags = Py_TPFLAGS_DEFAULT,
.tp_doc = "Wraps struct mgp_vertices_iterator.",
.tp_methods = PyVerticesIteratorMethods,
};
// clang-format on
// clang-format off
struct PyEdgesIterator {
PyObject_HEAD
mgp_edges_iterator *it;
PyGraph *py_graph;
};
// clang-format on
PyObject *MakePyEdge(const mgp_edge &edge, PyGraph *py_graph);
@ -149,15 +158,17 @@ static PyMethodDef PyEdgesIteratorMethods[] = {
{nullptr},
};
// clang-format off
static PyTypeObject PyEdgesIteratorType = {
PyVarObject_HEAD_INIT(nullptr, 0)
.tp_name = "_mgp.EdgesIterator",
.tp_doc = "Wraps struct mgp_edges_iterator.",
.tp_basicsize = sizeof(PyEdgesIterator),
.tp_flags = Py_TPFLAGS_DEFAULT,
.tp_methods = PyEdgesIteratorMethods,
.tp_dealloc = reinterpret_cast<destructor>(PyEdgesIteratorDealloc),
.tp_flags = Py_TPFLAGS_DEFAULT,
.tp_doc = "Wraps struct mgp_edges_iterator.",
.tp_methods = PyEdgesIteratorMethods,
};
// clang-format on
PyObject *PyGraphInvalidate(PyGraph *self, PyObject *Py_UNUSED(ignored)) {
self->graph = nullptr;
@ -232,14 +243,16 @@ static PyMethodDef PyGraphMethods[] = {
{nullptr},
};
// clang-format off
static PyTypeObject PyGraphType = {
PyVarObject_HEAD_INIT(nullptr, 0)
.tp_name = "_mgp.Graph",
.tp_doc = "Wraps struct mgp_graph.",
.tp_basicsize = sizeof(PyGraph),
.tp_flags = Py_TPFLAGS_DEFAULT,
.tp_doc = "Wraps struct mgp_graph.",
.tp_methods = PyGraphMethods,
};
// clang-format on
PyObject *MakePyGraph(const mgp_graph *graph, mgp_memory *memory) {
CHECK(!graph || (graph && memory));
@ -250,18 +263,22 @@ PyObject *MakePyGraph(const mgp_graph *graph, mgp_memory *memory) {
return reinterpret_cast<PyObject *>(py_graph);
}
// clang-format off
struct PyCypherType {
PyObject_HEAD
const mgp_type *type;
};
// clang-format on
// clang-format off
static PyTypeObject PyCypherTypeType = {
PyVarObject_HEAD_INIT(nullptr, 0)
.tp_name = "_mgp.Type",
.tp_doc = "Wraps struct mgp_type.",
.tp_basicsize = sizeof(PyCypherType),
.tp_flags = Py_TPFLAGS_DEFAULT,
.tp_doc = "Wraps struct mgp_type.",
};
// clang-format on
PyObject *MakePyCypherType(const mgp_type *type) {
CHECK(type);
@ -271,10 +288,12 @@ PyObject *MakePyCypherType(const mgp_type *type) {
return reinterpret_cast<PyObject *>(py_type);
}
// clang-format off
struct PyQueryProc {
PyObject_HEAD
mgp_proc *proc;
};
// clang-format on
PyObject *PyQueryProcAddArg(PyQueryProc *self, PyObject *args) {
CHECK(self->proc);
@ -384,19 +403,23 @@ static PyMethodDef PyQueryProcMethods[] = {
{nullptr},
};
// clang-format off
static PyTypeObject PyQueryProcType = {
PyVarObject_HEAD_INIT(nullptr, 0)
.tp_name = "_mgp.Proc",
.tp_doc = "Wraps struct mgp_proc.",
.tp_basicsize = sizeof(PyQueryProc),
.tp_flags = Py_TPFLAGS_DEFAULT,
.tp_doc = "Wraps struct mgp_proc.",
.tp_methods = PyQueryProcMethods,
};
// clang-format on
// clang-format off
struct PyQueryModule {
PyObject_HEAD
mgp_module *module;
};
// clang-format on
py::Object MgpListToPyTuple(const mgp_list *list, PyGraph *py_graph) {
CHECK(list);
@ -631,14 +654,16 @@ static PyMethodDef PyQueryModuleMethods[] = {
{nullptr},
};
// clang-format off
static PyTypeObject PyQueryModuleType = {
PyVarObject_HEAD_INIT(nullptr, 0)
.tp_name = "_mgp.Module",
.tp_doc = "Wraps struct mgp_module.",
.tp_basicsize = sizeof(PyQueryModule),
.tp_flags = Py_TPFLAGS_DEFAULT,
.tp_doc = "Wraps struct mgp_module.",
.tp_methods = PyQueryModuleMethods,
};
// clang-format on
PyObject *MakePyQueryModule(mgp_module *module) {
CHECK(module);
@ -736,6 +761,7 @@ static PyMethodDef PyMgpModuleMethods[] = {
{nullptr},
};
// clang-format off
static PyModuleDef PyMgpModule = {
PyModuleDef_HEAD_INIT,
.m_name = "_mgp",
@ -743,12 +769,15 @@ static PyModuleDef PyMgpModule = {
.m_size = -1,
.m_methods = PyMgpModuleMethods,
};
// clang-format on
// clang-format off
struct PyPropertiesIterator {
PyObject_HEAD
mgp_properties_iterator *it;
PyGraph *py_graph;
};
// clang-format on
void PyPropertiesIteratorDealloc(PyPropertiesIterator *self) {
CHECK(self->it);
@ -797,21 +826,25 @@ static PyMethodDef PyPropertiesIteratorMethods[] = {
{nullptr},
};
// clang-format off
static PyTypeObject PyPropertiesIteratorType = {
PyVarObject_HEAD_INIT(nullptr, 0)
.tp_name = "_mgp.PropertiesIterator",
.tp_doc = "Wraps struct mgp_properties_iterator.",
.tp_basicsize = sizeof(PyPropertiesIterator),
.tp_flags = Py_TPFLAGS_DEFAULT,
.tp_methods = PyPropertiesIteratorMethods,
.tp_dealloc = reinterpret_cast<destructor>(PyPropertiesIteratorDealloc),
.tp_flags = Py_TPFLAGS_DEFAULT,
.tp_doc = "Wraps struct mgp_properties_iterator.",
.tp_methods = PyPropertiesIteratorMethods,
};
// clang-format on
// clang-format off
struct PyEdge {
PyObject_HEAD
mgp_edge *edge;
PyGraph *py_graph;
};
// clang-format on
PyObject *PyEdgeGetTypeName(PyEdge *self, PyObject *Py_UNUSED(ignored)) {
CHECK(self);
@ -929,16 +962,18 @@ static PyMethodDef PyEdgeMethods[] = {
PyObject *PyEdgeRichCompare(PyObject *self, PyObject *other, int op);
// clang-format off
static PyTypeObject PyEdgeType = {
PyVarObject_HEAD_INIT(nullptr, 0)
.tp_name = "_mgp.Edge",
.tp_doc = "Wraps struct mgp_edge.",
.tp_basicsize = sizeof(PyEdge),
.tp_flags = Py_TPFLAGS_DEFAULT,
.tp_methods = PyEdgeMethods,
.tp_dealloc = reinterpret_cast<destructor>(PyEdgeDealloc),
.tp_flags = Py_TPFLAGS_DEFAULT,
.tp_doc = "Wraps struct mgp_edge.",
.tp_richcompare = PyEdgeRichCompare,
.tp_methods = PyEdgeMethods,
};
// clang-format on
/// Create an instance of `_mgp.Edge` class.
///
@ -980,11 +1015,13 @@ PyObject *PyEdgeRichCompare(PyObject *self, PyObject *other, int op) {
return PyBool_FromLong(mgp_edge_equal(e1->edge, e2->edge));
}
// clang-format off
struct PyVertex {
PyObject_HEAD
mgp_vertex *vertex;
PyGraph *py_graph;
};
// clang-format on
void PyVertexDealloc(PyVertex *self) {
CHECK(self->vertex);
@ -1148,16 +1185,18 @@ static PyMethodDef PyVertexMethods[] = {
PyObject *PyVertexRichCompare(PyObject *self, PyObject *other, int op);
// clang-format off
static PyTypeObject PyVertexType = {
PyVarObject_HEAD_INIT(nullptr, 0)
.tp_name = "_mgp.Vertex",
.tp_doc = "Wraps struct mgp_vertex.",
.tp_basicsize = sizeof(PyVertex),
.tp_flags = Py_TPFLAGS_DEFAULT,
.tp_methods = PyVertexMethods,
.tp_dealloc = reinterpret_cast<destructor>(PyVertexDealloc),
.tp_flags = Py_TPFLAGS_DEFAULT,
.tp_doc = "Wraps struct mgp_vertex.",
.tp_richcompare = PyVertexRichCompare,
.tp_methods = PyVertexMethods,
};
// clang-format on
PyObject *MakePyVertex(mgp_vertex *vertex, PyGraph *py_graph) {
CHECK(vertex);
@ -1202,11 +1241,13 @@ PyObject *PyVertexRichCompare(PyObject *self, PyObject *other, int op) {
return PyBool_FromLong(mgp_vertex_equal(v1->vertex, v2->vertex));
}
// clang-format off
struct PyPath {
PyObject_HEAD
mgp_path *path;
PyGraph *py_graph;
};
// clang-format on
void PyPathDealloc(PyPath *self) {
CHECK(self->path);
@ -1308,15 +1349,17 @@ static PyMethodDef PyPathMethods[] = {
{nullptr},
};
// clang-format off
static PyTypeObject PyPathType = {
PyVarObject_HEAD_INIT(nullptr, 0)
.tp_name = "_mgp.Path",
.tp_doc = "Wraps struct mgp_path.",
.tp_basicsize = sizeof(PyPath),
.tp_flags = Py_TPFLAGS_DEFAULT,
.tp_methods = PyPathMethods,
.tp_dealloc = reinterpret_cast<destructor>(PyPathDealloc),
.tp_flags = Py_TPFLAGS_DEFAULT,
.tp_doc = "Wraps struct mgp_path.",
.tp_methods = PyPathMethods,
};
// clang-format on
PyObject *MakePyPath(mgp_path *path, PyGraph *py_graph) {
CHECK(path);

View File

@ -1,9 +1,9 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__:`Message`:`Comment`:`Entry0`:`hello` {__mg_id__: 0, `country`: "Croatia", `content`: "yes", `browser`: "Chrome"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`Entry1` {__mg_id__: 1, `country`: "United Kingdom", `content`: "thanks", `browser`: "Chrome"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`Entry0`:`hello` {__mg_id__: 0, `country`: "Croatia", `browser`: "Chrome", `content`: "yes"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`Entry1` {__mg_id__: 1, `country`: "United Kingdom", `browser`: "Chrome", `content`: "thanks"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`Entry2`:`world` {__mg_id__: 2, `country`: "Germany", `content`: "LOL"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`Entry3` {__mg_id__: 3, `country`: "France", `content`: "I see", `browser`: "Firefox"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`Entry4`:`this`:`is`:`a`:` lot `:`of`:` labels` {__mg_id__: 4, `country`: "Italy", `content`: "fine", `browser`: "Internet Explorer"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`Entry3` {__mg_id__: 3, `country`: "France", `browser`: "Firefox", `content`: "I see"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`Entry4`:`this`:`is`:`a`:` lot `:`of`:` labels` {__mg_id__: 4, `country`: "Italy", `browser`: "Internet Explorer", `content`: "fine"});
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 1 AND v.__mg_id__ = 2 CREATE (u)-[:`KNOWS` {`value`: ["5", " asd", " helloworld"]}]->(v);
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 4 AND v.__mg_id__ = 0 CREATE (u)-[:`KNOWS` {`value`: ["6", "hmmm"]}]->(v);
DROP INDEX ON :__mg_vertex__(__mg_id__);

View File

@ -1,9 +1,9 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__:`Message`:`Comment`:`Entry0`:`hello` {__mg_id__: 0, `country`: "Croatia", `content`: "yes", `browser`: "Chrome"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`Entry1` {__mg_id__: 1, `country`: "United Kingdom", `content`: "thanks", `browser`: "Chrome"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`Entry0`:`hello` {__mg_id__: 0, `country`: "Croatia", `browser`: "Chrome", `content`: "yes"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`Entry1` {__mg_id__: 1, `country`: "United Kingdom", `browser`: "Chrome", `content`: "thanks"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`Entry2`:`world` {__mg_id__: 2, `country`: "Germany", `content`: "LOL"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`Entry3` {__mg_id__: 3, `country`: "France", `content`: "I see", `browser`: "Firefox"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`Entry4`:`this`:`is`:`a`:` lot `:`of`:` labels` {__mg_id__: 4, `country`: "Italy", `content`: "fine", `browser`: "Internet Explorer"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`Entry3` {__mg_id__: 3, `country`: "France", `browser`: "Firefox", `content`: "I see"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`Entry4`:`this`:`is`:`a`:` lot `:`of`:` labels` {__mg_id__: 4, `country`: "Italy", `browser`: "Internet Explorer", `content`: "fine"});
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 1 AND v.__mg_id__ = 2 CREATE (u)-[:`KNOWS` {`value`: ["5", " asd", " helloworld"]}]->(v);
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 4 AND v.__mg_id__ = 0 CREATE (u)-[:`KNOWS` {`value`: ["6", "hmmm"]}]->(v);
DROP INDEX ON :__mg_vertex__(__mg_id__);

View File

@ -1,9 +1,9 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 0, `id`: "0", `country`: "Croatia", `browser`: "Chrome", `content`: "yes"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 1, `id`: "1", `country`: "United Kingdom", `browser`: "Chrome", `content`: "thanks"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 2, `id`: "2", `country`: "Germany", `content`: "LOL"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 3, `id`: "3", `country`: "France", `browser`: "Firefox", `content`: "I see"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 4, `id`: "4", `country`: "Italy", `browser`: "Internet Explorer", `content`: "fine"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 0, `content`: "yes", `browser`: "Chrome", `country`: "Croatia", `id`: "0"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 1, `content`: "thanks", `browser`: "Chrome", `country`: "United Kingdom", `id`: "1"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 2, `content`: "LOL", `country`: "Germany", `id`: "2"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 3, `content`: "I see", `browser`: "Firefox", `country`: "France", `id`: "3"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 4, `content`: "fine", `browser`: "Internet Explorer", `country`: "Italy", `id`: "4"});
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 0 AND v.__mg_id__ = 1 CREATE (u)-[:`LIKES`]->(v);
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 0 AND v.__mg_id__ = 2 CREATE (u)-[:`VISITED`]->(v);
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 1 AND v.__mg_id__ = 2 CREATE (u)-[:`FOLLOWS`]->(v);

View File

@ -1,6 +1,6 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `id`: "0", `value`: "world fgh"});
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `value`: "world fgh", `id`: "0"});
CREATE (:__mg_vertex__ {__mg_id__: 1, `test`: "string", `id`: "1"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "will this work?", `id`: "2", `value`: "hello"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "will this work?", `value`: "hello", `id`: "2"});
DROP INDEX ON :__mg_vertex__(__mg_id__);
MATCH (u) REMOVE u:__mg_vertex__, u.__mg_id__;

View File

@ -1,6 +1,6 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `id`: "0", `value`: "ÖworldÖÖÖ fÖgh"});
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `value`: "ÖworldÖÖÖ fÖgh", `id`: "0"});
CREATE (:__mg_vertex__ {__mg_id__: 1, `test`: "string", `id`: "1"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "wilÖl this work?Ö", `id`: "2", `value`: "hello"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "wilÖl this work?Ö", `value`: "hello", `id`: "2"});
DROP INDEX ON :__mg_vertex__(__mg_id__);
MATCH (u) REMOVE u:__mg_vertex__, u.__mg_id__;

View File

@ -1,6 +1,6 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `id`: "0", `value`: "Öworld,Ö,ÖÖ, fÖ,gh"});
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `value`: "Öworld,Ö,ÖÖ, fÖ,gh", `id`: "0"});
CREATE (:__mg_vertex__ {__mg_id__: 1, `test`: "string", `id`: "1"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "wilÖl t,his work?Ö", `id`: "2", `value`: "hello"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "wilÖl t,his work?Ö", `value`: "hello", `id`: "2"});
DROP INDEX ON :__mg_vertex__(__mg_id__);
MATCH (u) REMOVE u:__mg_vertex__, u.__mg_id__;

View File

@ -1,6 +1,6 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `id`: "0", `value`: "woÖrld"});
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `value`: "woÖrld", `id`: "0"});
CREATE (:__mg_vertex__ {__mg_id__: 1, `test`: "string", `id`: "1"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "wilÖl this work?ÖÖ", `id`: "2", `value`: "hello"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "wilÖl this work?ÖÖ", `value`: "hello", `id`: "2"});
DROP INDEX ON :__mg_vertex__(__mg_id__);
MATCH (u) REMOVE u:__mg_vertex__, u.__mg_id__;

View File

@ -1,6 +1,6 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `id`: "0", `value`: "world"});
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `value`: "world", `id`: "0"});
CREATE (:__mg_vertex__ {__mg_id__: 1, `test`: "string", `id`: "1"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "my", `id`: "2", `value`: "hello"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "my", `value`: "hello", `id`: "2"});
DROP INDEX ON :__mg_vertex__(__mg_id__);
MATCH (u) REMOVE u:__mg_vertex__, u.__mg_id__;

View File

@ -1,6 +1,6 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `id`: "0", `value`: "world fgh"});
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `value`: "world fgh", `id`: "0"});
CREATE (:__mg_vertex__ {__mg_id__: 1, `test`: "string", `id`: "1"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "will this work?", `id`: "2", `value`: "hello"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "will this work?", `value`: "hello", `id`: "2"});
DROP INDEX ON :__mg_vertex__(__mg_id__);
MATCH (u) REMOVE u:__mg_vertex__, u.__mg_id__;

View File

@ -1,6 +1,6 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `id`: "0", `value`: "world"});
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `value`: "world", `id`: "0"});
CREATE (:__mg_vertex__ {__mg_id__: 1, `test`: "string", `id`: "1"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "my", `id`: "2", `value`: "hello"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "my", `value`: "hello", `id`: "2"});
DROP INDEX ON :__mg_vertex__(__mg_id__);
MATCH (u) REMOVE u:__mg_vertex__, u.__mg_id__;

View File

@ -1,6 +1,6 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `id`: "0", `value`: "\"world\"\"\" f\"gh"});
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `value`: "\"world\"\"\" f\"gh", `id`: "0"});
CREATE (:__mg_vertex__ {__mg_id__: 1, `test`: "string", `id`: "1"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "wil\"l this work?\"", `id`: "2", `value`: "hello"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "wil\"l this work?\"", `value`: "hello", `id`: "2"});
DROP INDEX ON :__mg_vertex__(__mg_id__);
MATCH (u) REMOVE u:__mg_vertex__, u.__mg_id__;

View File

@ -1,6 +1,6 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `id`: "0", `value`: "\"worldÖ\"Ö\"\"Ö f\"Ögh"});
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `value`: "\"worldÖ\"Ö\"\"Ö f\"Ögh", `id`: "0"});
CREATE (:__mg_vertex__ {__mg_id__: 1, `test`: "string", `id`: "1"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "wil\"l tÖhis work?\"", `id`: "2", `value`: "hello"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "wil\"l tÖhis work?\"", `value`: "hello", `id`: "2"});
DROP INDEX ON :__mg_vertex__(__mg_id__);
MATCH (u) REMOVE u:__mg_vertex__, u.__mg_id__;

View File

@ -1,6 +1,6 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `id`: "0", `value`: "wo\"rld"});
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `value`: "wo\"rld", `id`: "0"});
CREATE (:__mg_vertex__ {__mg_id__: 1, `test`: "string", `id`: "1"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "wil\"l this work?\"\"", `id`: "2", `value`: "hello"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "wil\"l this work?\"\"", `value`: "hello", `id`: "2"});
DROP INDEX ON :__mg_vertex__(__mg_id__);
MATCH (u) REMOVE u:__mg_vertex__, u.__mg_id__;

View File

@ -1,6 +1,6 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `id`: "0", `value`: "world fgh"});
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `value`: "world fgh", `id`: "0"});
CREATE (:__mg_vertex__ {__mg_id__: 1, `test`: "string", `id`: "1"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "will this work?", `id`: "2", `value`: "hello"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "will this work?", `value`: "hello", `id`: "2"});
DROP INDEX ON :__mg_vertex__(__mg_id__);
MATCH (u) REMOVE u:__mg_vertex__, u.__mg_id__;

View File

@ -1,6 +1,6 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `id`: "0", `value`: "\"world\"\"\" f\"gh"});
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `value`: "\"world\"\"\" f\"gh", `id`: "0"});
CREATE (:__mg_vertex__ {__mg_id__: 1, `test`: "string", `id`: "1"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "wil\"l this work?\"", `id`: "2", `value`: "hello"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "wil\"l this work?\"", `value`: "hello", `id`: "2"});
DROP INDEX ON :__mg_vertex__(__mg_id__);
MATCH (u) REMOVE u:__mg_vertex__, u.__mg_id__;

View File

@ -1,6 +1,6 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `id`: "0", `value`: "\"world,\",\"\", f\",gh"});
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `value`: "\"world,\",\"\", f\",gh", `id`: "0"});
CREATE (:__mg_vertex__ {__mg_id__: 1, `test`: "string", `id`: "1"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "wil\"l t,his work?\"", `id`: "2", `value`: "hello"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "wil\"l t,his work?\"", `value`: "hello", `id`: "2"});
DROP INDEX ON :__mg_vertex__(__mg_id__);
MATCH (u) REMOVE u:__mg_vertex__, u.__mg_id__;

View File

@ -1,6 +1,6 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `id`: "0", `value`: "wo\"rld"});
CREATE (:__mg_vertex__ {__mg_id__: 0, `test`: "asd", `value`: "wo\"rld", `id`: "0"});
CREATE (:__mg_vertex__ {__mg_id__: 1, `test`: "string", `id`: "1"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "wil\"l this work?\"\"", `id`: "2", `value`: "hello"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `test`: "wil\"l this work?\"\"", `value`: "hello", `id`: "2"});
DROP INDEX ON :__mg_vertex__(__mg_id__);
MATCH (u) REMOVE u:__mg_vertex__, u.__mg_id__;

View File

@ -1,14 +1,14 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__ {__mg_id__: 0, `country`: "Austria", `value`: 6});
CREATE (:__mg_vertex__ {__mg_id__: 1, `country`: "Hungary", `value`: 7});
CREATE (:__mg_vertex__ {__mg_id__: 2, `country`: "Romania", `value`: 8});
CREATE (:__mg_vertex__ {__mg_id__: 3, `country`: "Bulgaria", `value`: 9});
CREATE (:__mg_vertex__ {__mg_id__: 4, `country`: "Spain", `value`: 10});
CREATE (:__mg_vertex__ {__mg_id__: 5, `country`: "Latvia", `value`: 11});
CREATE (:__mg_vertex__ {__mg_id__: 6, `country`: "Russia", `value`: 12});
CREATE (:__mg_vertex__ {__mg_id__: 7, `country`: "Poland", `value`: 13});
CREATE (:__mg_vertex__ {__mg_id__: 8, `country`: "Czech Republic", `value`: 14});
CREATE (:__mg_vertex__ {__mg_id__: 9, `country`: "Moldova", `value`: 15});
CREATE (:__mg_vertex__ {__mg_id__: 0, `value`: 6, `country`: "Austria"});
CREATE (:__mg_vertex__ {__mg_id__: 1, `value`: 7, `country`: "Hungary"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `value`: 8, `country`: "Romania"});
CREATE (:__mg_vertex__ {__mg_id__: 3, `value`: 9, `country`: "Bulgaria"});
CREATE (:__mg_vertex__ {__mg_id__: 4, `value`: 10, `country`: "Spain"});
CREATE (:__mg_vertex__ {__mg_id__: 5, `value`: 11, `country`: "Latvia"});
CREATE (:__mg_vertex__ {__mg_id__: 6, `value`: 12, `country`: "Russia"});
CREATE (:__mg_vertex__ {__mg_id__: 7, `value`: 13, `country`: "Poland"});
CREATE (:__mg_vertex__ {__mg_id__: 8, `value`: 14, `country`: "Czech Republic"});
CREATE (:__mg_vertex__ {__mg_id__: 9, `value`: 15, `country`: "Moldova"});
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 0 AND v.__mg_id__ = 1 CREATE (u)-[:`NEIGHBOUR`]->(v);
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 2 AND v.__mg_id__ = 1 CREATE (u)-[:`NEIGHBOUR`]->(v);
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 4 AND v.__mg_id__ = 6 CREATE (u)-[:`NOT_NEIGHBOUR`]->(v);

View File

@ -1,9 +1,9 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 0, `id`: "0", `country`: "Croatia", `browser`: "Chrome", `content`: "yes"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 1, `id`: "1", `country`: "United Kingdom", `browser`: "Chrome", `content`: "thanks"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 2, `id`: "2", `country`: "Germany", `content`: "LOL"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 3, `id`: "3", `country`: "France", `browser`: "Firefox", `content`: "I see"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 4, `id`: "4", `country`: "Italy", `browser`: "Internet Explorer", `content`: "fine"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 0, `content`: "yes", `browser`: "Chrome", `country`: "Croatia", `id`: "0"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 1, `content`: "thanks", `browser`: "Chrome", `country`: "United Kingdom", `id`: "1"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 2, `content`: "LOL", `country`: "Germany", `id`: "2"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 3, `content`: "I see", `browser`: "Firefox", `country`: "France", `id`: "3"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 4, `content`: "fine", `browser`: "Internet Explorer", `country`: "Italy", `id`: "4"});
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 0 AND v.__mg_id__ = 1 CREATE (u)-[:`LIKES`]->(v);
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 0 AND v.__mg_id__ = 2 CREATE (u)-[:`VISITED`]->(v);
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 1 AND v.__mg_id__ = 2 CREATE (u)-[:`FOLLOWS`]->(v);

View File

@ -1,4 +1,4 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__ {__mg_id__: 0, `value_bool`: false, `value_boolean`: true, `value_integer`: 5, `value_float`: 2.718, `value_double`: 3.141, `value_short`: 4, `value_str`: "hello", `id`: "0", `value_char`: "world", `value_int`: 1, `value_long`: 2, `value_byte`: 3});
CREATE (:__mg_vertex__ {__mg_id__: 0, `value_bool`: false, `value_boolean`: true, `value_integer`: 5, `value_float`: 2.718, `value_double`: 3.141, `value_short`: 4, `value_byte`: 3, `value_long`: 2, `value_int`: 1, `value_char`: "world", `value_str`: "hello", `id`: "0"});
DROP INDEX ON :__mg_vertex__(__mg_id__);
MATCH (u) REMOVE u:__mg_vertex__, u.__mg_id__;

View File

@ -1,9 +1,9 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 0, `country`: "Croatia", `browser`: "Chrome", `content`: "yes"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 1, `country`: "United Kingdom", `browser`: "Chrome", `content`: "thanks"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 2, `country`: "Germany", `content`: "LOL"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 3, `country`: "France", `browser`: "Firefox", `content`: "I see"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 4, `country`: "Italy", `browser`: "Internet Explorer", `content`: "fine"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 0, `content`: "yes", `browser`: "Chrome", `country`: "Croatia"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 1, `content`: "thanks", `browser`: "Chrome", `country`: "United Kingdom"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 2, `content`: "LOL", `country`: "Germany"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 3, `content`: "I see", `browser`: "Firefox", `country`: "France"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 4, `content`: "fine", `browser`: "Internet Explorer", `country`: "Italy"});
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 1 AND v.__mg_id__ = 2 CREATE (u)-[:`KNOWS` {`value`: 5}]->(v);
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 4 AND v.__mg_id__ = 0 CREATE (u)-[:`KNOWS` {`value`: 6}]->(v);
DROP INDEX ON :__mg_vertex__(__mg_id__);

View File

@ -1,9 +1,9 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__:`Message`:`Comment`:`First`:`Second` {__mg_id__: 0, `content`: "yes", `browser`: "Chrome", `id`: "0", `country`: "Croatia"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`First`:`Second` {__mg_id__: 1, `content`: "thanks", `browser`: "Chrome", `id`: "1", `country`: "United Kingdom"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`First`:`Second` {__mg_id__: 2, `content`: "LOL", `id`: "2", `country`: "Germany"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`First`:`Second` {__mg_id__: 3, `content`: "I see", `browser`: "Firefox", `id`: "3", `country`: "France"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`First`:`Second` {__mg_id__: 4, `content`: "fine", `browser`: "Internet Explorer", `id`: "4", `country`: "Italy"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`First`:`Second` {__mg_id__: 0, `content`: "yes", `browser`: "Chrome", `country`: "Croatia", `id`: "0"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`First`:`Second` {__mg_id__: 1, `content`: "thanks", `browser`: "Chrome", `country`: "United Kingdom", `id`: "1"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`First`:`Second` {__mg_id__: 2, `content`: "LOL", `country`: "Germany", `id`: "2"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`First`:`Second` {__mg_id__: 3, `content`: "I see", `browser`: "Firefox", `country`: "France", `id`: "3"});
CREATE (:__mg_vertex__:`Message`:`Comment`:`First`:`Second` {__mg_id__: 4, `content`: "fine", `browser`: "Internet Explorer", `country`: "Italy", `id`: "4"});
CREATE (:__mg_vertex__:`Forum`:`First`:`Second` {__mg_id__: 5, `title`: "General", `id`: "0"});
CREATE (:__mg_vertex__:`Forum`:`First`:`Second` {__mg_id__: 6, `title`: "Support", `id`: "1"});
CREATE (:__mg_vertex__:`Forum`:`First`:`Second` {__mg_id__: 7, `title`: "Music", `id`: "2"});

View File

@ -1,7 +1,7 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__:`Movie` {__mg_id__: 0, `year`: 1999, `movieId`: "tt0133093", `title`: "The Matrix"});
CREATE (:__mg_vertex__:`Movie`:`Sequel` {__mg_id__: 1, `year`: 2003, `movieId`: "tt0234215", `title`: "The Matrix Reloaded"});
CREATE (:__mg_vertex__:`Movie`:`Sequel` {__mg_id__: 2, `year`: 2003, `movieId`: "tt0242653", `title`: "The Matrix Revolutions"});
CREATE (:__mg_vertex__:`Movie` {__mg_id__: 0, `year`: 1999, `title`: "The Matrix", `movieId`: "tt0133093"});
CREATE (:__mg_vertex__:`Movie`:`Sequel` {__mg_id__: 1, `year`: 2003, `title`: "The Matrix Reloaded", `movieId`: "tt0234215"});
CREATE (:__mg_vertex__:`Movie`:`Sequel` {__mg_id__: 2, `year`: 2003, `title`: "The Matrix Revolutions", `movieId`: "tt0242653"});
CREATE (:__mg_vertex__:`Actor` {__mg_id__: 3, `name`: "Keanu Reeves", `personId`: "keanu"});
CREATE (:__mg_vertex__:`Actor` {__mg_id__: 4, `name`: "Laurence Fishburne", `personId`: "laurence"});
CREATE (:__mg_vertex__:`Actor` {__mg_id__: 5, `name`: "Carrie-Anne Moss", `personId`: "carrieanne"});

View File

@ -1,19 +1,19 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__ {__mg_id__: 0, `country`: "Croatia", `value`: 1});
CREATE (:__mg_vertex__ {__mg_id__: 1, `country`: "Germany", `value`: 2});
CREATE (:__mg_vertex__ {__mg_id__: 2, `country`: "United Kingdom", `value`: 3});
CREATE (:__mg_vertex__ {__mg_id__: 3, `country`: "France", `value`: 4});
CREATE (:__mg_vertex__ {__mg_id__: 4, `country`: "Italy", `value`: 5});
CREATE (:__mg_vertex__ {__mg_id__: 5, `country`: "Austria", `value`: 6});
CREATE (:__mg_vertex__ {__mg_id__: 6, `country`: "Hungary", `value`: 7});
CREATE (:__mg_vertex__ {__mg_id__: 7, `country`: "Romania", `value`: 8});
CREATE (:__mg_vertex__ {__mg_id__: 8, `country_id`: 4, `country`: "Bulgaria", `value`: 9});
CREATE (:__mg_vertex__ {__mg_id__: 9, `country_id`: 5, `country`: "Spain", `value`: 10});
CREATE (:__mg_vertex__ {__mg_id__: 10, `country_id`: 6, `country`: "Latvia", `value`: 11});
CREATE (:__mg_vertex__ {__mg_id__: 11, `country_id`: 7, `country`: "Russia", `value`: 12});
CREATE (:__mg_vertex__ {__mg_id__: 12, `country_id`: 8, `country`: "Poland", `value`: 13});
CREATE (:__mg_vertex__ {__mg_id__: 13, `country_id`: 9, `country`: "Czech Republic", `value`: 14});
CREATE (:__mg_vertex__ {__mg_id__: 14, `country_id`: 10, `country`: "Moldova", `value`: 15});
CREATE (:__mg_vertex__ {__mg_id__: 0, `value`: 1, `country`: "Croatia"});
CREATE (:__mg_vertex__ {__mg_id__: 1, `value`: 2, `country`: "Germany"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `value`: 3, `country`: "United Kingdom"});
CREATE (:__mg_vertex__ {__mg_id__: 3, `value`: 4, `country`: "France"});
CREATE (:__mg_vertex__ {__mg_id__: 4, `value`: 5, `country`: "Italy"});
CREATE (:__mg_vertex__ {__mg_id__: 5, `value`: 6, `country`: "Austria"});
CREATE (:__mg_vertex__ {__mg_id__: 6, `value`: 7, `country`: "Hungary"});
CREATE (:__mg_vertex__ {__mg_id__: 7, `value`: 8, `country`: "Romania"});
CREATE (:__mg_vertex__ {__mg_id__: 8, `country_id`: 4, `value`: 9, `country`: "Bulgaria"});
CREATE (:__mg_vertex__ {__mg_id__: 9, `country_id`: 5, `value`: 10, `country`: "Spain"});
CREATE (:__mg_vertex__ {__mg_id__: 10, `country_id`: 6, `value`: 11, `country`: "Latvia"});
CREATE (:__mg_vertex__ {__mg_id__: 11, `country_id`: 7, `value`: 12, `country`: "Russia"});
CREATE (:__mg_vertex__ {__mg_id__: 12, `country_id`: 8, `value`: 13, `country`: "Poland"});
CREATE (:__mg_vertex__ {__mg_id__: 13, `country_id`: 9, `value`: 14, `country`: "Czech Republic"});
CREATE (:__mg_vertex__ {__mg_id__: 14, `country_id`: 10, `value`: 15, `country`: "Moldova"});
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 5 AND v.__mg_id__ = 6 CREATE (u)-[:`NEIGHBOUR`]->(v);
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 7 AND v.__mg_id__ = 6 CREATE (u)-[:`NEIGHBOUR`]->(v);
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 9 AND v.__mg_id__ = 11 CREATE (u)-[:`NOT_NEIGHBOUR`]->(v);

View File

@@ -1,14 +1,14 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__ {__mg_id__: 0, `value`: 6, `country`: "Austria"});
CREATE (:__mg_vertex__ {__mg_id__: 1, `value`: 7, `country`: "Hungary"});
CREATE (:__mg_vertex__ {__mg_id__: 2, `value`: 8, `country`: "Romania"});
CREATE (:__mg_vertex__ {__mg_id__: 3, `value`: 9, `country`: "Bulgaria"});
CREATE (:__mg_vertex__ {__mg_id__: 4, `value`: 10, `country`: "Spain"});
CREATE (:__mg_vertex__ {__mg_id__: 5, `value`: 11, `country`: "Latvia"});
CREATE (:__mg_vertex__ {__mg_id__: 6, `value`: 12, `country`: "Russia"});
CREATE (:__mg_vertex__ {__mg_id__: 7, `value`: 13, `country`: "Poland"});
CREATE (:__mg_vertex__ {__mg_id__: 8, `value`: 14, `country`: "Czech Republic"});
CREATE (:__mg_vertex__ {__mg_id__: 9, `value`: 15, `country`: "Moldova"});
CREATE (:__mg_vertex__ {__mg_id__: 0, `country`: "Austria", `value`: 6});
CREATE (:__mg_vertex__ {__mg_id__: 1, `country`: "Hungary", `value`: 7});
CREATE (:__mg_vertex__ {__mg_id__: 2, `country`: "Romania", `value`: 8});
CREATE (:__mg_vertex__ {__mg_id__: 3, `country`: "Bulgaria", `value`: 9});
CREATE (:__mg_vertex__ {__mg_id__: 4, `country`: "Spain", `value`: 10});
CREATE (:__mg_vertex__ {__mg_id__: 5, `country`: "Latvia", `value`: 11});
CREATE (:__mg_vertex__ {__mg_id__: 6, `country`: "Russia", `value`: 12});
CREATE (:__mg_vertex__ {__mg_id__: 7, `country`: "Poland", `value`: 13});
CREATE (:__mg_vertex__ {__mg_id__: 8, `country`: "Czech Republic", `value`: 14});
CREATE (:__mg_vertex__ {__mg_id__: 9, `country`: "Moldova", `value`: 15});
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 0 AND v.__mg_id__ = 1 CREATE (u)-[:`NEIGHBOUR` {`value`: "test"}]->(v);
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 2 AND v.__mg_id__ = 1 CREATE (u)-[:`NEIGHBOUR` {`value`: "hello"}]->(v);
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 4 AND v.__mg_id__ = 6 CREATE (u)-[:`NOT_NEIGHBOUR` {`value`: "world"}]->(v);

View File

@@ -1,9 +1,9 @@
CREATE INDEX ON :__mg_vertex__(__mg_id__);
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 0, `id`: "0", `country`: "Croatia", `browser`: "Chrome", `content`: "yes"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 1, `id`: "1", `country`: "United Kingdom", `browser`: "Chrome", `content`: "thanks"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 2, `id`: "2", `country`: "Germany", `content`: "LOL"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 3, `id`: "3", `country`: "France", `browser`: "Firefox", `content`: "I see"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 4, `id`: "4", `country`: "Italy", `browser`: "Internet Explorer", `content`: "fine"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 0, `content`: "yes", `browser`: "Chrome", `country`: "Croatia", `id`: "0"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 1, `content`: "thanks", `browser`: "Chrome", `country`: "United Kingdom", `id`: "1"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 2, `content`: "LOL", `country`: "Germany", `id`: "2"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 3, `content`: "I see", `browser`: "Firefox", `country`: "France", `id`: "3"});
CREATE (:__mg_vertex__:`Message`:`Comment` {__mg_id__: 4, `content`: "fine", `browser`: "Internet Explorer", `country`: "Italy", `id`: "4"});
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 0 AND v.__mg_id__ = 1 CREATE (u)-[:`TYPE`]->(v);
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 0 AND v.__mg_id__ = 2 CREATE (u)-[:`TYPE`]->(v);
MATCH (u:__mg_vertex__), (v:__mg_vertex__) WHERE u.__mg_id__ = 1 AND v.__mg_id__ = 2 CREATE (u)-[:`TYPE`]->(v);

View File

@@ -695,8 +695,8 @@ TEST(PropertyValue, Equal) {
storage::PropertyValue(123), storage::PropertyValue(123.5),
storage::PropertyValue("nandare"), storage::PropertyValue(vec),
storage::PropertyValue(map)};
for (const auto item1 : data) {
for (const auto item2 : data) {
for (const auto &item1 : data) {
for (const auto &item2 : data) {
if (item1.type() == item2.type()) {
ASSERT_TRUE(item1 == item2);
} else {

View File

@@ -1,14 +1,12 @@
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include <csignal>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>
#include <algorithm>
#include <chrono>
#include <csignal>
#include <filesystem>
#include <iostream>
#include <thread>
@@ -1606,12 +1604,14 @@ TEST_P(DurabilityTest, WalTransactionOrdering) {
{
storage::Storage store(
{.items = {.properties_on_edges = GetParam()},
.durability = {.storage_directory = storage_directory,
.snapshot_wal_mode = storage::Config::Durability::
SnapshotWalMode::PERIODIC_SNAPSHOT_WITH_WAL,
.snapshot_interval = std::chrono::minutes(20),
.wal_file_flush_every_n_tx = kFlushWalEvery,
.wal_file_size_kibibytes = 100000}});
.durability = {
.storage_directory = storage_directory,
.snapshot_wal_mode = storage::Config::Durability::SnapshotWalMode::
PERIODIC_SNAPSHOT_WITH_WAL,
.snapshot_interval = std::chrono::minutes(20),
.wal_file_size_kibibytes = 100000,
.wal_file_flush_every_n_tx = kFlushWalEvery,
}});
auto acc1 = store.Access();
auto acc2 = store.Access();
@@ -1841,12 +1841,14 @@ TEST_P(DurabilityTest, WalDeathResilience) {
{
storage::Storage store(
{.items = {.properties_on_edges = GetParam()},
.durability = {.storage_directory = storage_directory,
.snapshot_wal_mode = storage::Config::Durability::
SnapshotWalMode::PERIODIC_SNAPSHOT_WITH_WAL,
.snapshot_interval = std::chrono::minutes(20),
.wal_file_flush_every_n_tx = kFlushWalEvery,
.recover_on_startup = true}});
.durability = {
.storage_directory = storage_directory,
.recover_on_startup = true,
.snapshot_wal_mode = storage::Config::Durability::SnapshotWalMode::
PERIODIC_SNAPSHOT_WITH_WAL,
.snapshot_interval = std::chrono::minutes(20),
.wal_file_flush_every_n_tx = kFlushWalEvery,
}});
{
auto acc = store.Access();
auto iterable = acc.Vertices(storage::View::OLD);

View File

@@ -1,2 +0,0 @@
.cppcheck_errors
generated/*

View File

@@ -1,10 +1,10 @@
#!/usr/bin/env python3
import argparse
import json
import os
import sys
from collections import defaultdict
def lines2phabricator(filename, lines):
ret = ""
numlines = 0
@@ -12,11 +12,15 @@ def lines2phabricator(filename, lines):
for row in f:
numlines += 1
for i in range(1, numlines + 1):
if not i in lines: ret += "N"
elif lines[i] == 0: ret += "U"
else: ret += "C"
if i not in lines:
ret += "N"
elif lines[i] == 0:
ret += "U"
else:
ret += "C"
return ret
parser = argparse.ArgumentParser(description='Parse llvm-cov export data.')
parser.add_argument('input', help='input file')
parser.add_argument('coverage', help='coverage output file')
@@ -35,15 +39,18 @@ for export in data["data"]:
for cfile in export["files"]:
for segment in cfile["segments"]:
filename = cfile["filename"]
if not filename in args.files: continue
line, col, count, has_count, is_region_entry = segment
if filename not in args.files:
continue
line, col, count, has_count, is_region_entry, is_gap_region = \
segment
sources[filename][line] += count
for function in export["functions"]:
for region in function["regions"]:
line_start, column_start, line_end, column_end, execution_count, \
file_id, expanded_file_id, kind = region
file_id, expanded_file_id, kind = region
filename = function["filenames"][file_id]
if filename not in args.files: continue
if filename not in args.files:
continue
for i in range(line_start, line_end + 1):
sources[filename][i] += execution_count
for total, values in export["totals"].items():
@@ -58,8 +65,8 @@ for filename, lines in sources.items():
with open(args.coverage, "w") as f:
json.dump(coverage, f)
summary = """==== Code coverage: ====
summary = """<!DOCTYPE html>
<h1>Code coverage</h1>
<table>
<tr><th>Coverage</th><th>Total</th></tr>
"""