Merge branch 'master' into project-pineapples
commit 21870a0e7e
@@ -61,7 +61,9 @@ Checks: '*,
-readability-magic-numbers,
-readability-named-parameter,
-misc-no-recursion,
-concurrency-mt-unsafe'
-concurrency-mt-unsafe,
-bugprone-easily-swappable-parameters'

WarningsAsErrors: ''
HeaderFilterRegex: 'src/.*'
AnalyzeTemporaryDtors: false
76  .github/workflows/package_all.yaml  (vendored)

@@ -6,11 +6,11 @@ on: workflow_dispatch

jobs:
centos-7:
runs-on: [self-hosted, DockerMgBuild]
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v2
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
@@ -22,29 +22,29 @@ jobs:
name: centos-7
path: build/output/centos-7/memgraph*.rpm

centos-8:
runs-on: [self-hosted, DockerMgBuild]
centos-9:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v2
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package centos-8
./release/package/run.sh package centos-9
- name: "Upload package"
uses: actions/upload-artifact@v2
with:
name: centos-8
path: build/output/centos-8/memgraph*.rpm
name: centos-9
path: build/output/centos-9/memgraph*.rpm

debian-10:
runs-on: [self-hosted, DockerMgBuild]
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v2
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
@@ -57,11 +57,11 @@ jobs:
path: build/output/debian-10/memgraph*.deb

debian-11:
runs-on: [self-hosted, DockerMgBuild]
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v2
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
@@ -74,11 +74,11 @@ jobs:
path: build/output/debian-11/memgraph*.deb
docker:
runs-on: [self-hosted, DockerMgBuild]
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v2
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
@@ -93,11 +93,11 @@ jobs:
path: build/output/docker/memgraph*.tar.gz

ubuntu-1804:
runs-on: [self-hosted, DockerMgBuild]
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v2
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
@@ -110,11 +110,11 @@ jobs:
path: build/output/ubuntu-18.04/memgraph*.deb

ubuntu-2004:
runs-on: [self-hosted, DockerMgBuild]
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v2
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
@@ -126,12 +126,29 @@ jobs:
name: ubuntu-2004
path: build/output/ubuntu-20.04/memgraph*.deb

debian-11-platform:
runs-on: [self-hosted, DockerMgBuild]
ubuntu-2204:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v2
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-22.04
- name: "Upload package"
uses: actions/upload-artifact@v2
with:
name: ubuntu-2204
path: build/output/ubuntu-22.04/memgraph*.deb

debian-11-platform:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
@@ -142,3 +159,20 @@ jobs:
with:
name: debian-11-platform
path: build/output/debian-11/memgraph*.deb

debian-11-arm:
runs-on: [self-hosted, DockerMgBuild, ARM64, strange]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11-arm
- name: "Upload package"
uses: actions/upload-artifact@v2
with:
name: debian-11
path: build/output/debian-11/memgraph*.deb
@@ -1,161 +0,0 @@
#!/bin/bash

set -Eeuo pipefail

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

TOOLCHAIN_BUILD_DEPS=(
coreutils-common gcc gcc-c++ make # generic build tools
wget # used for archive download
gnupg2 # used for archive signature verification
tar gzip bzip2 xz unzip # used for archive unpacking
zlib-devel # zlib library used for all builds
expat-devel libipt libipt-devel libbabeltrace-devel xz-devel python36-devel texinfo # for gdb
libcurl-devel # for cmake
curl # snappy
readline-devel # for cmake and llvm
libffi-devel libxml2-devel # for llvm
libedit-devel pcre-devel automake bison # for swig
file
openssl-devel
gmp-devel
gperf
patch
)

TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz # used for archive unpacking
zlib # zlib library used for all builds
expat libipt libbabeltrace xz-libs python36 # for gdb
readline # for cmake and llvm
libffi libxml2 # for llvm
openssl-devel
)

MEMGRAPH_BUILD_DEPS=(
git # source code control
make pkgconf-pkg-config # build system
curl wget # for downloading libs
libuuid-devel java-11-openjdk # required by antlr
readline-devel # for memgraph console
python36-devel # for query modules
openssl-devel
libseccomp-devel
python36 python3-virtualenv python3-pip nmap-ncat # for qa, macro_benchmark and stress tests
#
# IMPORTANT: python3-yaml does NOT exist on CentOS
# Install it manually using `pip3 install PyYAML`
#
PyYAML # Package name here does not correspond to the yum package!
libcurl-devel # mg-requests
rpm-build rpmlint # for RPM package building
doxygen graphviz # source documentation generators
which mono-complete dotnet-sdk-3.1 nodejs golang zip unzip java-11-openjdk-devel # for driver tests
sbcl # for custom Lisp C++ preprocessing
autoconf # for jemalloc code generation
libtool # for protobuf code generation
)

list() {
echo "$1"
}

check() {
local missing=""
for pkg in $1; do
if [ "$pkg" == "PyYAML" ]; then
if ! python3 -c "import yaml" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
continue
fi
if ! yum list installed "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
}
install() {
cd "$DIR"
if [ "$EUID" -ne 0 ]; then
echo "Please run as root."
exit 1
fi
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests doesn't work the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
else
echo "NOTE: export LANG=en_US.utf8"
fi
dnf install -y epel-release
dnf install -y 'dnf-command(config-manager)'
dnf config-manager --set-enabled powertools # Required to install texinfo.
dnf update -y
dnf install -y wget git python36 python3-pip
for pkg in $1; do
if [ "$pkg" == libipt ]; then
if ! dnf list installed libipt >/dev/null 2>/dev/null; then
dnf install -y http://repo.okay.com.mx/centos/8/x86_64/release/libipt-1.6.1-8.el8.x86_64.rpm
fi
continue
fi
if [ "$pkg" == libipt-devel ]; then
if ! yum list installed libipt-devel >/dev/null 2>/dev/null; then
dnf install -y http://repo.okay.com.mx/centos/8/x86_64/release/libipt-devel-1.6.1-8.el8.x86_64.rpm
fi
continue
fi
# Install GDB dependencies not present in the standard repos.
# https://bugs.centos.org/view.php?id=17068
# https://centos.pkgs.org
# Since 2020, there is Babeltrace2 (https://babeltrace.org). Not used
# within GDB yet (an assumption).
# http://mirror.centos.org/centos/8/PowerTools/x86_64/os/Packages/libbabeltrace-devel-1.5.4-3.el8.x86_64.rpm not working
if [ "$pkg" == libbabeltrace-devel ]; then
if ! dnf list installed libbabeltrace-devel >/dev/null 2>/dev/null; then
dnf install -y https://rpmfind.net/linux/centos/8-stream/PowerTools/x86_64/os/Packages/libbabeltrace-devel-1.5.4-3.el8.x86_64.rpm
fi
continue
fi
if [ "$pkg" == sbcl ]; then
if ! dnf list installed cl-asdf >/dev/null 2>/dev/null; then
dnf install -y https://pkgs.dyn.su/el8/base/x86_64/cl-asdf-20101028-18.el8.noarch.rpm
fi
if ! dnf list installed common-lisp-controller >/dev/null 2>/dev/null; then
dnf install -y https://pkgs.dyn.su/el8/base/x86_64/common-lisp-controller-7.4-20.el8.noarch.rpm
fi
if ! dnf list installed sbcl >/dev/null 2>/dev/null; then
dnf install -y https://pkgs.dyn.su/el8/base/x86_64/sbcl-2.0.1-4.el8.x86_64.rpm
fi
continue
fi
if [ "$pkg" == dotnet-sdk-3.1 ]; then
if ! dnf list installed dotnet-sdk-3.1 >/dev/null 2>/dev/null; then
wget -nv https://packages.microsoft.com/config/centos/8/packages-microsoft-prod.rpm -O packages-microsoft-prod.rpm
rpm -Uvh https://packages.microsoft.com/config/centos/8/packages-microsoft-prod.rpm
dnf update -y
dnf install -y dotnet-sdk-3.1
fi
continue
fi
if [ "$pkg" == PyYAML ]; then
if [ -z ${SUDO_USER+x} ]; then # Running as root (e.g. Docker).
pip3 install --user PyYAML
else # Running using sudo.
sudo -H -u "$SUDO_USER" bash -c "pip3 install --user PyYAML"
fi
continue
fi
dnf install -y "$pkg"
done
}

deps=$2"[*]"
"$1" "${!deps}"
@@ -6,14 +6,12 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

TOOLCHAIN_BUILD_DEPS=(
coreutils gcc gcc-c++ make # generic build tools
coreutils-common gcc gcc-c++ make # generic build tools
wget # used for archive download
gnupg2 # used for archive signature verification
tar gzip bzip2 xz unzip # used for archive unpacking
zlib-devel # zlib library used for all builds
expat-devel xz-devel python3-devel texinfo # for gdb
libcurl-devel # for cmake
curl # snappy
expat-devel xz-devel python3-devel texinfo libbabeltrace-devel # for gdb
readline-devel # for cmake and llvm
libffi-devel libxml2-devel # for llvm
libedit-devel pcre-devel automake bison # for swig
@@ -21,6 +19,9 @@ TOOLCHAIN_BUILD_DEPS=(
openssl-devel
gmp-devel
gperf
diffutils
libipt libipt-devel # intel
patch
)

TOOLCHAIN_RUN_DEPS=(
@@ -31,18 +32,19 @@ TOOLCHAIN_RUN_DEPS=(
readline # for cmake and llvm
libffi libxml2 # for llvm
openssl-devel
perl # for openssl
)

MEMGRAPH_BUILD_DEPS=(
git # source code control
make pkgconf-pkg-config # build system
curl wget # for downloading libs
wget # for downloading libs
libuuid-devel java-11-openjdk # required by antlr
readline-devel # for memgraph console
python3-devel # for query modules
openssl-devel
libseccomp-devel
python3 python3-virtualenv python3-pip nmap-ncat # for qa, macro_benchmark and stress tests
python3 python3-pip python3-virtualenv nmap-ncat # for qa, macro_benchmark and stress tests
#
# IMPORTANT: python3-yaml does NOT exist on CentOS
# Install it manually using `pip3 install PyYAML`
@@ -73,12 +75,6 @@ check() {
if [ "$pkg" == "python3-virtualenv" ]; then
continue
fi
if [ "$pkg" == sbcl ]; then
if ! sbcl --version &> /dev/null; then
missing="$pkg $missing"
fi
continue
fi
if ! yum list installed "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
@@ -105,13 +101,37 @@ install() {
yum update -y
yum install -y wget git python3 python3-pip
for pkg in $1; do
# Since there is no support for libipt-devel for CentOS 9 we install
# Fedoras version of same libs, they are the same version but released
# for different OS
# TODO Update when libipt-devel releases for CentOS 9
if [ "$pkg" == libipt ]; then
if ! dnf list installed libipt >/dev/null 2>/dev/null; then
dnf install -y http://repo.okay.com.mx/centos/8/x86_64/release/libipt-1.6.1-8.el8.x86_64.rpm
fi
continue
fi
if [ "$pkg" == libipt-devel ]; then
if ! dnf list installed libipt-devel >/dev/null 2>/dev/null; then
dnf install -y http://repo.okay.com.mx/centos/8/x86_64/release/libipt-devel-1.6.1-8.el8.x86_64.rpm
fi
continue
fi
if [ "$pkg" == libbabeltrace-devel ]; then
if ! dnf list installed libbabeltrace-devel >/dev/null 2>/dev/null; then
dnf install -y http://mirror.stream.centos.org/9-stream/CRB/x86_64/os/Packages/libbabeltrace-devel-1.5.8-10.el9.x86_64.rpm
fi
continue
fi
if [ "$pkg" == sbcl ]; then
if ! sbcl --version &> /dev/null; then
curl -s https://altushost-swe.dl.sourceforge.net/project/sbcl/sbcl/1.4.2/sbcl-1.4.2-arm64-linux-binary.tar.bz2 -o /tmp/sbcl-arm64.tar.bz2
tar xvjf /tmp/sbcl-arm64.tar.bz2 -C /tmp
pushd /tmp/sbcl-1.4.2-arm64-linux
INSTALL_ROOT=/usr/local sh install.sh
popd
if ! dnf list installed cl-asdf >/dev/null 2>/dev/null; then
dnf install -y https://pkgs.dyn.su/el8/base/x86_64/cl-asdf-20101028-18.el8.noarch.rpm
fi
if ! dnf list installed common-lisp-controller >/dev/null 2>/dev/null; then
dnf install -y https://pkgs.dyn.su/el8/base/x86_64/common-lisp-controller-7.4-20.el8.noarch.rpm
fi
if ! dnf list installed sbcl >/dev/null 2>/dev/null; then
dnf install -y https://pkgs.dyn.su/el8/base/x86_64/sbcl-2.0.1-4.el8.x86_64.rpm
fi
continue
fi
@@ -125,9 +145,11 @@ install() {
fi
if [ "$pkg" == python3-virtualenv ]; then
if [ -z ${SUDO_USER+x} ]; then # Running as root (e.g. Docker).
pip3 install --user virtualenv
pip3 install virtualenv
pip3 install virtualenvwrapper
else # Running using sudo.
sudo -H -u "$SUDO_USER" bash -c "pip3 install --user virtualenv"
sudo -H -u "$SUDO_USER" bash -c "pip3 install virtualenv"
sudo -H -u "$SUDO_USER" bash -c "pip3 install virtualenvwrapper"
fi
continue
fi
@@ -87,15 +87,6 @@ EOF
fi
apt install -y wget
for pkg in $1; do
if [ "$pkg" == dotnet-sdk-3.1 ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
wget -nv https://packages.microsoft.com/config/debian/10/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
dpkg -i packages-microsoft-prod.deb
apt-get update
apt-get install -y apt-transport-https dotnet-sdk-3.1
fi
continue
fi
apt install -y "$pkg"
done
}

@@ -87,7 +87,7 @@ EOF
fi
apt install -y wget
for pkg in $1; do
if [ "$pkg" == dotnet-sdk-3.1 ]; then
if [ "$pkg" == dotnet-sdk-3.1 ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
wget -nv https://packages.microsoft.com/config/debian/10/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
dpkg -i packages-microsoft-prod.deb
93  environment/os/ubuntu-22.04.sh  (executable file)

@@ -0,0 +1,93 @@
#!/bin/bash

set -Eeuo pipefail

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

TOOLCHAIN_BUILD_DEPS=(
coreutils gcc g++ build-essential make # generic build tools
wget # used for archive download
gnupg # used for archive signature verification
tar gzip bzip2 xz-utils unzip # used for archive unpacking
zlib1g-dev # zlib library used for all builds
libexpat1-dev libipt-dev libbabeltrace-dev liblzma-dev python3-dev texinfo # for gdb
libcurl4-openssl-dev # for cmake
libreadline-dev # for cmake and llvm
libffi-dev libxml2-dev # for llvm
curl # snappy
file
git # for thrift
libgmp-dev # for gdb
gperf # for proxygen
libssl-dev
libedit-dev libpcre3-dev automake bison # for swig
)

TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz-utils # used for archive unpacking
zlib1g # zlib library used for all builds
libexpat1 libipt2 libbabeltrace1 liblzma5 python3 # for gdb
libcurl4 # for cmake
libreadline8 # for cmake and llvm
libffi7 libxml2 # for llvm
libssl-dev # for libevent
)

MEMGRAPH_BUILD_DEPS=(
git # source code control
make pkg-config # build system
curl wget # for downloading libs
uuid-dev default-jre-headless # required by antlr
libreadline-dev # for memgraph console
libpython3-dev python3-dev # for query modules
libssl-dev
libseccomp-dev
netcat # tests are using nc to wait for memgraph
python3 python3-virtualenv python3-pip # for qa, macro_benchmark and stress tests
python3-yaml # for the configuration generator
libcurl4-openssl-dev # mg-requests
sbcl # for custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators
mono-runtime mono-mcs zip unzip default-jdk-headless # for driver tests
dotnet-sdk-6.0 golang nodejs npm
autoconf # for jemalloc code generation
libtool # for protobuf code generation
)
list() {
echo "$1"
}

check() {
check_all_dpkg "$1"
}

install() {
cd "$DIR"
apt update
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests doesn't work the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
else
echo "NOTE: export LANG=en_US.utf8"
fi
apt install -y wget
for pkg in $1; do
if [ "$pkg" == dotnet-sdk-6.0 ]; then
if ! dpkg -s dotnet-sdk-6.0 2>/dev/null >/dev/null; then
wget -nv https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
dpkg -i packages-microsoft-prod.deb
apt-get update
apt-get install -y apt-transport-https dotnet-sdk-6.0
fi
continue
fi
apt install -y "$pkg"
done
}

deps=$2"[*]"
"$1" "${!deps}"
@@ -675,7 +675,7 @@ PROXYGEN_SHA256=5360a8ccdfb2f5a6c7b3eed331ec7ab0e2c792d579c6fff499c85c516c11fe14
SNAPPY_SHA256=75c1fbb3d618dd3a0483bff0e26d0a92b495bbe5059c8b4f1c962b478b6e06e7
SNAPPY_VERSION=1.1.9
XZ_VERSION=5.2.5 # for LZMA
ZLIB_VERSION=1.2.11
ZLIB_VERSION=1.2.12
ZSTD_VERSION=1.5.0
WANGLE_SHA256=1002e9c32b6f4837f6a760016e3b3e22f3509880ef3eaad191c80dc92655f23f

@@ -1178,12 +1178,21 @@ popd

# create toolchain archive
if [ ! -f $NAME-binaries-$DISTRO.tar.gz ]; then
DISTRO_FULL_NAME=$DISTRO
if [ "$for_arm" = true ]; then
DISTRO_FULL_NAME="$DISTRO_FULL_NAME-aarch64"
DISTRO_FULL_NAME=${DISTRO}
if [[ "${DISTRO}" == centos* ]]; then
if [[ "$for_arm" = "true" ]]; then
DISTRO_FULL_NAME="$DISTRO_FULL_NAME-aarch64"
else
DISTRO_FULL_NAME="$DISTRO_FULL_NAME-x86_64"
fi
else
DISTRO_FULL_NAME="$DISTRO_FULL_NAME-x86_64"
if [[ "$for_arm" = "true" ]]; then
DISTRO_FULL_NAME="$DISTRO_FULL_NAME-arm64"
else
DISTRO_FULL_NAME="$DISTRO_FULL_NAME-amd64"
fi
fi

tar --owner=root --group=root -cpvzf $NAME-binaries-$DISTRO_FULL_NAME.tar.gz -C /opt $NAME
fi
4  init

@@ -24,7 +24,7 @@ function setup_virtualenv () {
fi

# create new virtualenv
virtualenv -p python3 ve3 || exit 1
python3 -m virtualenv -p python3 ve3 || exit 1
source ve3/bin/activate
pip --timeout 1000 install -r requirements.txt || exit 1
deactivate
@@ -66,7 +66,7 @@ fi

DISTRO=$(operating_system)
ARCHITECTURE=$(architecture)
if [ "${ARCHITECTURE}" = "arm64" ]; then
if [ "${ARCHITECTURE}" = "arm64" ] || [ "${ARCHITECTURE}" = "aarch64" ]; then
OS_SCRIPT=$DIR/environment/os/$DISTRO-arm.sh
else
OS_SCRIPT=$DIR/environment/os/$DISTRO.sh
@@ -67,7 +67,7 @@ It aims to deliver developers the speed, simplicity and scale required to build
the next generation of applications driver by real-time connected data.")
# Add `openssl` package to dependencies list. Used to generate SSL certificates.
# We also depend on `python3` because we embed it in Memgraph.
set(CPACK_RPM_PACKAGE_REQUIRES "openssl >= 1.0.0, curl >= 7.29.0, python3 >= 3.5.0, libstdc >= 6")
set(CPACK_RPM_PACKAGE_REQUIRES "openssl >= 1.0.0, curl >= 7.29.0, python3 >= 3.5.0, libstdc >= 6, logrotate")

# All variables must be set before including.
include(CPack)
@@ -1,31 +0,0 @@
FROM dokken/centos-stream-9
# NOTE: If you change the base distro update release/package as well.

ARG release

RUN yum update && yum install -y \
openssl libcurl libseccomp python3 python3-pip \
--nobest --allowerasing \
&& rm -rf /tmp/* \
&& yum clean all

RUN pip3 install networkx==2.4 numpy==1.21.4 scipy==1.7.3

COPY ${release} /

# Install memgraph package
RUN rpm -i ${release}

# Memgraph listens for Bolt Protocol on this port by default.
EXPOSE 7687
# Snapshots and logging volumes
VOLUME /var/log/memgraph
VOLUME /var/lib/memgraph
# Configuration volume
VOLUME /etc/memgraph

USER memgraph
WORKDIR /usr/lib/memgraph

ENTRYPOINT ["/usr/lib/memgraph/memgraph"]
CMD [""]
@@ -7,8 +7,8 @@ RUN yum -y update \
# Do NOT be smart here and clean the cache because the container is used in the
# stateful context.

RUN wget -q https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/${TOOLCHAIN_VERSION}/${TOOLCHAIN_VERSION}-binaries-centos-7.tar.gz \
-O ${TOOLCHAIN_VERSION}-binaries-centos-7.tar.gz \
&& tar xzvf ${TOOLCHAIN_VERSION}-binaries-centos-7.tar.gz -C /opt
RUN wget -q https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/${TOOLCHAIN_VERSION}/${TOOLCHAIN_VERSION}-binaries-centos-7-x86_64.tar.gz \
-O ${TOOLCHAIN_VERSION}-binaries-centos-7-x86_64.tar.gz \
&& tar xzvf ${TOOLCHAIN_VERSION}-binaries-centos-7-x86_64.tar.gz -C /opt

ENTRYPOINT ["sleep", "infinity"]
@@ -1,14 +0,0 @@
FROM centos:8

ARG TOOLCHAIN_VERSION

RUN dnf -y update \
&& dnf install -y wget git
# Do NOT be smart here and clean the cache because the container is used in the
# stateful context.

RUN wget -q https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/${TOOLCHAIN_VERSION}/${TOOLCHAIN_VERSION}-binaries-centos-8.tar.gz \
-O ${TOOLCHAIN_VERSION}-binaries-centos-8.tar.gz \
&& tar xzvf ${TOOLCHAIN_VERSION}-binaries-centos-8.tar.gz -C /opt

ENTRYPOINT ["sleep", "infinity"]
14  release/package/centos-9/Dockerfile  (normal file)

@@ -0,0 +1,14 @@
FROM quay.io/centos/centos:stream9

ARG TOOLCHAIN_VERSION

RUN yum -y update \
&& yum install -y wget git
# Do NOT be smart here and clean the cache because the container is used in the
# stateful context.

RUN wget -q https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/${TOOLCHAIN_VERSION}/${TOOLCHAIN_VERSION}-binaries-centos-9-x86_64.tar.gz \
-O ${TOOLCHAIN_VERSION}-binaries-centos-9-x86_64.tar.gz \
&& tar xzvf ${TOOLCHAIN_VERSION}-binaries-centos-9-x86_64.tar.gz -C /opt

ENTRYPOINT ["sleep", "infinity"]
@@ -10,8 +10,8 @@ RUN apt update && apt install -y \
# Do NOT be smart here and clean the cache because the container is used in the
# stateful context.

RUN wget -q https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/${TOOLCHAIN_VERSION}/${TOOLCHAIN_VERSION}-binaries-debian-10.tar.gz \
-O ${TOOLCHAIN_VERSION}-binaries-debian-10.tar.gz \
&& tar xzvf ${TOOLCHAIN_VERSION}-binaries-debian-10.tar.gz -C /opt
RUN wget -q https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/${TOOLCHAIN_VERSION}/${TOOLCHAIN_VERSION}-binaries-debian-10-amd64.tar.gz \
-O ${TOOLCHAIN_VERSION}-binaries-debian-10-amd64.tar.gz \
&& tar xzvf ${TOOLCHAIN_VERSION}-binaries-debian-10-amd64.tar.gz -C /opt

ENTRYPOINT ["sleep", "infinity"]
17  release/package/debian-11-arm/Dockerfile  (normal file)

@@ -0,0 +1,17 @@
FROM debian:11

ARG TOOLCHAIN_VERSION

# Stops tzdata interactive configuration.
ENV DEBIAN_FRONTEND=noninteractive

RUN apt update && apt install -y \
ca-certificates wget git
# Do NOT be smart here and clean the cache because the container is used in the
# stateful context.

RUN wget -q https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/${TOOLCHAIN_VERSION}/${TOOLCHAIN_VERSION}-binaries-debian-11-arm64.tar.gz \
-O ${TOOLCHAIN_VERSION}-binaries-debian-11-arm64.tar.gz \
&& tar xzvf ${TOOLCHAIN_VERSION}-binaries-debian-11-arm64.tar.gz -C /opt

ENTRYPOINT ["sleep", "infinity"]
@@ -10,8 +10,8 @@ RUN apt update && apt install -y \
# Do NOT be smart here and clean the cache because the container is used in the
# stateful context.

RUN wget -q https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/${TOOLCHAIN_VERSION}/${TOOLCHAIN_VERSION}-binaries-debian-11.tar.gz \
-O ${TOOLCHAIN_VERSION}-binaries-debian-11.tar.gz \
&& tar xzvf ${TOOLCHAIN_VERSION}-binaries-debian-11.tar.gz -C /opt
RUN wget -q https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/${TOOLCHAIN_VERSION}/${TOOLCHAIN_VERSION}-binaries-debian-11-amd64.tar.gz \
-O ${TOOLCHAIN_VERSION}-binaries-debian-11-amd64.tar.gz \
&& tar xzvf ${TOOLCHAIN_VERSION}-binaries-debian-11-amd64.tar.gz -C /opt

ENTRYPOINT ["sleep", "infinity"]
@@ -4,10 +4,10 @@ services:
build:
context: centos-7
container_name: "mgbuild_centos-7"
mgbuild_centos-8:
mgbuild_centos-9:
build:
context: centos-8
container_name: "mgbuild_centos-8"
context: centos-9
container_name: "mgbuild_centos-9"
mgbuild_debian-10:
build:
context: debian-10
@@ -24,3 +24,7 @@ services:
build:
context: ubuntu-20.04
container_name: "mgbuild_ubuntu-20.04"
mgbuild_ubuntu-22.04:
build:
context: ubuntu-22.04
container_name: "mgbuild_ubuntu-22.04"
@@ -3,7 +3,7 @@
set -Eeuo pipefail

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
SUPPORTED_OS=(centos-7 centos-8 debian-10 debian-11 ubuntu-18.04 ubuntu-20.04)
SUPPORTED_OS=(centos-7 centos-9 debian-10 debian-11 ubuntu-18.04 ubuntu-20.04 ubuntu-22.04 debian-11-arm)
PROJECT_ROOT="$SCRIPT_DIR/../.."
TOOLCHAIN_VERSION="toolchain-v4"
ACTIVATE_TOOLCHAIN="source /opt/${TOOLCHAIN_VERSION}/activate"
@@ -67,14 +67,18 @@ make_package () {
# environment/os/{os}.sh does not come within the toolchain package. When
# migrating to the next version of toolchain do that, and remove the
# TOOLCHAIN_RUN_DEPS installation from here.
echo "Installing dependencies..."
echo "Installing dependencies using '/memgraph/environment/os/$os.sh' script..."
docker exec "$build_container" bash -c "/memgraph/environment/os/$os.sh install TOOLCHAIN_RUN_DEPS"
docker exec "$build_container" bash -c "/memgraph/environment/os/$os.sh install MEMGRAPH_BUILD_DEPS"

echo "Building targeted package..."
docker exec "$build_container" bash -c "cd /memgraph && $ACTIVATE_TOOLCHAIN && ./init"
docker exec "$build_container" bash -c "cd $container_build_dir && rm -rf ./*"
docker exec "$build_container" bash -c "cd $container_build_dir && $ACTIVATE_TOOLCHAIN && cmake -DCMAKE_BUILD_TYPE=release $telemetry_id_override_flag .."
if [[ "$os" == "debian-11-arm" ]]; then
docker exec "$build_container" bash -c "cd $container_build_dir && $ACTIVATE_TOOLCHAIN && cmake -DCMAKE_BUILD_TYPE=release -DMG_ARCH="ARM64" $telemetry_id_override_flag .."
else
docker exec "$build_container" bash -c "cd $container_build_dir && $ACTIVATE_TOOLCHAIN && cmake -DCMAKE_BUILD_TYPE=release $telemetry_id_override_flag .."
fi
# ' is used instead of " because we need to run make within the allowed
# container resources.
# shellcheck disable=SC2016
@@ -10,8 +10,8 @@ RUN apt update && apt install -y \
# Do NOT be smart here and clean the cache because the container is used in the
# stateful context.

RUN wget -q https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/${TOOLCHAIN_VERSION}/${TOOLCHAIN_VERSION}-binaries-ubuntu-18.04.tar.gz \
-O ${TOOLCHAIN_VERSION}-binaries-ubuntu-18.04.tar.gz \
&& tar xzvf ${TOOLCHAIN_VERSION}-binaries-ubuntu-18.04.tar.gz -C /opt
RUN wget -q https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/${TOOLCHAIN_VERSION}/${TOOLCHAIN_VERSION}-binaries-ubuntu-18.04-amd64.tar.gz \
-O ${TOOLCHAIN_VERSION}-binaries-ubuntu-18.04-amd64.tar.gz \
&& tar xzvf ${TOOLCHAIN_VERSION}-binaries-ubuntu-18.04-amd64.tar.gz -C /opt

ENTRYPOINT ["sleep", "infinity"]
@@ -10,8 +10,8 @@ RUN apt update && apt install -y \
# Do NOT be smart here and clean the cache because the container is used in the
# stateful context.

RUN wget -q https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/${TOOLCHAIN_VERSION}/${TOOLCHAIN_VERSION}-binaries-ubuntu-20.04.tar.gz \
-O ${TOOLCHAIN_VERSION}-binaries-ubuntu-20.04.tar.gz \
&& tar xzvf ${TOOLCHAIN_VERSION}-binaries-ubuntu-20.04.tar.gz -C /opt
RUN wget -q https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/${TOOLCHAIN_VERSION}/${TOOLCHAIN_VERSION}-binaries-ubuntu-20.04-amd64.tar.gz \
-O ${TOOLCHAIN_VERSION}-binaries-ubuntu-20.04-amd64.tar.gz \
&& tar xzvf ${TOOLCHAIN_VERSION}-binaries-ubuntu-20.04-amd64.tar.gz -C /opt

ENTRYPOINT ["sleep", "infinity"]
17  release/package/ubuntu-22.04/Dockerfile  (normal file)

@@ -0,0 +1,17 @@
FROM ubuntu:22.04

ARG TOOLCHAIN_VERSION

# Stops tzdata interactive configuration.
ENV DEBIAN_FRONTEND=noninteractive

RUN apt update && apt install -y \
ca-certificates wget git
# Do NOT be smart here and clean the cache because the container is used in the
# stateful context.

RUN wget -q https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/${TOOLCHAIN_VERSION}/${TOOLCHAIN_VERSION}-binaries-ubuntu-22.04-amd64.tar.gz \
-O ${TOOLCHAIN_VERSION}-binaries-ubuntu-22.04-amd64.tar.gz \
&& tar xzvf ${TOOLCHAIN_VERSION}-binaries-ubuntu-22.04-amd64.tar.gz -C /opt

ENTRYPOINT ["sleep", "infinity"]
@@ -170,7 +170,7 @@ enum class CsvParserState {
EXPECT_DELIMITER,
};

bool SubstringStartsWith(const std::string_view &str, size_t pos, const std::string_view &what) {
bool SubstringStartsWith(const std::string_view str, size_t pos, const std::string_view what) {
return memgraph::utils::StartsWith(memgraph::utils::Substr(str, pos), what);
}
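A recurring change across the C++ hunks that follow is replacing `const std::string_view &` parameters with `std::string_view` passed by value. The standalone sketch below is not part of the diff; it only illustrates the idiom under the assumption that a string_view is a small (pointer, length) pair, so copying it costs no more than passing a reference and the extra indirection can simply be dropped.

// Hypothetical example, not from the Memgraph sources.
#include <string>
#include <string_view>

// Passing string_view by value: the callee gets a cheap two-word copy and the
// caller can hand in a std::string, a string literal, or another string_view.
bool StartsWith(std::string_view str, std::string_view prefix) {
  return str.substr(0, prefix.size()) == prefix;
}

int main() {
  const std::string name{"memgraph::utils"};
  return StartsWith(name, "memgraph") ? 0 : 1;  // returns 0: the prefix matches
}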
@@ -310,11 +310,11 @@ class DbAccessor final {
return std::make_optional<VertexAccessor>(*value);
}

storage::PropertyId NameToProperty(const std::string_view &name) { return accessor_->NameToProperty(name); }
storage::PropertyId NameToProperty(const std::string_view name) { return accessor_->NameToProperty(name); }

storage::LabelId NameToLabel(const std::string_view &name) { return accessor_->NameToLabel(name); }
storage::LabelId NameToLabel(const std::string_view name) { return accessor_->NameToLabel(name); }

storage::EdgeTypeId NameToEdgeType(const std::string_view &name) { return accessor_->NameToEdgeType(name); }
storage::EdgeTypeId NameToEdgeType(const std::string_view name) { return accessor_->NameToEdgeType(name); }

const std::string &PropertyToName(storage::PropertyId prop) const { return accessor_->PropertyToName(prop); }

@@ -46,7 +46,7 @@ const char *kInternalPropertyId = "__mg_id__";
const char *kInternalVertexLabel = "__mg_vertex__";

/// A helper function that escapes label, edge type and property names.
std::string EscapeName(const std::string_view &value) {
std::string EscapeName(const std::string_view value) {
std::string out;
out.reserve(value.size() + 2);
out.append(1, '`');

@@ -716,7 +716,7 @@ class ExpressionEvaluator : public ExpressionVisitor<TypedValue> {
}

template <class TRecordAccessor>
storage::PropertyValue GetProperty(const TRecordAccessor &record_accessor, const std::string_view &name) {
storage::PropertyValue GetProperty(const TRecordAccessor &record_accessor, const std::string_view name) {
auto maybe_prop = record_accessor.GetProperty(view_, dba_->NameToProperty(name));
if (maybe_prop.HasError() && maybe_prop.GetError() == storage::Error::NONEXISTENT_OBJECT) {
// This is a very nasty and temporary hack in order to make MERGE work.
@@ -231,6 +231,11 @@ class ReplQueryHandler final : public query::ReplicationQueryHandler {
if (repl_info.timeout) {
replica.timeout = *repl_info.timeout;
}

replica.current_timestamp_of_replica = repl_info.timestamp_info.current_timestamp_of_replica;
replica.current_number_of_timestamp_behind_master =
repl_info.timestamp_info.current_number_of_timestamp_behind_master;

switch (repl_info.state) {
case storage::replication::ReplicaState::READY:
replica.state = ReplicationQuery::ReplicaState::READY;
@@ -245,6 +250,7 @@ class ReplQueryHandler final : public query::ReplicationQueryHandler {
replica.state = ReplicationQuery::ReplicaState::INVALID;
break;
}

return replica;
};

@@ -512,7 +518,13 @@ Callback HandleReplicationQuery(ReplicationQuery *repl_query, const Parameters &
}

case ReplicationQuery::Action::SHOW_REPLICAS: {
callback.header = {"name", "socket_address", "sync_mode", "timeout", "state"};
callback.header = {"name",
"socket_address",
"sync_mode",
"timeout",
"current_timestamp_of_replica",
"number_of_timestamp_behind_master",
"state"};
callback.fn = [handler = ReplQueryHandler{interpreter_context->db}, replica_nfields = callback.header.size()] {
const auto &replicas = handler.ShowReplicas();
auto typed_replicas = std::vector<std::vector<TypedValue>>{};
@@ -539,6 +551,10 @@ Callback HandleReplicationQuery(ReplicationQuery *repl_query, const Parameters &
typed_replica.emplace_back(TypedValue());
}

typed_replica.emplace_back(TypedValue(static_cast<int64_t>(replica.current_timestamp_of_replica)));
typed_replica.emplace_back(
TypedValue(static_cast<int64_t>(replica.current_number_of_timestamp_behind_master)));

switch (replica.state) {
case ReplicationQuery::ReplicaState::READY:
typed_replica.emplace_back(TypedValue("ready"));

@@ -127,6 +127,8 @@ class ReplicationQueryHandler {
std::string socket_address;
ReplicationQuery::SyncMode sync_mode;
std::optional<double> timeout;
uint64_t current_timestamp_of_replica;
uint64_t current_number_of_timestamp_behind_master;
ReplicationQuery::ReplicaState state;
};
|
||||
|
||||
namespace {
|
||||
|
||||
void CallCustomProcedure(const std::string_view &fully_qualified_procedure_name, const mgp_proc &proc,
|
||||
void CallCustomProcedure(const std::string_view fully_qualified_procedure_name, const mgp_proc &proc,
|
||||
const std::vector<Expression *> &args, mgp_graph &graph, ExpressionEvaluator *evaluator,
|
||||
utils::MemoryResource *memory, std::optional<size_t> memory_limit, mgp_result *result) {
|
||||
static_assert(std::uses_allocator_v<mgp_value, utils::Allocator<mgp_value>>,
|
||||
|
@ -1054,7 +1054,7 @@ std::unique_ptr<Module> LoadModuleFromFile(const std::filesystem::path &path) {
|
||||
|
||||
} // namespace
|
||||
|
||||
bool ModuleRegistry::RegisterModule(const std::string_view &name, std::unique_ptr<Module> module) {
|
||||
bool ModuleRegistry::RegisterModule(const std::string_view name, std::unique_ptr<Module> module) {
|
||||
MG_ASSERT(!name.empty(), "Module name cannot be empty");
|
||||
MG_ASSERT(module, "Tried to register an invalid module");
|
||||
if (modules_.find(name) != modules_.end()) {
|
||||
@ -1163,7 +1163,7 @@ void ModuleRegistry::UnloadAndLoadModulesFromDirectories() {
|
||||
}
|
||||
}
|
||||
|
||||
ModulePtr ModuleRegistry::GetModuleNamed(const std::string_view &name) const {
|
||||
ModulePtr ModuleRegistry::GetModuleNamed(const std::string_view name) const {
|
||||
std::shared_lock<utils::RWLock> guard(lock_);
|
||||
auto found_it = modules_.find(name);
|
||||
if (found_it == modules_.end()) return nullptr;
|
||||
|
@ -77,7 +77,7 @@ class ModuleRegistry final {
|
||||
mutable utils::RWLock lock_{utils::RWLock::Priority::WRITE};
|
||||
std::unique_ptr<utils::MemoryResource> shared_{std::make_unique<utils::ResourceWithOutOfMemoryException>()};
|
||||
|
||||
bool RegisterModule(const std::string_view &name, std::unique_ptr<Module> module);
|
||||
bool RegisterModule(std::string_view name, std::unique_ptr<Module> module);
|
||||
|
||||
void DoUnloadAllModules();
|
||||
|
||||
@ -105,7 +105,7 @@ class ModuleRegistry final {
|
||||
///
|
||||
/// Return true if the module was loaded or reloaded successfully, false
|
||||
/// otherwise.
|
||||
bool LoadOrReloadModuleFromName(const std::string_view name);
|
||||
bool LoadOrReloadModuleFromName(std::string_view name);
|
||||
|
||||
/// Atomically unload all modules and then load all possible modules from the
|
||||
/// set directories.
|
||||
@ -115,7 +115,7 @@ class ModuleRegistry final {
|
||||
|
||||
/// Find a module with given name or return nullptr.
|
||||
/// Takes a read lock.
|
||||
ModulePtr GetModuleNamed(const std::string_view &name) const;
|
||||
ModulePtr GetModuleNamed(std::string_view name) const;
|
||||
|
||||
/// Remove all loaded (non-builtin) modules.
|
||||
/// Takes a write lock.
|
||||
@ -175,7 +175,7 @@ extern ModuleRegistry gModuleRegistry;
|
||||
/// inside this function. ModulePtr must be kept alive to make sure it won't be
|
||||
/// unloaded.
|
||||
std::optional<std::pair<procedure::ModulePtr, const mgp_proc *>> FindProcedure(
|
||||
const ModuleRegistry &module_registry, const std::string_view fully_qualified_procedure_name,
|
||||
const ModuleRegistry &module_registry, std::string_view fully_qualified_procedure_name,
|
||||
utils::MemoryResource *memory);
|
||||
|
||||
/// Return the ModulePtr and `mgp_trans *` of the found transformation after resolving
|
||||
@ -183,7 +183,7 @@ std::optional<std::pair<procedure::ModulePtr, const mgp_proc *>> FindProcedure(
|
||||
/// inside this function. ModulePtr must be kept alive to make sure it won't be
|
||||
/// unloaded.
|
||||
std::optional<std::pair<procedure::ModulePtr, const mgp_trans *>> FindTransformation(
|
||||
const ModuleRegistry &module_registry, const std::string_view fully_qualified_transformation_name,
|
||||
const ModuleRegistry &module_registry, std::string_view fully_qualified_transformation_name,
|
||||
utils::MemoryResource *memory);
|
||||
|
||||
/// Return the ModulePtr and `mgp_func *` of the found function after resolving
|
||||
@ -191,7 +191,7 @@ std::optional<std::pair<procedure::ModulePtr, const mgp_trans *>> FindTransforma
|
||||
/// std::nullopt is returned. `memory` is used for temporary allocations
|
||||
/// inside this function. ModulePtr must be kept alive to make sure it won't be unloaded.
|
||||
std::optional<std::pair<procedure::ModulePtr, const mgp_func *>> FindFunction(
|
||||
const ModuleRegistry &module_registry, const std::string_view fully_qualified_function_name,
|
||||
const ModuleRegistry &module_registry, std::string_view fully_qualified_function_name,
|
||||
utils::MemoryResource *memory);
|
||||
|
||||
template <typename T>
|
||||
|
@ -407,7 +407,7 @@ DEFINE_TYPED_VALUE_COPY_ASSIGNMENT(int, Int, int_v)
|
||||
DEFINE_TYPED_VALUE_COPY_ASSIGNMENT(bool, Bool, bool_v)
|
||||
DEFINE_TYPED_VALUE_COPY_ASSIGNMENT(int64_t, Int, int_v)
|
||||
DEFINE_TYPED_VALUE_COPY_ASSIGNMENT(double, Double, double_v)
|
||||
DEFINE_TYPED_VALUE_COPY_ASSIGNMENT(const std::string_view &, String, string_v)
|
||||
DEFINE_TYPED_VALUE_COPY_ASSIGNMENT(const std::string_view, String, string_v)
|
||||
DEFINE_TYPED_VALUE_COPY_ASSIGNMENT(const TypedValue::TVector &, List, list_v)
|
||||
|
||||
TypedValue &TypedValue::operator=(const std::vector<TypedValue> &other) {
|
||||
|
@ -185,7 +185,7 @@ class TypedValue {
|
||||
new (&string_v) TString(value, memory_);
|
||||
}
|
||||
|
||||
explicit TypedValue(const std::string_view &value, utils::MemoryResource *memory = utils::NewDeleteResource())
|
||||
explicit TypedValue(const std::string_view value, utils::MemoryResource *memory = utils::NewDeleteResource())
|
||||
: memory_(memory), type_(Type::String) {
|
||||
new (&string_v) TString(value, memory_);
|
||||
}
|
||||
@ -420,7 +420,7 @@ class TypedValue {
|
||||
TypedValue &operator=(bool);
|
||||
TypedValue &operator=(int64_t);
|
||||
TypedValue &operator=(double);
|
||||
TypedValue &operator=(const std::string_view &);
|
||||
TypedValue &operator=(std::string_view);
|
||||
TypedValue &operator=(const TVector &);
|
||||
TypedValue &operator=(const std::vector<TypedValue> &);
|
||||
TypedValue &operator=(const TMap &);
|
||||
|
@ -166,7 +166,7 @@ inline void Save(const char *obj, Builder *builder) {
|
||||
builder->Save(reinterpret_cast<const uint8_t *>(obj), size);
|
||||
}
|
||||
|
||||
inline void Save(const std::string_view &obj, Builder *builder) {
|
||||
inline void Save(const std::string_view obj, Builder *builder) {
|
||||
uint64_t size = obj.size();
|
||||
Save(size, builder);
|
||||
builder->Save(reinterpret_cast<const uint8_t *>(obj.data()), size);
|
||||
|
@ -27,7 +27,7 @@ void WriteSize(Encoder *encoder, uint64_t size) {
|
||||
}
|
||||
} // namespace
|
||||
|
||||
void Encoder::Initialize(const std::filesystem::path &path, const std::string_view &magic, uint64_t version) {
|
||||
void Encoder::Initialize(const std::filesystem::path &path, const std::string_view magic, uint64_t version) {
|
||||
file_.Open(path, utils::OutputFile::Mode::OVERWRITE_EXISTING);
|
||||
Write(reinterpret_cast<const uint8_t *>(magic.data()), magic.size());
|
||||
auto version_encoded = utils::HostToLittleEndian(version);
|
||||
@ -73,7 +73,7 @@ void Encoder::WriteDouble(double value) {
|
||||
Write(reinterpret_cast<const uint8_t *>(&value_uint), sizeof(value_uint));
|
||||
}
|
||||
|
||||
void Encoder::WriteString(const std::string_view &value) {
|
||||
void Encoder::WriteString(const std::string_view value) {
|
||||
WriteMarker(Marker::TYPE_STRING);
|
||||
WriteSize(this, value.size());
|
||||
Write(reinterpret_cast<const uint8_t *>(value.data()), value.size());
|
||||
|
@ -34,14 +34,14 @@ class BaseEncoder {
|
||||
virtual void WriteBool(bool value) = 0;
|
||||
virtual void WriteUint(uint64_t value) = 0;
|
||||
virtual void WriteDouble(double value) = 0;
|
||||
virtual void WriteString(const std::string_view &value) = 0;
|
||||
virtual void WriteString(std::string_view value) = 0;
|
||||
virtual void WritePropertyValue(const PropertyValue &value) = 0;
|
||||
};
|
||||
|
||||
/// Encoder that is used to generate a snapshot/WAL.
|
||||
class Encoder final : public BaseEncoder {
|
||||
public:
|
||||
void Initialize(const std::filesystem::path &path, const std::string_view &magic, uint64_t version);
|
||||
void Initialize(const std::filesystem::path &path, std::string_view magic, uint64_t version);
|
||||
|
||||
void OpenExisting(const std::filesystem::path &path);
|
||||
|
||||
@ -54,7 +54,7 @@ class Encoder final : public BaseEncoder {
|
||||
void WriteBool(bool value) override;
|
||||
void WriteUint(uint64_t value) override;
|
||||
void WriteDouble(double value) override;
|
||||
void WriteString(const std::string_view &value) override;
|
||||
void WriteString(std::string_view value) override;
|
||||
void WritePropertyValue(const PropertyValue &value) override;
|
||||
|
||||
uint64_t GetPosition();
|
||||
|
@ -29,8 +29,8 @@ class NameIdMapper final {
|
||||
bool operator<(const MapNameToId &other) { return name < other.name; }
|
||||
bool operator==(const MapNameToId &other) { return name == other.name; }
|
||||
|
||||
bool operator<(const std::string_view &other) { return name < other; }
|
||||
bool operator==(const std::string_view &other) { return name == other; }
|
||||
bool operator<(const std::string_view other) const { return name < other; }
|
||||
bool operator==(const std::string_view other) const { return name == other; }
|
||||
};
|
||||
|
||||
struct MapIdToName {
|
||||
@ -46,7 +46,7 @@ class NameIdMapper final {
|
||||
|
||||
public:
|
||||
/// @throw std::bad_alloc if unable to insert a new mapping
|
||||
uint64_t NameToId(const std::string_view &name) {
|
||||
uint64_t NameToId(const std::string_view name) {
|
||||
auto name_to_id_acc = name_to_id_.access();
|
||||
auto found = name_to_id_acc.find(name);
|
||||
uint64_t id;
|
||||
|
@@ -537,6 +537,33 @@ std::vector<Storage::ReplicationClient::RecoveryStep> Storage::ReplicationClient
return recovery_steps;
}

Storage::TimestampInfo Storage::ReplicationClient::GetTimestampInfo() {
Storage::TimestampInfo info;
info.current_timestamp_of_replica = 0;
info.current_number_of_timestamp_behind_master = 0;

try {
auto stream{rpc_client_->Stream<replication::TimestampRpc>()};
const auto response = stream.AwaitResponse();
const auto is_success = response.success;
if (!is_success) {
replica_state_.store(replication::ReplicaState::INVALID);
HandleRpcFailure();
}
auto main_time_stamp = storage_->last_commit_timestamp_.load();
info.current_timestamp_of_replica = response.current_commit_timestamp;
info.current_number_of_timestamp_behind_master = response.current_commit_timestamp - main_time_stamp;
} catch (const rpc::RpcFailedException &) {
{
std::unique_lock client_guard(client_lock_);
replica_state_.store(replication::ReplicaState::INVALID);
}
HandleRpcFailure(); // mutex already unlocked, if the new enqueued task dispatches immediately it probably won't block
}

return info;
}

////// TimeoutDispatcher //////
void Storage::ReplicationClient::TimeoutDispatcher::WaitForTaskToFinish() {
// Wait for the previous timeout task to finish
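For orientation: the new GetTimestampInfo() above asks the replica for its last commit timestamp over the new TimestampRpc and reports it together with the difference against the main's last commit timestamp; that pair of values is what SHOW REPLICAS surfaces as current_timestamp_of_replica and number_of_timestamp_behind_master. The standalone sketch below is not code from this diff (the helper name and the sample values are hypothetical); it only restates that arithmetic in isolation.

// Hypothetical sketch mirroring the fields filled in by GetTimestampInfo().
#include <cstdint>
#include <iostream>

struct TimestampInfo {
  uint64_t current_timestamp_of_replica;
  uint64_t current_number_of_timestamp_behind_master;
};

TimestampInfo MakeTimestampInfo(uint64_t replica_commit_ts, uint64_t main_commit_ts) {
  TimestampInfo info;
  info.current_timestamp_of_replica = replica_commit_ts;
  // Same (unsigned) subtraction as in the diff: replica timestamp minus the
  // main's last commit timestamp.
  info.current_number_of_timestamp_behind_master = replica_commit_ts - main_commit_ts;
  return info;
}

int main() {
  const auto info = MakeTimestampInfo(/*replica_commit_ts=*/42, /*main_commit_ts=*/40);
  std::cout << info.current_timestamp_of_replica << " "
            << info.current_number_of_timestamp_behind_master << "\n";  // prints "42 2"
}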
@@ -124,6 +124,8 @@ class Storage::ReplicationClient {

const auto &Endpoint() const { return rpc_client_->Endpoint(); }

Storage::TimestampInfo GetTimestampInfo();

private:
void FinalizeTransactionReplicationInternal();

@@ -80,6 +80,10 @@ Storage::ReplicationServer::ReplicationServer(Storage *storage, io::network::End
spdlog::debug("Received CurrentWalRpc");
this->CurrentWalHandler(req_reader, res_builder);
});
rpc_server_->Register<replication::TimestampRpc>([this](auto *req_reader, auto *res_builder) {
spdlog::debug("Received TimestampRpc");
this->TimestampHandler(req_reader, res_builder);
});
rpc_server_->Start();
}

@@ -284,6 +288,14 @@ void Storage::ReplicationServer::LoadWal(replication::Decoder *decoder) {
}
}

void Storage::ReplicationServer::TimestampHandler(slk::Reader *req_reader, slk::Builder *res_builder) {
replication::TimestampReq req;
slk::Load(&req, req_reader);

replication::TimestampRes res{true, storage_->last_commit_timestamp_.load()};
slk::Save(res, res_builder);
}

Storage::ReplicationServer::~ReplicationServer() {
if (rpc_server_) {
rpc_server_->Shutdown();

@@ -34,6 +34,7 @@ class Storage::ReplicationServer {
void SnapshotHandler(slk::Reader *req_reader, slk::Builder *res_builder);
void WalFilesHandler(slk::Reader *req_reader, slk::Builder *res_builder);
void CurrentWalHandler(slk::Reader *req_reader, slk::Builder *res_builder);
void TimestampHandler(slk::Reader *req_reader, slk::Builder *res_builder);

void LoadWal(replication::Decoder *decoder);
uint64_t ReadAndApplyDelta(durability::BaseDecoder *decoder);

@@ -67,6 +67,12 @@ cpp<#
((success :bool)
(current-commit-timestamp :uint64_t))))

(lcp:define-rpc timestamp
(:request ())
(:response
((success :bool)
(current-commit-timestamp :uint64_t))))

(lcp:pop-namespace) ;; replication
(lcp:pop-namespace) ;; storage
(lcp:pop-namespace) ;; memgraph
@@ -30,7 +30,7 @@ void Encoder::WriteDouble(double value) {
slk::Save(value, builder_);
}

void Encoder::WriteString(const std::string_view &value) {
void Encoder::WriteString(const std::string_view value) {
WriteMarker(durability::Marker::TYPE_STRING);
slk::Save(value, builder_);
}

@@ -33,7 +33,7 @@ class Encoder final : public durability::BaseEncoder {

void WriteDouble(double value) override;

void WriteString(const std::string_view &value) override;
void WriteString(std::string_view value) override;

void WritePropertyValue(const PropertyValue &value) override;
@@ -812,11 +812,11 @@ const std::string &Storage::Accessor::EdgeTypeToName(EdgeTypeId edge_type) const
return storage_->EdgeTypeToName(edge_type);
}

LabelId Storage::Accessor::NameToLabel(const std::string_view &name) { return storage_->NameToLabel(name); }
LabelId Storage::Accessor::NameToLabel(const std::string_view name) { return storage_->NameToLabel(name); }

PropertyId Storage::Accessor::NameToProperty(const std::string_view &name) { return storage_->NameToProperty(name); }
PropertyId Storage::Accessor::NameToProperty(const std::string_view name) { return storage_->NameToProperty(name); }

EdgeTypeId Storage::Accessor::NameToEdgeType(const std::string_view &name) { return storage_->NameToEdgeType(name); }
EdgeTypeId Storage::Accessor::NameToEdgeType(const std::string_view name) { return storage_->NameToEdgeType(name); }

void Storage::Accessor::AdvanceCommand() { ++transaction_.command_id; }

@@ -1121,13 +1121,13 @@ const std::string &Storage::EdgeTypeToName(EdgeTypeId edge_type) const {
return name_id_mapper_.IdToName(edge_type.AsUint());
}

LabelId Storage::NameToLabel(const std::string_view &name) { return LabelId::FromUint(name_id_mapper_.NameToId(name)); }
LabelId Storage::NameToLabel(const std::string_view name) { return LabelId::FromUint(name_id_mapper_.NameToId(name)); }

PropertyId Storage::NameToProperty(const std::string_view &name) {
PropertyId Storage::NameToProperty(const std::string_view name) {
return PropertyId::FromUint(name_id_mapper_.NameToId(name));
}

EdgeTypeId Storage::NameToEdgeType(const std::string_view &name) {
EdgeTypeId Storage::NameToEdgeType(const std::string_view name) {
return EdgeTypeId::FromUint(name_id_mapper_.NameToId(name));
}

@@ -1954,7 +1954,8 @@ std::vector<Storage::ReplicaInfo> Storage::ReplicasInfo() {
replica_info.reserve(clients.size());
std::transform(clients.begin(), clients.end(), std::back_inserter(replica_info),
[](const auto &client) -> ReplicaInfo {
return {client->Name(), client->Mode(), client->Timeout(), client->Endpoint(), client->State()};
return {client->Name(), client->Mode(), client->Timeout(),
client->Endpoint(), client->State(), client->GetTimestampInfo()};
});
return replica_info;
});
@ -283,13 +283,13 @@ class Storage final {
|
||||
const std::string &EdgeTypeToName(EdgeTypeId edge_type) const;
|
||||
|
||||
/// @throw std::bad_alloc if unable to insert a new mapping
|
||||
LabelId NameToLabel(const std::string_view &name);
|
||||
LabelId NameToLabel(std::string_view name);
|
||||
|
||||
/// @throw std::bad_alloc if unable to insert a new mapping
|
||||
PropertyId NameToProperty(const std::string_view &name);
|
||||
PropertyId NameToProperty(std::string_view name);
|
||||
|
||||
/// @throw std::bad_alloc if unable to insert a new mapping
|
||||
EdgeTypeId NameToEdgeType(const std::string_view &name);
|
||||
EdgeTypeId NameToEdgeType(std::string_view name);
|
||||
|
||||
bool LabelIndexExists(LabelId label) const { return storage_->indices_.label_index.IndexExists(label); }
|
||||
|
||||
@ -343,13 +343,13 @@ class Storage final {
  const std::string &EdgeTypeToName(EdgeTypeId edge_type) const;

  /// @throw std::bad_alloc if unable to insert a new mapping
  LabelId NameToLabel(const std::string_view &name);
  LabelId NameToLabel(std::string_view name);

  /// @throw std::bad_alloc if unable to insert a new mapping
  PropertyId NameToProperty(const std::string_view &name);
  PropertyId NameToProperty(std::string_view name);

  /// @throw std::bad_alloc if unable to insert a new mapping
  EdgeTypeId NameToEdgeType(const std::string_view &name);
  EdgeTypeId NameToEdgeType(std::string_view name);

  /// @throw std::bad_alloc
  bool CreateIndex(LabelId label, std::optional<uint64_t> desired_commit_timestamp = {});

@ -425,12 +425,18 @@ class Storage final {

  ReplicationRole GetReplicationRole() const;

  struct TimestampInfo {
    uint64_t current_timestamp_of_replica;
    uint64_t current_number_of_timestamp_behind_master;
  };

  struct ReplicaInfo {
    std::string name;
    replication::ReplicationMode mode;
    std::optional<double> timeout;
    io::network::Endpoint endpoint;
    replication::ReplicaState state;
    TimestampInfo timestamp_info;
  };

  std::vector<ReplicaInfo> ReplicasInfo();

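The new TimestampInfo member carries the replica's last timestamp and how far it lags behind the main instance. A minimal sketch of how those two numbers relate, assuming hypothetical `main_timestamp` / `replica_timestamp` inputs (the real values come from the replication clients inside Storage):

#include <cstdint>
#include <iostream>

// Mirrors the struct added to Storage; the computation below is illustrative only.
struct TimestampInfo {
  uint64_t current_timestamp_of_replica;
  uint64_t current_number_of_timestamp_behind_master;
};

TimestampInfo MakeTimestampInfo(uint64_t main_timestamp, uint64_t replica_timestamp) {
  const uint64_t lag = main_timestamp >= replica_timestamp ? main_timestamp - replica_timestamp : 0;
  return {replica_timestamp, lag};
}

int main() {
  const auto info = MakeTimestampInfo(4, 4);
  std::cout << info.current_timestamp_of_replica << ' '
            << info.current_number_of_timestamp_behind_master << '\n';  // prints "4 0"
}
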
@ -370,7 +370,7 @@ void OutputFile::Write(const uint8_t *data, size_t size) {
}

void OutputFile::Write(const char *data, size_t size) { Write(reinterpret_cast<const uint8_t *>(data), size); }
void OutputFile::Write(const std::string_view &data) { Write(data.data(), data.size()); }
void OutputFile::Write(const std::string_view data) { Write(data.data(), data.size()); }

size_t OutputFile::SeekFile(const Position position, const ssize_t offset) {
  int whence;

@ -209,7 +209,7 @@ class OutputFile {
  /// the program.
  void Write(const uint8_t *data, size_t size);
  void Write(const char *data, size_t size);
  void Write(const std::string_view &data);
  void Write(std::string_view data);

  /// This method gets the current absolute position in the file. On failure and
  /// misuse it crashes the program.

@ -16,7 +16,7 @@

namespace memgraph::utils {

inline uint64_t Fnv(const std::string_view &s) {
inline uint64_t Fnv(const std::string_view s) {
  // fnv1a is recommended so use it as the default implementation.
  uint64_t hash = 14695981039346656037UL;

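The Fnv hunk only shows the signature change and the 64-bit offset basis. For context, a stand-alone FNV-1a sketch using the standard constants (offset basis 14695981039346656037, prime 1099511628211); it is an illustration, not necessarily the exact body of memgraph::utils::Fnv:

#include <cstdint>
#include <string_view>

inline uint64_t Fnv1a64(std::string_view s) {
  uint64_t hash = 14695981039346656037UL;  // FNV offset basis
  for (const char c : s) {
    hash ^= static_cast<uint64_t>(static_cast<unsigned char>(c));
    hash *= 1099511628211UL;  // FNV prime
  }
  return hash;
}
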
@ -29,7 +29,7 @@
namespace memgraph::utils {

/** Remove whitespace characters from the start of a string. */
inline std::string_view LTrim(const std::string_view &s) {
inline std::string_view LTrim(const std::string_view s) {
  size_t start = 0;
  while (start < s.size() && isspace(s[start])) {
    ++start;

@ -38,7 +38,7 @@ inline std::string_view LTrim(const std::string_view &s) {
}

/** Remove characters found in `chars` from the start of a string. */
inline std::string_view LTrim(const std::string_view &s, const std::string_view &chars) {
inline std::string_view LTrim(const std::string_view s, const std::string_view chars) {
  size_t start = 0;
  while (start < s.size() && chars.find(s[start]) != std::string::npos) {
    ++start;

@ -47,7 +47,7 @@ inline std::string_view LTrim(const std::string_view &s, const std::string_view
}

/** Remove whitespace characters from the end of a string. */
inline std::string_view RTrim(const std::string_view &s) {
inline std::string_view RTrim(const std::string_view s) {
  size_t count = s.size();
  while (count > static_cast<size_t>(0) && isspace(s[count - 1])) {
    --count;

@ -56,7 +56,7 @@ inline std::string_view RTrim(const std::string_view &s) {
}

/** Remove characters found in `chars` from the end of a string. */
inline std::string_view RTrim(const std::string_view &s, const std::string_view &chars) {
inline std::string_view RTrim(const std::string_view s, const std::string_view chars) {
  size_t count = s.size();
  while (count > static_cast<size_t>(0) && chars.find(s[count - 1]) != std::string::npos) {
    --count;

@ -65,7 +65,7 @@ inline std::string_view RTrim(const std::string_view &s, const std::string_view
}

/** Remove whitespace characters from the start and from the end of a string. */
inline std::string_view Trim(const std::string_view &s) {
inline std::string_view Trim(const std::string_view s) {
  size_t start = 0;
  size_t count = s.size();
  while (start < s.size() && isspace(s[start])) {

@ -78,7 +78,7 @@ inline std::string_view Trim(const std::string_view &s) {
}

/** Remove characters found in `chars` from the start and the end of `s`. */
inline std::string_view Trim(const std::string_view &s, const std::string_view &chars) {
inline std::string_view Trim(const std::string_view s, const std::string_view chars) {
  size_t start = 0;
  size_t count = s.size();
  while (start < s.size() && chars.find(s[start]) != std::string::npos) {

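The trimming hunks only change how the arguments are passed. As a quick reference, a self-contained version of whitespace Trim with the new by-value signature; it is a sketch mirroring the visible advance-start / shrink-count logic, not the exact header code:

#include <cctype>
#include <cstddef>
#include <iostream>
#include <string_view>

inline std::string_view TrimSketch(std::string_view s) {
  std::size_t start = 0;
  std::size_t count = s.size();
  while (start < count && std::isspace(static_cast<unsigned char>(s[start]))) ++start;
  while (count > start && std::isspace(static_cast<unsigned char>(s[count - 1]))) --count;
  return s.substr(start, count - start);
}

int main() { std::cout << '[' << TrimSketch("  pineapple  ") << "]\n"; }  // prints [pineapple]
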
@ -97,7 +97,7 @@ inline std::string_view Trim(const std::string_view &s, const std::string_view &
 */
template <class TAllocator>
std::basic_string<char, std::char_traits<char>, TAllocator> *ToLowerCase(
    std::basic_string<char, std::char_traits<char>, TAllocator> *out, const std::string_view &s) {
    std::basic_string<char, std::char_traits<char>, TAllocator> *out, const std::string_view s) {
  out->resize(s.size());
  std::transform(s.begin(), s.end(), out->begin(), [](char c) { return tolower(c); });
  return out;

@ -107,7 +107,7 @@ std::basic_string<char, std::char_traits<char>, TAllocator> *ToLowerCase(
 * Lowercase all characters of a string.
 * Transformation is locale independent.
 */
inline std::string ToLowerCase(const std::string_view &s) {
inline std::string ToLowerCase(const std::string_view s) {
  std::string res;
  ToLowerCase(&res, s);
  return res;

@ -120,7 +120,7 @@ inline std::string ToLowerCase(const std::string_view &s) {
 */
template <class TAllocator>
std::basic_string<char, std::char_traits<char>, TAllocator> *ToUpperCase(
    std::basic_string<char, std::char_traits<char>, TAllocator> *out, const std::string_view &s) {
    std::basic_string<char, std::char_traits<char>, TAllocator> *out, const std::string_view s) {
  out->resize(s.size());
  std::transform(s.begin(), s.end(), out->begin(), [](char c) { return toupper(c); });
  return out;

@ -130,7 +130,7 @@ std::basic_string<char, std::char_traits<char>, TAllocator> *ToUpperCase(
 * Uppercase all characters of a string and store the result in `out`.
 * Transformation is locale independent.
 */
inline std::string ToUpperCase(const std::string_view &s) {
inline std::string ToUpperCase(const std::string_view s) {
  std::string res;
  ToUpperCase(&res, s);
  return res;

@ -143,7 +143,7 @@ inline std::string ToUpperCase(const std::string_view &s) {
template <class TCollection, class TAllocator>
std::basic_string<char, std::char_traits<char>, TAllocator> *Join(
    std::basic_string<char, std::char_traits<char>, TAllocator> *out, const TCollection &strings,
    const std::string_view &separator) {
    const std::string_view separator) {
  out->clear();
  if (strings.empty()) return out;
  int64_t total_size = 0;

@ -163,7 +163,7 @@ std::basic_string<char, std::char_traits<char>, TAllocator> *Join(
/**
 * Join the `strings` collection separated by a given separator.
 */
inline std::string Join(const std::vector<std::string> &strings, const std::string_view &separator) {
inline std::string Join(const std::vector<std::string> &strings, const std::string_view separator) {
  std::string res;
  Join(&res, strings, separator);
  return res;

@ -175,8 +175,8 @@ inline std::string Join(const std::vector<std::string> &strings, const std::stri
 */
template <class TAllocator>
std::basic_string<char, std::char_traits<char>, TAllocator> *Replace(
    std::basic_string<char, std::char_traits<char>, TAllocator> *out, const std::string_view &src,
    const std::string_view &match, const std::string_view &replacement) {
    std::basic_string<char, std::char_traits<char>, TAllocator> *out, const std::string_view src,
    const std::string_view match, const std::string_view replacement) {
  // TODO: This could be implemented much more efficiently.
  *out = src;
  for (size_t pos = out->find(match); pos != std::string::npos; pos = out->find(match, pos + replacement.size())) {

@ -186,8 +186,8 @@ std::basic_string<char, std::char_traits<char>, TAllocator> *Replace(
}

/** Replace all occurrences of `match` in `src` with `replacement`. */
inline std::string Replace(const std::string_view &src, const std::string_view &match,
                           const std::string_view &replacement) {
inline std::string Replace(const std::string_view src, const std::string_view match,
                           const std::string_view replacement) {
  std::string res;
  Replace(&res, src, match, replacement);
  return res;

@ -200,8 +200,8 @@ inline std::string Replace(const std::string_view &src, const std::string_view &
 * @return pointer to `out`.
 */
template <class TString, class TAllocator>
std::vector<TString, TAllocator> *Split(std::vector<TString, TAllocator> *out, const std::string_view &src,
                                        const std::string_view &delimiter, int splits = -1) {
std::vector<TString, TAllocator> *Split(std::vector<TString, TAllocator> *out, const std::string_view src,
                                        const std::string_view delimiter, int splits = -1) {
  out->clear();
  if (src.empty()) return out;
  size_t index = 0;

@ -220,7 +220,7 @@ std::vector<TString, TAllocator> *Split(std::vector<TString, TAllocator> *out, c
 * The vector will have at most `splits` + 1 elements. Negative value of
 * `splits` indicates to perform all possible splits.
 */
inline std::vector<std::string> Split(const std::string_view &src, const std::string_view &delimiter, int splits = -1) {
inline std::vector<std::string> Split(const std::string_view src, const std::string_view delimiter, int splits = -1) {
  std::vector<std::string> res;
  Split(&res, src, delimiter, splits);
  return res;

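For the delimiter-based Split shown above, a hedged stand-alone re-implementation with the by-value signatures; it reproduces the visible splitting loop but ignores the `splits` limit for brevity and is not the exact Memgraph code:

#include <cstddef>
#include <iostream>
#include <string>
#include <string_view>
#include <vector>

inline std::vector<std::string> SplitSketch(std::string_view src, std::string_view delimiter) {
  std::vector<std::string> res;
  if (src.empty()) return res;
  if (delimiter.empty()) {  // avoid an infinite loop on an empty delimiter
    res.emplace_back(src);
    return res;
  }
  std::size_t index = 0;
  while (true) {
    const auto pos = src.find(delimiter, index);
    if (pos == std::string_view::npos) {
      res.emplace_back(src.substr(index));
      break;
    }
    res.emplace_back(src.substr(index, pos - index));
    index = pos + delimiter.size();
  }
  return res;
}

int main() {
  for (const auto &part : SplitSketch("a,b,c", ",")) std::cout << part << '\n';  // prints a, b, c on separate lines
}
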
@ -234,7 +234,7 @@ inline std::vector<std::string> Split(const std::string_view &src, const std::st
 * @return pointer to `out`.
 */
template <class TString, class TAllocator>
std::vector<TString, TAllocator> *Split(std::vector<TString, TAllocator> *out, const std::string_view &src) {
std::vector<TString, TAllocator> *Split(std::vector<TString, TAllocator> *out, const std::string_view src) {
  out->clear();
  if (src.empty()) return out;
  // TODO: Investigate how much regex allocate and perhaps replace with custom

@ -256,7 +256,7 @@ std::vector<TString, TAllocator> *Split(std::vector<TString, TAllocator> *out, c
 * Additionally, the result will not contain empty strings at the start or end
 * as if the string was trimmed before splitting.
 */
inline std::vector<std::string> Split(const std::string_view &src) {
inline std::vector<std::string> Split(const std::string_view src) {
  std::vector<std::string> res;
  Split(&res, src);
  return res;

@ -271,8 +271,8 @@ inline std::vector<std::string> Split(const std::string_view &src) {
 * @return pointer to `out`.
 */
template <class TString, class TAllocator>
std::vector<TString, TAllocator> *RSplit(std::vector<TString, TAllocator> *out, const std::string_view &src,
                                         const std::string_view &delimiter, int splits = -1) {
std::vector<TString, TAllocator> *RSplit(std::vector<TString, TAllocator> *out, const std::string_view src,
                                         const std::string_view delimiter, int splits = -1) {
  out->clear();
  if (src.empty()) return out;
  size_t index = src.size();

@ -295,8 +295,7 @@ std::vector<TString, TAllocator> *RSplit(std::vector<TString, TAllocator> *out,
 * have at most `splits` + 1 elements. Negative value of `splits` indicates to
 * perform all possible splits.
 */
inline std::vector<std::string> RSplit(const std::string_view &src, const std::string_view &delimiter,
                                       int splits = -1) {
inline std::vector<std::string> RSplit(const std::string_view src, const std::string_view delimiter, int splits = -1) {
  std::vector<std::string> res;
  RSplit(&res, src, delimiter, splits);
  return res;

@ -309,7 +308,7 @@ inline std::vector<std::string> RSplit(const std::string_view &src, const std::s
 *
 * @throw BasicException if unable to parse the whole string.
 */
inline int64_t ParseInt(const std::string_view &s) {
inline int64_t ParseInt(const std::string_view s) {
  // stol would be nicer but it uses current locale so we shouldn't use it.
  int64_t t = 0;
  // NOTE: Constructing std::istringstream will make a copy of the string, which

@ -336,7 +335,7 @@ inline int64_t ParseInt(const std::string_view &s) {
 *
 * @throw BasicException if unable to parse the whole string.
 */
inline double ParseDouble(const std::string_view &s) {
inline double ParseDouble(const std::string_view s) {
  // stod would be nicer but it uses current locale so we shouldn't use it.
  double t = 0.0;
  // NOTE: Constructing std::istringstream will make a copy of the string, which

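ParseInt and ParseDouble go through std::istringstream (see the NOTE about the extra copy). As a hedged aside, an allocation-free, locale-independent integer parse is possible with std::from_chars; this is an alternative sketch, not what the header actually does:

#include <charconv>
#include <cstdint>
#include <stdexcept>
#include <string_view>

inline int64_t ParseIntSketch(std::string_view s) {
  int64_t value = 0;
  const auto [ptr, ec] = std::from_chars(s.data(), s.data() + s.size(), value);
  // Reject both conversion errors and trailing garbage, so the whole string must parse.
  if (ec != std::errc{} || ptr != s.data() + s.size())
    throw std::invalid_argument("unable to parse the whole string");
  return value;
}
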
@ -357,17 +356,17 @@ inline double ParseDouble(const std::string_view &s) {
}

/** Check if the given string `s` ends with the given `suffix`. */
inline bool EndsWith(const std::string_view &s, const std::string_view &suffix) {
inline bool EndsWith(const std::string_view s, const std::string_view suffix) {
  return s.size() >= suffix.size() && s.compare(s.size() - suffix.size(), std::string::npos, suffix) == 0;
}

/** Check if the given string `s` starts with the given `prefix`. */
inline bool StartsWith(const std::string_view &s, const std::string_view &prefix) {
inline bool StartsWith(const std::string_view s, const std::string_view prefix) {
  return s.size() >= prefix.size() && s.compare(0, prefix.size(), prefix) == 0;
}

/** Perform case-insensitive string equality test. */
inline bool IEquals(const std::string_view &lhs, const std::string_view &rhs) {
inline bool IEquals(const std::string_view lhs, const std::string_view rhs) {
  if (lhs.size() != rhs.size()) return false;
  for (size_t i = 0; i < lhs.size(); ++i) {
    if (tolower(lhs[i]) != tolower(rhs[i])) return false;

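A stand-alone illustration of the case-insensitive comparison touched above, with the unsigned-char cast that keeps std::tolower well defined; `IEqualsSketch` is a hypothetical mirror of the loop, not the Memgraph function itself:

#include <cassert>
#include <cctype>
#include <cstddef>
#include <string_view>

inline bool IEqualsSketch(std::string_view lhs, std::string_view rhs) {
  if (lhs.size() != rhs.size()) return false;
  for (std::size_t i = 0; i < lhs.size(); ++i) {
    if (std::tolower(static_cast<unsigned char>(lhs[i])) !=
        std::tolower(static_cast<unsigned char>(rhs[i])))
      return false;
  }
  return true;
}

int main() {
  assert(IEqualsSketch("MATCH", "match"));
  assert(!IEqualsSketch("MATCH", "merge"));
}
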
@ -406,7 +405,7 @@ inline std::string RandomString(size_t length) {
 */
template <class TAllocator>
std::basic_string<char, std::char_traits<char>, TAllocator> *Escape(
    std::basic_string<char, std::char_traits<char>, TAllocator> *out, const std::string_view &src) {
    std::basic_string<char, std::char_traits<char>, TAllocator> *out, const std::string_view src) {
  out->clear();
  out->reserve(src.size() + 2);
  out->append(1, '"');

@ -433,7 +432,7 @@ std::basic_string<char, std::char_traits<char>, TAllocator> *Escape(
}

/** Escape all whitespace and quotation characters in the given string. */
inline std::string Escape(const std::string_view &src) {
inline std::string Escape(const std::string_view src) {
  std::string res;
  Escape(&res, src);
  return res;

@ -445,7 +444,7 @@ inline std::string Escape(const std::string_view &src) {
 * clamped to a valid interval. Therefore, this function never throws
 * std::out_of_range, unlike std::basic_string::substr.
 */
inline std::string_view Substr(const std::string_view &string, size_t pos = 0, size_t count = std::string::npos) {
inline std::string_view Substr(const std::string_view string, size_t pos = 0, size_t count = std::string::npos) {
  if (pos >= string.size()) return std::string_view(string.data(), 0);
  auto len = std::min(string.size() - pos, count);
  return string.substr(pos, len);

@ -12,6 +12,7 @@
import sys

import pytest
import time

from common import execute_and_fetch_all

@ -27,5 +28,84 @@ def test_show_replication_role(port, role, connection):
    assert data[0][0] == role


def test_show_replicas(connection):
    cursor = connection(7687, "main").cursor()
    actual_data = set(execute_and_fetch_all(cursor, "SHOW REPLICAS;"))

    expected_column_names = {
        "name",
        "socket_address",
        "sync_mode",
        "timeout",
        "current_timestamp_of_replica",
        "number_of_timestamp_behind_master",
        "state",
    }
    actual_column_names = {x.name for x in cursor.description}
    assert expected_column_names == actual_column_names

    expected_data = {
        ("replica_1", "127.0.0.1:10001", "sync", 2.0, 0, 0, "ready"),
        ("replica_2", "127.0.0.1:10002", "sync", 1.0, 0, 0, "ready"),
        ("replica_3", "127.0.0.1:10003", "async", None, 0, 0, "ready"),
    }
    assert expected_data == actual_data


def test_show_replicas_while_inserting_data(connection):
    # Goal is to check that the timestamps are correctly computed from the information we get from replicas.
    # 0/ Check the original state of the replicas.
    # 1/ Add some data on main.
    # 2/ Check the state of the replicas.
    # 3/ Execute a read-only query.
    # 4/ Check that the states have not changed.

    # 0/
    cursor = connection(7687, "main").cursor()
    actual_data = set(execute_and_fetch_all(cursor, "SHOW REPLICAS;"))

    expected_column_names = {
        "name",
        "socket_address",
        "sync_mode",
        "timeout",
        "current_timestamp_of_replica",
        "number_of_timestamp_behind_master",
        "state",
    }
    actual_column_names = {x.name for x in cursor.description}
    assert expected_column_names == actual_column_names

    expected_data = {
        ("replica_1", "127.0.0.1:10001", "sync", 2.0, 0, 0, "ready"),
        ("replica_2", "127.0.0.1:10002", "sync", 1.0, 0, 0, "ready"),
        ("replica_3", "127.0.0.1:10003", "async", None, 0, 0, "ready"),
    }
    assert expected_data == actual_data

    # 1/
    execute_and_fetch_all(cursor, "CREATE (n1:Number {name: 'forty_two', value:42});")
    time.sleep(1)

    # 2/
    expected_data = {
        ("replica_1", "127.0.0.1:10001", "sync", 2.0, 4, 0, "ready"),
        ("replica_2", "127.0.0.1:10002", "sync", 1.0, 4, 0, "ready"),
        ("replica_3", "127.0.0.1:10003", "async", None, 4, 0, "ready"),
    }
    actual_data = set(execute_and_fetch_all(cursor, "SHOW REPLICAS;"))
    print("actual_data=" + str(actual_data))
    print("expected_data=" + str(expected_data))
    assert expected_data == actual_data

    # 3/
    res = execute_and_fetch_all(cursor, "MATCH (node) return node;")
    assert 1 == len(res)

    # 4/
    actual_data = set(execute_and_fetch_all(cursor, "SHOW REPLICAS;"))
    assert expected_data == actual_data


if __name__ == "__main__":
    sys.exit(pytest.main([__file__, "-rA"]))

@ -78,16 +78,24 @@ def test_show_replicas(connection):

    # 1/
    actual_data = set(execute_and_fetch_all(cursor, "SHOW REPLICAS;"))
    EXPECTED_COLUMN_NAMES = {"name", "socket_address", "sync_mode", "timeout", "state"}
    EXPECTED_COLUMN_NAMES = {
        "name",
        "socket_address",
        "sync_mode",
        "timeout",
        "current_timestamp_of_replica",
        "number_of_timestamp_behind_master",
        "state",
    }

    actual_column_names = {x.name for x in cursor.description}
    assert EXPECTED_COLUMN_NAMES == actual_column_names

    expected_data = {
        ("replica_1", "127.0.0.1:10001", "sync", 0, "ready"),
        ("replica_2", "127.0.0.1:10002", "sync", 1.0, "ready"),
        ("replica_3", "127.0.0.1:10003", "async", None, "ready"),
        ("replica_4", "127.0.0.1:10004", "async", None, "ready"),
        ("replica_1", "127.0.0.1:10001", "sync", 0, 0, 0, "ready"),
        ("replica_2", "127.0.0.1:10002", "sync", 1.0, 0, 0, "ready"),
        ("replica_3", "127.0.0.1:10003", "async", None, 0, 0, "ready"),
        ("replica_4", "127.0.0.1:10004", "async", None, 0, 0, "ready"),
    }
    assert expected_data == actual_data

@ -95,9 +103,9 @@ def test_show_replicas(connection):
    execute_and_fetch_all(cursor, "DROP REPLICA replica_2")
    actual_data = set(execute_and_fetch_all(cursor, "SHOW REPLICAS;"))
    expected_data = {
        ("replica_1", "127.0.0.1:10001", "sync", 0, "ready"),
        ("replica_3", "127.0.0.1:10003", "async", None, "ready"),
        ("replica_4", "127.0.0.1:10004", "async", None, "ready"),
        ("replica_1", "127.0.0.1:10001", "sync", 0, 0, 0, "ready"),
        ("replica_3", "127.0.0.1:10003", "async", None, 0, 0, "ready"),
        ("replica_4", "127.0.0.1:10004", "async", None, 0, 0, "ready"),
    }
    assert expected_data == actual_data

@ -110,9 +118,9 @@ def test_show_replicas(connection):
    time.sleep(2)
    actual_data = set(execute_and_fetch_all(cursor, "SHOW REPLICAS;"))
    expected_data = {
        ("replica_1", "127.0.0.1:10001", "sync", 0, "invalid"),
        ("replica_3", "127.0.0.1:10003", "async", None, "invalid"),
        ("replica_4", "127.0.0.1:10004", "async", None, "invalid"),
        ("replica_1", "127.0.0.1:10001", "sync", 0, 0, 0, "invalid"),
        ("replica_3", "127.0.0.1:10003", "async", None, 0, 0, "invalid"),
        ("replica_4", "127.0.0.1:10004", "async", None, 0, 0, "invalid"),
    }
    assert expected_data == actual_data

@ -69,7 +69,7 @@ workloads:
    args: ["--bolt-port", "7687", "--log-level=TRACE"]
    log_file: "replication-e2e-main.log"
    setup_queries: [
      "REGISTER REPLICA replica_1 SYNC WITH TIMEOUT 0 TO '127.0.0.1:10001'",
      "REGISTER REPLICA replica_1 SYNC WITH TIMEOUT 2 TO '127.0.0.1:10001'",
      "REGISTER REPLICA replica_2 SYNC WITH TIMEOUT 1 TO '127.0.0.1:10002'",
      "REGISTER REPLICA replica_3 ASYNC TO '127.0.0.1:10003'"
    ]

@ -3044,7 +3044,7 @@ void CheckParsedCallProcedure(const CypherQuery &query, Base &ast_generator,
  }
  std::vector<std::string> args_as_str{};
  std::transform(args.begin(), args.end(), std::back_inserter(args_as_str),
                 [](const std::string_view &arg) { return std::string{arg}; });
                 [](const std::string_view arg) { return std::string{arg}; });
  EXPECT_EQ(identifier_names, args_as_str);
  EXPECT_EQ(identifier_names, call_proc->result_fields_);
  ASSERT_EQ(call_proc->is_write_, type == ProcedureType::WRITE);